[
  {
    "path": ".eslintignore",
    "content": "browser\ndocs\nlib\n_build\n"
  },
  {
    "path": ".eslintrc.yml",
    "content": "root: true\nparser: '@babel/eslint-parser'\nparserOptions:\n  ecmaVersion: 2020\n  sourceType: module\n  requireConfigFile: false\nenv:\n  node: true\n  es6: true\nextends:\n  - 'eslint:recommended'\n  - 'plugin:prettier/recommended'\nplugins:\n  - prettier\nrules:\n  id-length: off\n  class-methods-use-this:\n    - error\n    - exceptMethods:\n      # These methods are often overridden to hide implementation\n      # in parent class and throw error on invokation.\n      - field\n      - script\n      - missing\n      - format\n      - gapPolicy\n      - validationMethod\n      - scoreMode\n      - value\n      - executionHint\n      - _warn\n  no-unused-vars:\n    - error\n    - argsIgnorePattern: '^_'\n  no-console: off\n  prefer-const: error\n  no-var: error\n  no-irregular-whitespace:\n    - error\n    - skipComments: true\n"
  },
  {
    "path": ".gitattributes",
    "content": "* text=auto\n*.js text eol=lf\n*.sh text eol=lf\n"
  },
  {
    "path": ".github/workflows/build.yml",
    "content": "name: build\n\non:\n  push:\n    tags: [v*]\n    branches: [master]\n  pull_request:\n\npermissions:\n  id-token: write # to enable use of OIDC for trusted publishing and npm provenance\n  contents: write # to be able to publish a GitHub release\n  issues: write # to be able to comment on released issues\n  pull-requests: write # to be able to comment on released pull requests\n\njobs:\n  check:\n    runs-on: ubuntu-latest\n\n    strategy:\n      matrix:\n        node-version: [20.x, 22.x, 24.x]\n\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n      - name: Use Node.js ${{ matrix.node-version }}\n        uses: actions/setup-node@v4\n        with:\n          node-version: ${{ matrix.node-version }}\n          cache: 'npm'\n      - name: Install dependencies\n        run: npm ci\n      - name: Check\n        run: npm run check\n      - name: Coveralls\n        uses: coverallsapp/github-action@1.1.3\n        with:\n          github-token: ${{ secrets.GITHUB_TOKEN }}\n          flag-name: ${{matrix.os}}-node-${{ matrix.node }}\n          parallel: true\n\n  finish:\n    needs: check\n    runs-on: ubuntu-latest\n    steps:\n      - name: Coveralls Finished\n        uses: coverallsapp/github-action@1.1.3\n        with:\n          github-token: ${{ secrets.GITHUB_TOKEN }}\n          parallel-finished: true\n\n  release:\n    needs: check\n\n    if: github.ref == 'refs/heads/master'\n\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n        with:\n          fetch-depth: 0\n      - name: Setup Node.js\n        uses: actions/setup-node@v4\n        with:\n          node-version: '24.x'\n          registry-url: 'https://registry.npmjs.org'\n          cache: 'npm'\n      - name: Install dependencies\n        run: npm ci\n      - name: Release\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n        run: npx semantic-release@25\n"
  },
  {
    "path": ".gitignore",
    "content": "# Logs\nlogs\n*.log\nnpm-debug.log*\n\n# Runtime data\npids\n*.pid\n*.seed\n\n# Directory for instrumented libs generated by jscoverage/JSCover\nlib-cov\n\n# Coverage directory used by tools like istanbul\ncoverage\n\n# nyc test coverage\n.nyc_output\n\n# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)\n.grunt\n\n# node-waf configuration\n.lock-wscript\n\n# Compiled binary addons (http://nodejs.org/api/addons.html)\nbuild/Release\n\n# Dependency directories\nnode_modules\njspm_packages\n\n# Optional npm cache directory\n.npm\n\n# Optional REPL history\n.node_repl_history\n\n# local test files\nexperiment.js\n\n# babel generated files\nlib\n\n# files generated for tests(pre compilation)\n_build\n\n# generated docs\nbrowser/\ndocs/assets/\ndocs/index.html\n\n# ides\n.idea\n"
  },
  {
    "path": ".nvmrc",
    "content": "24\n"
  },
  {
    "path": ".prettierrc.json",
    "content": "{\n  \"tabWidth\": 4,\n  \"semi\": true,\n  \"singleQuote\": true,\n  \"trailingComma\": \"none\",\n  \"proseWrap\": \"preserve\",\n  \"arrowParens\": \"avoid\",\n  \"printWidth\": 80,\n  \"overrides\": [\n    {\n      \"files\": \"*.md\",\n      \"options\": {\n        \"tabWidth\": 2,\n        \"proseWrap\": \"always\"\n      }\n    },\n    {\n      \"files\": \"*.json\",\n      \"options\": {\n        \"tabWidth\": 2\n      }\n    }\n  ]\n}\n"
  },
  {
    "path": ".releaserc.json",
    "content": "{\n  \"branches\": [\n    \"master\",\n    \"next\"\n  ],\n  \"plugins\": [\n    \"@semantic-release/commit-analyzer\",\n    \"@semantic-release/release-notes-generator\",\n    [\n      \"@semantic-release/npm\",\n      {\n        \"npmPublish\": true,\n        \"pkgRoot\": \".\"\n      }\n    ],\n    \"@semantic-release/github\"\n  ]\n}\n\n"
  },
  {
    "path": ".vscode/extensions.json",
    "content": "{\n\t// See http://go.microsoft.com/fwlink/?LinkId=827846\n\t// for the documentation about the extensions.json format\n\t\"recommendations\": [\n\t\t// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp\n\t\t\"dbaeumer.vscode-eslint\",\n\t\t\"HookyQR.beautify\",\n\t\t\"eg2.vscode-npm-script\",\n\t\t\"dkundel.vscode-npm-source\",\n    \"esbenp.prettier-vscode\"\n\t]\n}\n"
  },
  {
    "path": ".vscode/settings.json",
    "content": "// Place your settings in this file to overwrite default and user settings.\n{\n  \"eslint.enable\": true,\n  \"files.eol\": \"\\n\",\n  \"vsicons.presets.angular\": false,\n  \"editor.detectIndentation\": true,\n  \"[json]\": {\n    \"editor.tabSize\": 2\n  },\n  \"editor.codeActionsOnSave\": {\n    \"source.fixAll.eslint\": \"explicit\"\n  }\n}\n"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "content": "# Contributor Covenant Code of Conduct\n\n## Our Pledge\n\nIn the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.\n\n## Our Standards\n\nExamples of behavior that contributes to creating a positive environment include:\n\n* Using welcoming and inclusive language\n* Being respectful of differing viewpoints and experiences\n* Gracefully accepting constructive criticism\n* Focusing on what is best for the community\n* Showing empathy towards other community members\n\nExamples of unacceptable behavior by participants include:\n\n* The use of sexualized language or imagery and unwelcome sexual attention or advances\n* Trolling, insulting/derogatory comments, and personal or political attacks\n* Public or private harassment\n* Publishing others' private information, such as a physical or electronic address, without explicit permission\n* Other conduct which could reasonably be considered inappropriate in a professional setting\n\n## Our Responsibilities\n\nProject maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.\n\nProject maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.\n\n## Scope\n\nThis Code of Conduct applies both within project spaces and in public spaces when an individual is representing 
the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.\n\n## Enforcement\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at sudo.suhas@gmail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.\n\nProject maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.\n\n## Attribution\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]\n\n[homepage]: http://contributor-covenant.org\n[version]: http://contributor-covenant.org/version/1/4/\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing\n\nThank you for taking the time to contribute to elastic-builder! Your work is truly\nappreciated.\n\nPlease follow this guide to make that PR the best that it can be!\n\n## Guidelines\n\n* Write small commits with concise, descriptive messages.\n* Include tests for any new feature, and regression tests for any bug fix.\n* Write [es2015+ javascript][1].\n* Try to keep the code style consistent. Follow existing patterns.\n* Modify or add to the README if your feature needs instructions on how to use it.\n\n## Development\n\nFork, then clone the repo:\n```\ngit clone https://github.com/your-username/elastic-builder.git\n```\n\nInstall dependencies using npm.\n```\nnpm install\n```\n\n### Write code\n\nTypically, your changes will go in the `src` directory (the `lib` directory\ncontains transpiled babel code) and the `test` directory.\n\nNo need to generate the built files, these will be added when a new version of\nelastic-builder is published to npm.\n\n### Run tests\n\nThis project uses eslint for javascript linting and vitest for testing. Run\nlinting using `npm run lint` and run tests using `npm test`. Or run both using:\n```\nnpm run check\n```\nThis should take care of formatting as well thanks to [eslint-plugin-prettier][3].\n\nYou can also run tests in watch mode during development:\n```\nnpm run test:watch\n```\n\nTo generate coverage reports:\n```\nnpm run test:src\n```\n\nCoverage reports are generated in the `coverage/` directory.\n\n### (Optional) Add yourself as a contributor\n\nThanks for contributing! Go ahead and add yourself to the list of contributors\nin the npm package manifest `package.json`.\n\n### Submit your PR\n\nThis is the last step! Make sure your PR is aimed to merge with the `master`\nbranch.\n\nYou should also write a good PR message with information on why this feature or\nfix is necesary or a good idea. 
For features, be sure to include information on\nhow to use the feature; and for bugs, information on how to reproduce the bug is\nhelpful!\n\n## Need help?\n\nIf you have any questions about the feature or fix you want to make, or if you\nhave doubts about the approach, or anything else you're not sure about, the best\nway to get in touch is to [open an issue][4]. I am happy to help out.\n\n[1]: https://babeljs.io/docs/learn-es2015/\n[2]: https://nodejs.org/\n[3]: https://github.com/not-an-aardvark/eslint-plugin-prettier\n[4]: https://github.com/sudo-suhas/elastic-builder/issues/new\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2017 Suhas Karanth\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "# elastic-builder\n\n[![npm version][version-badge]][package] [![Build Status][build-badge]][build]\n[![Coverage Status][coverage-badge]][coverage]\n[![semantic-release][semantic-release-badge]][semantic-release]\n\nA Node.js implementation of the [Elasticsearch][elasticsearch] DSL for use with\nthe [official elasticsearch javascript client][es-js-client] with builder\nsyntax.\n\n![elastic-builder](elastic-builder.png)\n\n**Check out the [API reference documentation][api-docs].**\n\nRelevant blog post:\nhttps://blog.logrocket.com/elasticsearch-query-body-builder-node-js/\n\nelastic-builder includes TypeScript definition for superior development\nexperience.\n\n## Node.js Requirements\n\n`elastic-builder` supports Node.js **20.x**, **22.x**, and **24.x** (LTS versions).\n\n## Elasticsearch compatibility\n\n`elastic-builder` was built for 5.x query DSL. However, the library should be\nusable with 2.x as well. For older versions of the DSL, you can try\n[`elastic.js`][elastic-js-fork] or [`bodybuilder`][bodybuilder]\n\n`elastic-builder` is also compatible with elasticsearch 6.0(alpha) for the most\npart. 
However, there are some [breaking changes][es-6-breaking-changes] which\nhave been called out in the docs (ex: [`Script.file`][api-docs-script-file]).\n\nWhat's Included:\n\n- [Request Body Search][es-search-request-body]\n- [Queries][es-query-dsl]\n- [Aggregations][es-search-aggregations]\n- [Suggesters][es-search-suggesters]\n- [Search Template][es-search-template.html]\n\n## Install\n\n```\nnpm install elastic-builder --save\n```\n\n## Usage\n\n```js\nconst esb = require('elastic-builder'); // the builder\n\nconst requestBody = esb.requestBodySearch()\n  .query(esb.matchQuery('message', 'this is a test'));\n\n// OR\n\nconst requestBody = new esb.RequestBodySearch().query(\n  new esb.MatchQuery('message', 'this is a test')\n);\n\nrequestBody.toJSON(); // or print to console - esb.prettyPrint(requestBody)\n{\n  \"query\": {\n    \"match\": {\n      \"message\": \"this is a test\"\n    }\n  }\n}\n```\n\nFor each class, `MyClass`, a utility function `myClass` has been provided which\nconstructs the object for us without the need for `new` keyword.\n\n## REPL\n\nTry it out on the command line using the node REPL:\n\n```\n# Start the repl\nnode ./node_modules/elastic-builder/repl.js\n# The builder is available in the context variable esb\nelastic-builder > esb.prettyPrint(\n...   esb.requestBodySearch()\n...     .query(esb.matchQuery('message', 'this is a test'))\n... );\n{\n  \"query\": {\n    \"match\": {\n      \"message\": \"this is a test\"\n    }\n  }\n}\n```\n\n## Motivation\n\nElasticsearch only provides a low level client for making requests.\n[`elastic.js`][elastic-js] was a relatively popular library for building the\nrequest search body. However, this project is not being maintained nor is the\n[fork][elastic-js-fork]. 
There were [several changes][es-5-breaking-changes] in\nthe 5.0 release which make the older libraries unusable.\n\nThis library is a port of `elastic.js` to es6 with elasticsearch 5.x\ncompatibility.\n\n## API Reference\n\nAPI reference can be accessed here - https://elastic-builder.js.org/docs. The\ndocs include examples ported from the [official elasticsearch\nreference][es-reference].\n\nAPI documentation was generated using [documentation.js][documentation-js]. It\nis being hosted with help from this awesome project -\nhttps://github.com/js-org/dns.js.org\n\n## Recipes\n\nThe library has a few helper recipes:\n\n- [Missing query][es-missing-query]\n- [Random sort query][es-random-score-query]\n- [Filter query][es-filter-query]\n\n```js\nconst qry = esb.cookMissingQuery('user');\n\nqry.toJSON();\n{\n  \"bool\": {\n    \"must_not\": {\n      \"exists\": { \"field\": \"user\" }\n    }\n  }\n}\n```\n\nCheck out the [reference docs][api-docs-recipes] for more examples.\n\nIf you have any recipes, please do share or better yet, create a [pull\nrequest][create-pull-request] :smile:.\n\n## Changelog\n\n[releases][releases]\n\n## Examples\n\n**Usage with official elasticsearch client:**\n\n```js\n'use strict';\n\nconst elasticsearch = require('elasticsearch');\nconst esb = require('elastic-builder');\n\nconst client = new elasticsearch.Client({\n  host: 'localhost:9200',\n  log: 'trace'\n});\n\nconst requestBody = esb\n  .requestBodySearch()\n  .query(esb.matchQuery('body', 'elasticsearch'));\n\nclient\n  .search({\n    index: 'twitter',\n    type: 'tweets',\n    body: requestBody.toJSON()\n  })\n  .then(resp => {\n    const hits = resp.hits.hits;\n  })\n  .catch(err => {\n    console.trace(err.message);\n  });\n```\n\n```js\n// Bool query\nconst requestBody = esb.requestBodySearch().query(\n  esb.boolQuery()\n    .must(esb.matchQuery('last_name', 'smith'))\n    .filter(esb.rangeQuery('age').gt(30))\n);\nrequestBody.toJSON();\n{\n  \"query\": {\n    \"bool\": {\n      
\"must\": {\n        \"match\": { \"last_name\": \"smith\" }\n      },\n      \"filter\": {\n        \"range\": { \"age\": { \"gt\": 30 } }\n      }\n    }\n  }\n}\n\n// Multi Match Query\nconst requestBody = esb.requestBodySearch().query(\n  esb.multiMatchQuery(['title', 'body'], 'Quick brown fox')\n    .type('best_fields')\n    .tieBreaker(0.3)\n    .minimumShouldMatch('30%')\n);\nrequestBody.toJSON();\n{\n  \"multi_match\": {\n    \"query\": \"Quick brown fox\",\n    \"type\": \"best_fields\",\n    \"fields\": [\"title\", \"body\"],\n    \"tie_breaker\": 0.3,\n    \"minimum_should_match\": \"30%\"\n  }\n}\n\n// Aggregation\nconst requestBody = esb.requestBodySearch()\n  .size(0)\n  .agg(esb.termsAggregation('popular_colors', 'color'));\nrequestBody.toJSON();\n{\n  \"size\": 0,\n  \"aggs\": {\n    \"popular_colors\": {\n      \"terms\": { \"field\": \"color\" }\n    }\n  }\n}\n\n// Nested Aggregation\nconst requestBody = esb.requestBodySearch()\n  .size(0)\n  .agg(\n    esb.termsAggregation('colors', 'color')\n      .agg(esb.avgAggregation('avg_price', 'price'))\n      .agg(esb.termsAggregation('make', 'make'))\n  );\nrequestBody.toJSON();\n{\n  \"size\": 0,\n  \"aggs\": {\n    \"colors\": {\n      \"terms\": { \"field\": \"color\" },\n      \"aggs\": {\n        \"avg_price\": {\n          \"avg\": { \"field\": \"price\" }\n        },\n        \"make\": {\n          \"terms\": { \"field\": \"make\" }\n        }\n      }\n    }\n  }\n}\n\n// If you prefer using the `new` keyword\nconst agg = new esb.TermsAggregation('countries', 'artist.country')\n  .order('rock>playback_stats.avg', 'desc')\n  .agg(\n    new esb.FilterAggregation('rock', new esb.TermQuery('genre', 'rock')).agg(\n      new esb.StatsAggregation('playback_stats', 'play_count')\n    )\n  );\nagg.toJSON();\n{\n  \"countries\": {\n    \"terms\": {\n      \"field\": \"artist.country\",\n      \"order\": { \"rock>playback_stats.avg\": \"desc\" }\n    },\n    \"aggs\": {\n      \"rock\": {\n        
\"filter\": {\n          \"term\": { \"genre\": \"rock\" }\n        },\n        \"aggs\": {\n          \"playback_stats\": {\n            \"stats\": { \"field\": \"play_count\" }\n          }\n        }\n      }\n    }\n  }\n}\n\n// Sort\nconst requestBody = esb.requestBodySearch()\n  .query(esb.boolQuery().filter(esb.termQuery('message', 'test')))\n  .sort(esb.sort('timestamp', 'desc'))\n  .sorts([\n    esb.sort('channel', 'desc'),\n    esb.sort('categories', 'desc'),\n    // The order defaults to desc when sorting on the _score,\n    // and defaults to asc when sorting on anything else.\n    esb.sort('content'),\n    esb.sort('price').order('desc').mode('avg')\n  ]);\nrequestBody.toJSON();\n{\n  \"query\": {\n    \"bool\": {\n      \"filter\": {\n        \"term\": { \"message\": \"test\" }\n      }\n    }\n  },\n  \"sort\": [\n    { \"timestamp\": { \"order\": \"desc\" } },\n    { \"channel\": { \"order\": \"desc\" } },\n    { \"categories\": { \"order\": \"desc\" } },\n    \"content\",\n    { \"price\": { \"order\": \"desc\", \"mode\": \"avg\" } }\n  ]\n}\n\n// From / size\nconst requestBody = esb.requestBodySearch()\n  .query(esb.matchAllQuery())\n  .size(5)\n  .from(10);\nrequestBody.toJSON();\n{\n  \"query\": { \"match_all\": {} },\n  \"size\": 5,\n  \"from\": 10\n}\n```\n\nFor more examples, check out the [reference docs][api-docs].\n\n## Validation\n\n`elastic-builder` provides lightweight validation where ever possible:\n\n```\n$ node ./node_modules/elastic-builder/repl.js\nelastic-builder > esb.multiMatchQuery().field('title').field('body').query('Quick brown fox').type('bwst_fields')\nSee https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-multi-match-query.html\nGot 'type' - bwst_fields\nError: The 'type' parameter should belong to Set {\n  'best_fields',\n  'most_fields',\n  'cross_fields',\n  'phrase',\n  'phrase_prefix' }\n    at MultiMatchQuery.type 
(E:\\Projects\\repos\\elastic-builder\\lib\\queries\\full-text-queries\\multi-match-query.js:134:23)\n    at repl:1:77\n    at ContextifyScript.Script.runInContext (vm.js:35:29)\n    at REPLServer.defaultEval (repl.js:342:29)\n    at bound (domain.js:280:14)\n    at REPLServer.runBound [as eval] (domain.js:293:12)\n    at REPLServer.<anonymous> (repl.js:538:10)\n    at emitOne (events.js:96:13)\n    at REPLServer.emit (events.js:188:7)\n    at REPLServer.Interface._onLine (readline.js:239:10)\n```\n\n## Tests\n\nRun unit tests:\n\n```\nnpm test\n```\n\nRun tests in watch mode:\n\n```\nnpm run test:watch\n```\n\nRun tests with coverage:\n\n```\nnpm run test:src\n```\n\nCoverage reports are generated in the `coverage/` directory.\n\n## Credits\n\n`elastic-builder` is heavily inspired by [`elastic.js`][elastic-js] and the\n[fork][elastic-js-fork] by Erwan Pigneul.\n\n[`bodybuilder`][bodybuilder] for documentation style, build setup, demo page.\n\n## License\n\nMIT © [Suhas Karanth][sudo-suhas]\n\n[version-badge]: https://badge.fury.io/js/elastic-builder.svg\n[package]: https://www.npmjs.com/package/elastic-builder\n[build-badge]:\n  https://github.com/sudo-suhas/elastic-builder/actions/workflows/build.yml/badge.svg\n[build]: https://github.com/sudo-suhas/elastic-builder/actions/workflows/build.yml\n[coverage-badge]:\n  https://coveralls.io/repos/github/sudo-suhas/elastic-builder/badge.svg?branch=master\n[coverage]: https://coveralls.io/github/sudo-suhas/elastic-builder?branch=master\n[semantic-release-badge]:\n  https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg\n[semantic-release]: https://github.com/semantic-release/semantic-release\n[elasticsearch]: https://www.elasticsearch.org/\n[es-js-client]:\n  https://www.elasticsearch.org/guide/en/elasticsearch/client/javascript-api/current/index.html\n[es-reference]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html\n[es-5-breaking-changes]:\n  
https://www.elastic.co/guide/en/elasticsearch/reference/current/breaking_50_search_changes.html\n[es-6-breaking-changes]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking_60_search_changes.html\n[es-search-request-body]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-body.html\n[es-query-dsl]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html\n[es-missing-query]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-exists-query.html#_literal_missing_literal_query\n[es-random-score-query]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-random\n[es-filter-query]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-bool-query.html\n[es-search-aggregations]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations.html\n[es-search-suggesters]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters.html\n[es-search-template.html]:\n  https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html\n[api-docs]: https://elastic-builder.js.org/docs\n[api-docs-recipes]: https://elastic-builder.js.org/docs#recipes\n[api-docs-script-file]: https://elastic-builder.js.org/docs/#scriptfile\n[releases]: https://github.com/sudo-suhas/elastic-builder/releases\n[elastic-js]: https://github.com/fullscale/elastic.js\n[elastic-js-fork]: https://github.com/ErwanPigneul/elastic.js\n[bodybuilder]: https://github.com/danpaz/bodybuilder\n[documentation-js]: https://github.com/documentationjs/documentation\n[create-pull-request]:\n  https://help.github.com/articles/creating-a-pull-request-from-a-fork/\n[sudo-suhas]: https://github.com/sudo-suhas\n"
  },
  {
    "path": "deploy-docs.sh",
    "content": "#!/bin/bash -xe\n# See http://tldp.org/LDP/abs/html/options.html\n# -x -> Print each command to stdout before executing it, expand commands\n# -e -> Abort script at first error, when a command exits with non-zero status\n#   (except in until or while loops, if-tests, list constructs)\n\nif ! hash gh-pages 2> /dev/null; then\n    npm i -g gh-pages\nfi\n\ngh-pages --add \\\n    --dist . \\\n    --src \"{browser/*,docs/*}\" \\\n    --repo \"https://git:${GITHUB_TOKEN}@github.com/sudo-suhas/elastic-builder.git\" \\\n    --message \"docs: Build docs for $(npm run -s print-version)\" \\\n    --user \"github-actions-bot <support+actions@github.com>\"\n"
  },
  {
    "path": "docs/documentation.yml",
    "content": "toc:\n  - name: elastic-builder\n    file: intro.md\n  - RequestBodySearch\n  - name: Queries\n    description: |\n      These classes allow creation and manipulation of objects which map to\n      elasticsearch DSL for queries.\n  - Query\n  - MatchAllQuery\n  - MatchNoneQuery\n  - name: Full Text Queries\n  - FullTextQueryBase\n  - MonoFieldQueryBase\n  - MatchQuery\n  - MatchPhraseQueryBase\n  - MatchPhraseQuery\n  - MatchPhrasePrefixQuery\n  - MultiMatchQuery\n  - CommonTermsQuery\n  - QueryStringQueryBase\n  - QueryStringQuery\n  - SimpleQueryStringQuery\n  - name: Term Level Queries\n  - ValueTermQueryBase\n  - TermQuery\n  - TermsQuery\n  - TermsSetQuery\n  - MultiTermQueryBase\n  - RangeQuery\n  - ExistsQuery\n  - PrefixQuery\n  - WildcardQuery\n  - RegexpQuery\n  - FuzzyQuery\n  - TypeQuery\n  - IdsQuery\n  - name: Compound Queries\n  - ConstantScoreQuery\n  - BoolQuery\n  - DisMaxQuery\n  - FunctionScoreQuery\n  - BoostingQuery\n  - name: Joining Queries\n  - JoiningQueryBase\n  - NestedQuery\n  - HasChildQuery\n  - HasParentQuery\n  - ParentIdQuery\n  - name: Geo Queries\n  - GeoQueryBase\n  - GeoShapeQuery\n  - GeoBoundingBoxQuery\n  - GeoDistanceQuery\n  - GeoPolygonQuery\n  - name: Specialized Queries\n  - MoreLikeThisQuery\n  - ScriptQuery\n  - PercolateQuery\n  - name: Span Queries\n  - SpanQueryBase\n  - SpanTermQuery\n  - SpanMultiTermQuery\n  - SpanFirstQuery\n  - SpanNearQuery\n  - SpanOrQuery\n  - SpanNotQuery\n  - SpanLittleBigQueryBase\n  - SpanContainingQuery\n  - SpanWithinQuery\n  - SpanFieldMaskingQuery\n  - name: Aggregations\n    description: |\n      These classes can be used to leverage the aggregations framework which helps provide\n      aggregated data based on a search query. 
They can be composed together with queries\n      and other aggregations in order to build complex summaries of the data.\n  - Aggregation\n  - name: Metrics Aggregations\n  - MetricsAggregationBase\n  - AvgAggregation\n  - CardinalityAggregation\n  - ExtendedStatsAggregation\n  - GeoBoundsAggregation\n  - GeoCentroidAggregation\n  - MaxAggregation\n  - MinAggregation\n  - PercentilesAggregation\n  - PercentileRanksAggregation\n  - ScriptedMetricAggregation\n  - StatsAggregation\n  - SumAggregation\n  - TopHitsAggregation\n  - ValueCountAggregation\n  - name: Bucket Aggregations\n  - BucketAggregationBase\n  - AdjacencyMatrixAggregation\n  - AutoDateHistogramAggregation\n  - ChildrenAggregation\n  - CompositeAggregation\n  - name: Values Source\n    description: |\n      The sources parameter controls the sources that should be used to build\n      the composite buckets. There are three different types of values source:\n        - [`Terms`](#termsvaluessource)\n        - [`Histogram`](#histogramvaluessource)\n        - [`Date Histogram`](#datehistogramvaluessource)\n  - ValuesSourceBase\n  - TermsValuesSource\n  - HistogramValuesSource\n  - DateHistogramValuesSource\n  - HistogramAggregationBase\n  - DateHistogramAggregation\n  - RangeAggregationBase\n  - DateRangeAggregation\n  - DiversifiedSamplerAggregation\n  - FilterAggregation\n  - FiltersAggregation\n  - GeoDistanceAggregation\n  - GeoHashGridAggregation\n  - GlobalAggregation\n  - HistogramAggregation\n  - IpRangeAggregation\n  - MissingAggregation\n  - NestedAggregation\n  - ParentAggregation\n  - RangeAggregation\n  - RareTermsAggregation\n  - ReverseNestedAggregation\n  - SamplerAggregation\n  - TermsAggregationBase\n  - SignificantAggregationBase\n  - SignificantTermsAggregation\n  - SignificantTextAggregation\n  - TermsAggregation\n  - name: Pipeline Aggregations\n  - PipelineAggregationBase\n  - AvgBucketAggregation\n  - DerivativeAggregation\n  - MaxBucketAggregation\n  - MinBucketAggregation\n  - 
SumBucketAggregation\n  - StatsBucketAggregation\n  - ExtendedStatsBucketAggregation\n  - PercentilesBucketAggregation\n  - MovingAverageAggregation\n  - MovingFunctionAggregation\n  - CumulativeSumAggregation\n  - BucketScriptAggregation\n  - BucketSelectorAggregation\n  - SerialDifferencingAggregation\n  - BucketSortAggregation\n  - name: Matrix Aggregations\n  - MatrixStatsAggregation\n  - name: Score Functions\n  - ScoreFunction\n  - ScriptScoreFunction\n  - WeightScoreFunction\n  - RandomScoreFunction\n  - FieldValueFactorFunction\n  - DecayScoreFunction\n  - name: Suggesters\n  - Suggester\n  - AnalyzedSuggesterBase\n  - TermSuggester\n  - DirectGenerator\n  - PhraseSuggester\n  - CompletionSuggester\n  - name: Miscellaneous\n  - Highlight\n  - Script\n  - GeoPoint\n  - GeoShape\n  - IndexedShape\n  - Sort\n  - Rescore\n  - InnerHits\n  - SearchTemplate\n"
  },
  {
    "path": "docs/intro.md",
    "content": "https://github.com/sudo-suhas/elastic-builder\n\n`elastic-builder` is a library for easily building elasticsearch request body for search.\nIt implements the builder syntax for building complex queries combining queries and aggregations.\n\nWhat's Included:\n  * [Request Body Search](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-body.html)\n  * [Queries](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html)\n  * [Aggregations](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations.html)\n  * [Suggesters](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters.html)\n  * [Search Template](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html)\n\nThe complete library documentation is present here.\n\nThere are two ways to use the classes for constructing queries:\n\n```js\n// Import the library\nconst esb = require('elastic-builder'); // the builder\n\n// Use `new` keyword for constructor instances of class\nconst requestBody = new esb.RequestBodySearch()\n    .query(new esb.MatchQuery('message', 'this is a test'));\n\n// Or use helper methods which construct the object without need for the `new` keyword\nconst requestBody = esb.requestBodySearch()\n    .query(esb.matchQuery('message', 'this is a test'));\n\n// Build the request body\nrequestBody.toJSON()\n{\n   \"query\": {\n     \"match\": {\n       \"message\": \"this is a test\"\n     }\n   }\n }\n```\n\n**Demo** - https://elastic-builder.js.org/\n\n**ProTip:** The source is transpiled using babel for compatibility with older versions of node and used by default.\nBut this is not required in node env 6 and above. You can directly use the `src` files:\n\n```js\nconst esb = require('elastic-builder/src');\n```\n\nThis module is heavily influenced by [elastic.js](https://github.com/fullscale/elastic.js)(not maintained anymore).\n"
  },
  {
    "path": "docs/publish.md",
    "content": "# Publish to npm\n\nThis is primarily a reference for how to publish a new version of this package\nto npm. Currently, @sudo-suhas is the only one doing this.\n\n1. Ensure master branch is up to date `git checkout master && git pull && git status`.\n2. Ensure logged in as the expected npm user `npm whoami`.\n4. Bump the npm version `npm version major|minor|patch`. This will also:\n    - Run ~tests~ and style checks locally.\n    - Generate built files.\n    - Commit built files to master branch.\n    - Generate a tagged version commit.\n    - Push to GitHub and ~trigger a Travis build~.\n    - ~Travis CI should automatically publish to npm~.\n"
  },
  {
    "path": "jsconfig.json",
    "content": "{\n    // See https://go.microsoft.com/fwlink/?LinkId=759670\n    // for the documentation about the jsconfig.json format\n    \"compilerOptions\": {\n        \"target\": \"es6\",\n        \"module\": \"commonjs\",\n        \"allowSyntheticDefaultImports\": true,\n        \"sourceMap\": true\n    },\n    \"exclude\": [\n        \"node_modules\",\n        \"bower_components\",\n        \"jspm_packages\",\n        \"tmp\",\n        \"temp\"\n    ]\n}\n"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"elastic-builder\",\n  \"version\": \"0.0.0-development\",\n  \"description\": \"A JavaScript implementation of the elasticsearch Query DSL\",\n  \"main\": \"src/index.js\",\n  \"files\": [\n    \"src/\",\n    \"repl.js\",\n    \"index.d.ts\"\n  ],\n  \"types\": \"./src/index.d.ts\",\n  \"scripts\": {\n    \"build:umd\": \"webpack ./src/index.js --output-path browser --output-filename elastic-builder.min.js --mode production\",\n    \"build:docs\": \"documentation build src/index.js --github -o docs -f html -c ./docs/documentation.yml\",\n    \"build\": \"npm run build:umd && npm run build:docs\",\n    \"lint\": \"eslint src test\",\n    \"lint:fix\": \"npm run lint -- --fix\",\n    \"precommit\": \"lint-staged\",\n    \"test:src\": \"vitest run --coverage\",\n    \"test:typedef\": \"tsc --noEmit ./test/typedef.test.ts\",\n    \"test\": \"npm run test:src && npm run test:typedef\",\n    \"test:watch\": \"vitest\",\n    \"check\": \"npm run lint && npm test\",\n    \"print-version\": \"cross-env-shell echo v$npm_package_version\",\n    \"prepublishOnly\": \"npm run -s build\",\n    \"postpublish\": \"bash deploy-docs.sh\"\n  },\n  \"license\": \"MIT\",\n  \"engines\": {\n    \"node\": \">=20.0.0\"\n  },\n  \"devDependencies\": {\n    \"@babel/cli\": \"^7.25.9\",\n    \"@babel/core\": \"^7.25.9\",\n    \"@babel/eslint-parser\": \"^7.25.9\",\n    \"@babel/plugin-transform-runtime\": \"^7.25.9\",\n    \"@babel/preset-env\": \"^7.25.9\",\n    \"@vitest/coverage-v8\": \"^2.1.8\",\n    \"babel-loader\": \"^10.0.0\",\n    \"cross-env\": \"^7.0.3\",\n    \"documentation\": \"^8.0.0\",\n    \"eslint\": \"^8.57.1\",\n    \"eslint-config-prettier\": \"^9.1.0\",\n    \"eslint-plugin-prettier\": \"^5.2.1\",\n    \"eslint-plugin-vitest\": \"^0.5.4\",\n    \"husky\": \"^0.14.3\",\n    \"lint-staged\": \"^7.2.0\",\n    \"lodash\": \"^4.17.21\",\n    \"prettier\": \"^3.4.2\",\n    \"sinon\": \"^17.0.2\",\n    \"terser-webpack-plugin\": \"^5.3.14\",\n  
  \"typescript\": \"^5.9.2\",\n    \"vitest\": \"^2.1.8\",\n    \"webpack\": \"^5.101.2\",\n    \"webpack-cli\": \"^6.0.1\"\n  },\n  \"lint-staged\": {\n    \"src/**/*.js\": [\n      \"eslint --fix\",\n      \"git add\"\n    ],\n    \"test/**/*.js\": [\n      \"eslint --fix\",\n      \"git add\"\n    ]\n  },\n  \"homepage\": \"https://elastic-builder.js.org\",\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"https://github.com/sudo-suhas/elastic-builder.git\"\n  },\n  \"bugs\": {\n    \"url\": \"https://github.com/sudo-suhas/elastic-builder/issues\"\n  },\n  \"keywords\": [\n    \"elasticsearch\",\n    \"elastic-builder\",\n    \"querying\",\n    \"queries\",\n    \"query\",\n    \"elastic\",\n    \"search\",\n    \"elasticjs\",\n    \"ejs\",\n    \"dsl\"\n  ],\n  \"author\": \"Suhas Karanth <sudo.suhas@gmail.com>\",\n  \"contributors\": [\n    \"austin ce <austin.cawley@gmail.com>\",\n    \"Julien Maitrehenry <julien.maitrehenry@me.com>\",\n    \"ochan12 <mateochando@gmail.com>\",\n    \"kennylindahl <haxxblaster@gmail.com>\",\n    \"foxstarius <aj.franzon@gmail.com>\",\n    \"sandeep952 <sandy335582@gmail.com>\",\n    \"florian-lackner365 <florian.lackner@365talents.com>\",\n    \"Alejandro Marulanda <alejokf@gmail.com>\"\n  ]\n}\n"
  },
  {
    "path": "repl.js",
    "content": "'use strict';\n\nconst repl = require('repl');\n\nconst esb = require('./lib');\n\nrepl.start('elastic-builder > ').context.esb = esb;\n"
  },
  {
    "path": "roadmap.md",
    "content": "# Roadmap\n\n - [x] Setup github pages, add CNAME\n - [x] Improve README\n     + [x] Usage\n     + [x] Examples\n     + [x] Credits\n - [x] Host documentation, demo using js.org\n - [x] Tests\n - [x] travis setup with npm publish\n - [x] Elasticsearch docs pull request\n - [x] Documentation examples\n - [x] Add an `index.d.ts` file for better intellisense - http://www.typescriptlang.org/docs/handbook/declaration-files/introduction.html, https://github.com/Microsoft/TypeScript/issues/8335\n - [x] Add Adjacency matrix Aggregation to Bucket Aggregations\n - [x] Suggesters\n - [ ] Use ES6 modules\n"
  },
  {
    "path": "src/README.md",
    "content": "# Source Guide\nAll good open source repositories have an excellent README guide or a website which\nexplains how to _use_ it. But most of them don't have any kind of guide for the source itself.\nAlthough this is less likely to be useful for the average user, it can be quite useful for\nanyone who is looking to contribute or perhaps wants to debug a weird behavior.\n\nYou absolutely don't _need_ to read this. But you could. If you wanted to.\n\n## Project structure overview\nAll the source files written in ES6 are present in the `src` folder.\n\n```js\n// package.json snippet\n{\n  // ...\n  // Files to be picked up by npm\n  \"files\": [\n    \"browser/\",\n    \"src/\",\n    \"repl.js\",\n    \"index.d.ts\"\n  ],\n  // ...\n}\n```\n\nThese files are then packed into a minified `umd` module\nfor use in the browser and kept in the `browser` folder.\nThis is also used by the demo hosted on https://elastic-builder.js.org\n\nThe code in `src` folder doesn't use es6 imports. So it can be directly used without the transpiled code.\nEarlier, the transpiled code used to jumble the documentation so there was an advantage to using\nfrom `elastic-builder/src`. But now I have added the type definition so that advantage no longer applies.\n\nStarting from the base folder `src`, `index.js` simply pulls in all the concrete classes and re-exports\nthem under the same object so that you can access all queries and aggregations. It also adds\nhelper methods so that the class, for example `MatchQuery` can be instantiated with `esb.matchQuery()`\nthereby avoiding the `new` keyword. This means there are 2 ways of doing the same thing. While this is not ideal,\nthe reason for doing so because the documentation, type definitions declare classes and\nit might be confusing to access them using only functions. 
I am open to rethinking this approach.\n`recipes.js` has a few helper methods for augmenting the base queries and aggregations.\nFor example, although elasticsearch no longer supports the `missing` query, there is a helper recipe for that.\nI am also considering adding a `bodybuilder` style bool query builder to the recipes.\n\nThe library uses inheritance and es6 class syntax quite heavily. The base classes for\nrequest body search, queries, aggregations and other misc classes are present in `core`.\nThe queries, aggregations are organised to mirror the elasticsearch reference guide.\nWherever there is common behavior for multiple classes, the methods are moved to a base class.\nStrictly speaking, all the base classes are instantiable.\nBut this is less likely to be useful and hence are not exported.\nYou could very easily override or extend any behavior for these classes.\nSome classes have a relatively long inheritance chain.\n\n```\nQuery -> FullTextQueryBase -> MonoFieldQueryBase -> MatchPhraseQueryBase -> MatchPhraseQuery\n```\nThis can get a little confusing but I am hoping that the type definition solves\nmost of the woes.\n\nTests are written using `ava` and each file is independent. I am using macros heavily.\nProbably harder to understand. But very good for reusing test code.\nTo be able to test with different node versions,\nwe need to use `babel` again. However, the `lib` files do not have source maps (not useful in prod) so cannot be used\nto measure coverage. Earlier, I set up `ava` to use `babel-register` but with the sheer number of files,\nthis performed very poorly. So as suggested in an `ava` recipe, I set up a pre-compilation step to transpile\ntests to `_build` folder (not tracked in git).\n\nYou can pretty much ignore the `assets` folder.\n\n## Release\nI am using `release-it` for making releases. One of the key factors for doing this is to\nauto generate release notes. 
I also looked into conventional changelog and am trying to follow that\nfor commits. On starting the release, it runs tests, builds `lib`, generates documentation(`docs` folder)\nand creates the github release. I manually wait for all tests in travis to pass before running `npm publish`.\nThere are other options like semantic release which can take care of the entire workflow but\nprovide less flexibility. I am not yet completely comfortable with git branches and am using the `master`\nbranch itself for pushing commits. So I don't _always_ want to release when I push to master.\n\n## Dependencies\nFor the project prod dependencies, I am using the modularised `lodash` libraries\nalong with `babel-runtime`. The reason for doing this, although quite unlikely,\nis because if someone does use it, they shouldn't have to load 100 kb worth of `lodash`.\nThere are plugins available for only exporting what you use but the end user may not be aware of that.\n\n"
  },
  {
    "path": "src/_/index.js",
    "content": "'use strict';\n\n/**\n * Checks if value is null or undefined.\n *\n * @param {*} value The value to check.\n * @returns {boolean} Returns true if value is null or undefined, else false.\n */\nexports.isNil = function isNil(value) {\n    return value === null || value === undefined;\n};\n\n/**\n * Checks if value is a string.\n *\n * @param {*} value The value to check.\n * @returns {boolean} Returns true if value is a string, else false.\n */\nexports.isString = function isString(value) {\n    return typeof value === 'string';\n};\n\n/**\n * Checks if value is an object type (not null, not an array).\n *\n * @param {*} value The value to check.\n * @returns {boolean} Returns true if value is an object, else false.\n */\nexports.isObject = function isObject(value) {\n    return value !== null && typeof value === 'object' && !Array.isArray(value);\n};\n\n/**\n * Checks if object has a direct property (own property).\n *\n * @param {Object} object The object to check.\n * @param {string} key The property name to check.\n * @returns {boolean} Returns true if object has the property, else false.\n */\nexports.has = function has(object, key) {\n    return object != null && Object.prototype.hasOwnProperty.call(object, key);\n};\n\n/**\n * Checks if object has a property (including inherited properties).\n *\n * @param {Object} object The object to check.\n * @param {string} key The property name to check.\n * @returns {boolean} Returns true if object has the property, else false.\n */\nexports.hasIn = function hasIn(object, key) {\n    return object != null && key in object;\n};\n\n/**\n * Creates a new object excluding specified keys.\n *\n * @param {Object} object The source object.\n * @param {Array<string>} keys The keys to exclude.\n * @returns {Object} Returns the new object with specified keys omitted.\n */\nexports.omit = function omit(object, keys) {\n    if (object == null) return {};\n    const result = {};\n    const keysToOmit = new 
Set(keys);\n    for (const key in object) {\n        if (\n            Object.prototype.hasOwnProperty.call(object, key) &&\n            !keysToOmit.has(key)\n        ) {\n            result[key] = object[key];\n        }\n    }\n    return result;\n};\n\n/**\n * Checks if value is empty (null, undefined, empty string, empty array, or empty object).\n *\n * @param {*} value The value to check.\n * @returns {boolean} Returns true if value is empty, else false.\n */\nexports.isEmpty = function isEmpty(value) {\n    if (value == null) return true;\n    if (typeof value === 'string' || Array.isArray(value)) {\n        return value.length === 0;\n    }\n    if (typeof value === 'object') {\n        return Object.keys(value).length === 0;\n    }\n    return false;\n};\n\n/**\n * Gets the first element of an array.\n *\n * @param {Array} array The array to query.\n * @returns {*} Returns the first element of array.\n */\nexports.head = function head(array) {\n    return array != null ? array[0] : undefined;\n};\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/adjacency-matrix-aggregation.js",
    "content": "'use strict';\n\nconst {\n    Query,\n    util: { checkType, setDefault }\n} = require('../../core');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-adjacency-matrix-aggregation.html';\n\n/**\n * A bucket aggregation returning a form of adjacency matrix.\n * The request provides a collection of named filter expressions,\n * similar to the `filters` aggregation request. Each bucket in the response\n * represents a non-empty cell in the matrix of intersecting filters.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-adjacency-matrix-aggregation.html)\n *\n * @example\n * const agg = esb.adjacencyMatrixAggregation('interactions').filters({\n *     grpA: esb.termsQuery('accounts', ['hillary', 'sidney']),\n *     grpB: esb.termsQuery('accounts', ['donald', 'mitt']),\n *     grpC: esb.termsQuery('accounts', ['vladimir', 'nigel'])\n * });\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n *\n * @extends BucketAggregationBase\n */\nclass AdjacencyMatrixAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        super(name, 'adjacency_matrix');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on AdjacencyMatrixAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in AdjacencyMatrixAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on AdjacencyMatrixAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error(\n            'script is not supported in AdjacencyMatrixAggregation'\n        );\n    }\n\n    /**\n     * 
Sets a named filter query.\n     *\n     * @param {string} filterName Name for the filter.\n     * @param {Query} filterQuery Query to filter on. Example - term query.\n     * @returns {AdjacencyMatrixAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `filterQuery` is not an instance of `Query`\n     */\n    filter(filterName, filterQuery) {\n        checkType(filterQuery, Query);\n\n        setDefault(this._aggsDef, 'filters', {});\n\n        this._aggsDef.filters[filterName] = filterQuery;\n        return this;\n    }\n\n    /**\n     * Assigns filters to already added filters.\n     * Does not mix with anonymous filters.\n     * If anonymous filters are present, they will be overwritten.\n     *\n     * @param {Object} filterQueries Object with multiple key value pairs\n     * where filter name is the key and filter query is the value.\n     * @returns {AdjacencyMatrixAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `filterQueries` is not an instance of object\n     */\n    filters(filterQueries) {\n        checkType(filterQueries, Object);\n\n        setDefault(this._aggsDef, 'filters', {});\n\n        Object.assign(this._aggsDef.filters, filterQueries);\n        return this;\n    }\n\n    /**\n     * Sets the `separator` parameter to use a separator string other than\n     * the default of the ampersand.\n     *\n     * @param {string} sep the string used to separate keys in intersections buckets\n     * e.g. & character for keyed filters A and B would return an\n     * intersection bucket named A&B\n     * @returns {AdjacencyMatrixAggregation} returns `this` so that calls can be chained\n     */\n    separator(sep) {\n        this._aggsDef.separator = sep;\n        return this;\n    }\n}\n\nmodule.exports = AdjacencyMatrixAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/auto-date-histogram-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\n/**\n * A multi-bucket aggregation similar to the Date histogram aggregation except instead of\n * providing an interval to use as the width of each bucket, a target number of buckets\n * is provided indicating the number of buckets needed and the interval of the buckets\n * is automatically chosen to best achieve that target. The number of buckets returned\n * will always be less than or equal to this target number.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-autodatehistogram-aggregation.html)\n *\n * @example\n * const agg = esb.autoDateHistogramAggregation('sales_over_time', 'date', 15);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string} field The field to aggregate on\n * @param {number} buckets Bucket count to generate histogram over.\n *\n * @extends BucketAggregationBase\n */\nclass AutoDateHistogramAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, buckets) {\n        super(name, 'auto_date_histogram', field);\n        if (!_.isNil(buckets)) this._aggsDef.buckets = buckets;\n    }\n\n    /**\n     * Sets the histogram bucket count. Buckets are generated based on this value.\n     *\n     * @param {number} buckets Bucket count to generate histogram over.\n     * @returns {AutoDateHistogramAggregation} returns `this` so that calls can be chained\n     */\n    buckets(buckets) {\n        this._aggsDef.buckets = buckets;\n        return this;\n    }\n\n    /**\n     * The minimum_interval allows the caller to specify the minimum rounding interval that\n     * should be used. 
This can make the collection process more efficient, as the\n     * aggregation will not attempt to round at any interval lower than minimum_interval.\n     *\n     * Accepted units: year, month, day, hour, minute, second\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-autodatehistogram-aggregation.html#_minimum_interval_parameter)\n     *\n     * @example\n     * const agg = esb.autoDateHistogramAggregation(\n     *     'sales_over_time',\n     *     'date',\n     *     5\n     * ).minimumInterval('minute');\n     *\n     * @param {string} interval Minimum Rounding Interval Example: 'minute'\n     * @returns {AutoDateHistogramAggregation} returns `this` so that calls can be chained\n     */\n    minimumInterval(interval) {\n        this._aggsDef.minimum_interval = interval;\n        return this;\n    }\n\n    /**\n     * Sets the format expression for `key_as_string` in response buckets.\n     * If no format is specified, then it will use the first format specified in the field mapping.\n     *\n     * @example\n     * const agg = esb.autoDateHistogramAggregation(\n     *     'sales_over_time',\n     *     'date',\n     *     5\n     * ).format('yyyy-MM-dd');\n     *\n     * @param {string} fmt Format mask to apply on aggregation response. 
Example: ####.00.\n     * For Date Histograms, supports expressive [date format pattern](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html#date-format-pattern)\n     * @returns {AutoDateHistogramAggregation} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._aggsDef.format = fmt;\n        return this;\n    }\n\n    /**\n     * Sets the missing parameter which defines how documents\n     * that are missing a value should be treated.\n     *\n     * @example\n     * const agg = esb.autoDateHistogramAggregation('quantity', 'quantity', 10).missing(0);\n     *\n     * @param {string} value\n     * @returns {AutoDateHistogramAggregation} returns `this` so that calls can be chained\n     */\n    missing(value) {\n        this._aggsDef.missing = value;\n        return this;\n    }\n\n    /**\n     * Date-times are stored in Elasticsearch in UTC.\n     * By default, all bucketing and rounding is also done in UTC.\n     * The `time_zone` parameter can be used to indicate that bucketing should use a different time zone.\n     * Sets the date time zone\n     *\n     * @example\n     * const agg = esb.autoDateHistogramAggregation('by_day', 'date', 15).timeZone(\n     *     '-01:00'\n     * );\n     *\n     * @param {string} tz Time zone. Time zones may either be specified\n     * as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as a timezone id,\n     * an identifier used in the TZ database like America/Los_Angeles.\n     * @returns {AutoDateHistogramAggregation} returns `this` so that calls can be chained\n     */\n    timeZone(tz) {\n        this._aggsDef.time_zone = tz;\n        return this;\n    }\n}\n\nmodule.exports = AutoDateHistogramAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/bucket-aggregation-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Aggregation,\n    Script,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * The `BucketAggregationBase` provides support for common options used across\n * various bucket `Aggregation` implementations.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} name a valid aggregation name\n * @param {string} aggType type of aggregation\n * @param {string=} field The field to aggregate on\n *\n * @extends Aggregation\n */\nclass BucketAggregationBase extends Aggregation {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, aggType, field) {\n        super(name, aggType);\n\n        if (!_.isNil(field)) this._aggsDef.field = field;\n    }\n\n    /**\n     * Sets field to run aggregation on.\n     *\n     * @param {string} field a valid field name\n     * @returns {BucketAggregationBase} returns `this` so that calls can be chained\n     */\n    field(field) {\n        this._aggsDef.field = field;\n        return this;\n    }\n\n    /**\n     * Sets script parameter for aggregation.\n     *\n     * @example\n     * // Generating the terms using a script\n     * const agg = esb.termsAggregation('genres').script(\n     *     esb.script('file', 'my_script').params({ field: 'genre' })\n     * );\n     *\n     * @example\n     * // Value script\n     * const agg = esb.termsAggregation('genres', 'genre').script(\n     *     esb.script('inline', \"'Genre: ' +_value\").lang('painless')\n     * );\n     *\n     * @param {Script} script\n     * @returns {BucketAggregationBase} returns `this` so that calls can be chained\n     * @throws {TypeError} If `script` is not an instance of `Script`\n     */\n    script(script) {\n        checkType(script, Script);\n        this._aggsDef.script = script;\n        return this;\n    
}\n}\n\nmodule.exports = BucketAggregationBase;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/children-aggregation.js",
    "content": "'use strict';\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-children-aggregation.html';\n\n/**\n * A special single bucket aggregation that enables aggregating\n * from buckets on parent document types to buckets on child documents.\n *\n * This aggregation relies on the `_parent` field in the mapping.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-children-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.termsAggregation('top-tags', 'tags.keyword')\n *             .size(10)\n *             .agg(\n *                 esb.childrenAggregation('to-answers')\n *                     .type('answer')\n *                     .agg(\n *                         esb.termsAggregation(\n *                             'top-names',\n *                             'owner.display_name.keyword'\n *                         ).size(10)\n *                     )\n *             )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n *\n * @extends BucketAggregationBase\n */\nclass ChildrenAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        super(name, 'children');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ChildrenAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in ChildrenAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ChildrenAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in 
ChildrenAggregation');\n    }\n\n    /**\n     * Sets the child type/mapping for aggregation.\n     *\n     * @param {string} type The child type that the buckets in the parent space should be mapped to.\n     * @returns {ChildrenAggregation} returns `this` so that calls can be chained\n     */\n    type(type) {\n        this._aggsDef.type = type;\n        return this;\n    }\n}\n\nmodule.exports = ChildrenAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/composite-agg-values-sources/date-histogram-values-source.js",
    "content": "'use strict';\n\nconst _ = require('../../../_');\n\nconst ValuesSourceBase = require('./values-source-base');\n\nconst REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html#_date_histogram';\n\n/**\n * `DateHistogramValuesSource` is a source for the `CompositeAggregation` that\n * handles date histograms. It works very similar to a histogram aggregation\n * with a slightly different syntax.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html#_date_histogram)\n *\n * @example\n * const valueSrc = esb.CompositeAggregation.dateHistogramValuesSource(\n *   'date', // name\n *   'timestamp', // field\n *   '1d' // interval\n * );\n *\n * @param {string} name\n * @param {string=} field The field to aggregate on\n * @param {string|number=} interval Interval to generate histogram over.\n *\n * @extends ValuesSourceBase\n */\nclass DateHistogramValuesSource extends ValuesSourceBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, interval) {\n        super('date_histogram', REF_URL, name, field);\n\n        if (!_.isNil(interval)) this._opts.interval = interval;\n    }\n\n    /**\n     * Sets the histogram interval. 
Buckets are generated based on this interval value.\n     *\n     * @param {string|number} interval Interval to generate histogram over.\n     * @returns {DateHistogramValuesSource} returns `this` so that calls can be chained\n     */\n    interval(interval) {\n        this._opts.interval = interval;\n        return this;\n    }\n\n    /**\n     * Calendar-aware intervals are configured with the calendarInterval parameter.\n     * The combined interval field for date histograms is deprecated from ES 7.2.\n     *\n     * @example\n     * const agg = esb.dateHistogramValuesSource('by_month', 'date').calendarInterval(\n     *     'month'\n     * );\n     *\n     * @param {string} interval Interval to generate histogram over.\n     * You can specify calendar intervals using the unit name, such as month, or as\n     * a single unit quantity, such as 1M. For example, day and 1d are equivalent.\n     * Multiple quantities, such as 2d, are not supported.\n     * @returns {DateHistogramValuesSource} returns `this` so that calls can be chained\n     */\n    calendarInterval(interval) {\n        this._opts.calendar_interval = interval;\n        return this;\n    }\n\n    /**\n     * Fixed intervals are configured with the fixedInterval parameter.\n     * The combined interval field for date histograms is deprecated from ES 7.2.\n     *\n     * @example\n     * const agg = esb.dateHistogramValuesSource('by_minute', 'date').calendarInterval(\n     *     '60s'\n     * );\n     *\n     * @param {string} interval Interval to generate histogram over.\n     * Intervals are a fixed number of SI units and never deviate, regardless\n     * of where they fall on the calendar. 
However, it means fixed intervals\n     * cannot express other units such as months, since the duration of a\n     * month is not a fixed quantity.\n     * The accepted units for fixed intervals are:\n     * millseconds (ms), seconds (s), minutes (m), hours (h) and days (d).\n     * @returns {DateHistogramValuesSource} returns `this` so that calls can be chained\n     */\n    fixedInterval(interval) {\n        this._opts.fixed_interval = interval;\n        return this;\n    }\n\n    /**\n     * Sets the date time zone\n     *\n     * Date-times are stored in Elasticsearch in UTC. By default, all bucketing\n     * and rounding is also done in UTC. The `time_zone` parameter can be used\n     * to indicate that bucketing should use a different time zone.\n     *\n     * @param {string} tz Time zone. Time zones may either be specified\n     * as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as a timezone id,\n     * an identifier used in the TZ database like America/Los_Angeles.\n     * @returns {DateHistogramValuesSource} returns `this` so that calls can be chained\n     */\n    timeZone(tz) {\n        this._opts.time_zone = tz;\n        return this;\n    }\n\n    /**\n     * Sets the format expression for `key_as_string` in response buckets.\n     * If no format is specified, then it will use the first format specified\n     * in the field mapping.\n     *\n     * @example\n     * const valueSrc = esb.CompositeAggregation.valuesSource\n     *   .dateHistogram('date', 'timestamp', '1d')\n     *   .format('yyyy-MM-dd');\n     *\n     * @param {string} fmt Format mask to apply on aggregation response.\n     * For Date Histograms, supports expressive [date format pattern](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html#date-format-pattern)\n     * @returns {DateHistogramValuesSource} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._opts.format = fmt;\n        
return this;\n    }\n}\n\nmodule.exports = DateHistogramValuesSource;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/composite-agg-values-sources/histogram-values-source.js",
    "content": "'use strict';\n\nconst _ = require('../../../_');\n\nconst ValuesSourceBase = require('./values-source-base');\n\nconst REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html#_histogram';\n\n/**\n * `HistogramValuesSource` is a source for the `CompositeAggregation` that handles\n * histograms. It works very similar to a histogram aggregation with a slightly\n * different syntax.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html#_histogram)\n *\n * @example\n * const valueSrc = esb.CompositeAggregation.histogramValuesSource(\n *   'histo', // name\n *   'price', // field\n *   5 // interval\n * );\n *\n * @param {string} name\n * @param {string=} field The field to aggregate on\n * @param {number=} interval Interval to generate histogram over.\n *\n * @extends ValuesSourceBase\n */\nclass HistogramValuesSource extends ValuesSourceBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, interval) {\n        super('histogram', REF_URL, name, field);\n\n        if (!_.isNil(interval)) this._opts.interval = interval;\n    }\n\n    /**\n     * Sets the histogram interval. Buckets are generated based on this interval value.\n     *\n     * @param {number} interval Interval to generate histogram over.\n     * @returns {HistogramValuesSource} returns `this` so that calls can be chained\n     */\n    interval(interval) {\n        this._opts.interval = interval;\n        return this;\n    }\n}\n\nmodule.exports = HistogramValuesSource;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/composite-agg-values-sources/index.js",
    "content": "'use strict';\n\nexports.ValuesSourceBase = require('./values-source-base');\n\nexports.TermsValuesSource = require('./terms-values-source');\nexports.HistogramValuesSource = require('./histogram-values-source');\nexports.DateHistogramValuesSource = require('./date-histogram-values-source');\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/composite-agg-values-sources/terms-values-source.js",
    "content": "'use strict';\n\nconst ValuesSourceBase = require('./values-source-base');\n\nconst REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html#_terms';\n\n/**\n * `TermsValuesSource` is a source for the `CompositeAggregation` that handles\n * terms. It works very similar to a terms aggregation with a slightly different\n * syntax.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html#_terms)\n *\n * @example\n * const valueSrc = esb.CompositeAggregation.termsValuesSource('product').script({\n *   source: \"doc['product'].value\",\n *   lang: 'painless'\n * });\n *\n * @param {string} name\n * @param {string=} field The field to aggregate on\n *\n * @extends ValuesSourceBase\n */\nclass TermsValuesSource extends ValuesSourceBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super('terms', REF_URL, name, field);\n    }\n}\n\nmodule.exports = TermsValuesSource;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/composite-agg-values-sources/values-source-base.js",
    "content": "'use strict';\n\nconst _ = require('../../../_');\n\nconst {\n    util: { invalidParam, recursiveToJSON }\n} = require('../../../core');\n\nconst invalidOrderParam = invalidParam('', 'order', \"'asc' or 'desc'\");\n\n/**\n * Base class implementation for all Composite Aggregation values sources.\n *\n * **NOTE:** Instantiating this directly should not be required.\n *\n * @param {string} valueSrcType Type of value source\n * @param {string} refUrl Elasticsearch reference URL\n * @param {string} name\n * @param {string=} field The field to aggregate on\n *\n * @throws {Error} if `name` is empty\n * @throws {Error} if `valueSrcType` is empty\n */\nclass ValuesSourceBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(valueSrcType, refUrl, name, field) {\n        if (_.isEmpty(valueSrcType))\n            throw new Error('ValuesSourceBase `valueSrcType` cannot be empty');\n\n        this._name = name;\n        this._valueSrcType = valueSrcType;\n        this._refUrl = refUrl;\n\n        this._body = {};\n        this._opts = this._body[valueSrcType] = {};\n\n        if (!_.isNil(field)) this._opts.field = field;\n    }\n\n    /**\n     * Field to use for this source.\n     *\n     * @param {string} field a valid field name\n     * @returns {ValuesSourceBase} returns `this` so that calls can be chained\n     */\n    field(field) {\n        this._opts.field = field;\n        return this;\n    }\n\n    /**\n     * Script to use for this source.\n     *\n     * @param {Script|Object|string} script\n     * @returns {ValuesSourceBase} returns `this` so that calls can be chained\n     * @throws {TypeError} If `script` is not an instance of `Script`\n     */\n    script(script) {\n        this._opts.script = script;\n        return this;\n    }\n\n    /**\n     * Specifies the type of values produced by this source, e.g. 
`string` or\n     * `date`.\n     *\n     * @param {string} valueType\n     * @returns {ValuesSourceBase} returns `this` so that calls can be chained\n     */\n    valueType(valueType) {\n        this._opts.value_type = valueType;\n        return this;\n    }\n\n    /**\n     * Order specifies the order in the values produced by this source. It can\n     * be either `asc` or `desc`.\n     *\n     * @param {string} order The `order` option can have the following values.\n     * `asc`, `desc` to sort in ascending, descending order respectively.\n     * @returns {ValuesSourceBase} returns `this` so that calls can be chained.\n     */\n    order(order) {\n        if (_.isNil(order)) invalidOrderParam(order, this._refUrl);\n\n        const orderLower = order.toLowerCase();\n        if (orderLower !== 'asc' && orderLower !== 'desc') {\n            invalidOrderParam(order, this._refUrl);\n        }\n\n        this._opts.order = orderLower;\n        return this;\n    }\n\n    /**\n     * Missing specifies the value to use when the source finds a missing value\n     * in a document.\n     *\n     * Note: This option was deprecated in\n     * [Elasticsearch v6](https://www.elastic.co/guide/en/elasticsearch/reference/6.8/breaking-changes-6.0.html#_literal_missing_literal_is_deprecated_in_the_literal_composite_literal_aggregation).\n     * From 6.4 and later, use `missing_bucket` instead.\n     *\n     * @param {string|number} value\n     * @returns {ValuesSourceBase} returns `this` so that calls can be chained\n     */\n    missing(value) {\n        this._opts.missing = value;\n        return this;\n    }\n\n    /**\n     * Specifies whether to include documents without a value for a given source\n     * in the response. 
Defaults to `false` (not included).\n     *\n     * Note: This method is incompatible with elasticsearch 6.3 and older.\n     * Use it only with elasticsearch 6.4 and later.\n     *\n     * @param {boolean} value\n     * @returns {ValuesSourceBase} returns `this` so that calls can be chained\n     */\n    missingBucket(value) {\n        this._opts.missing_bucket = value;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the Composite\n     * Aggregation values source.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return { [this._name]: recursiveToJSON(this._body) };\n    }\n}\n\nmodule.exports = ValuesSourceBase;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/composite-aggregation.js",
    "content": "'use strict';\n\nconst {\n    Aggregation,\n    util: { checkType, constructorWrapper }\n} = require('../../core');\n\nconst {\n    ValuesSourceBase,\n    TermsValuesSource,\n    HistogramValuesSource,\n    DateHistogramValuesSource\n} = require('./composite-agg-values-sources');\n\n/**\n * CompositeAggregation is a multi-bucket values source based aggregation that\n * can be used to calculate unique composite values from source documents.\n *\n * Unlike the other multi-bucket aggregation the composite aggregation can be\n * used to paginate **all** buckets from a multi-level aggregation efficiently.\n * This aggregation provides a way to stream **all** buckets of a specific\n * aggregation similarly to what scroll does for documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-composite-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *   .agg(\n *     esb.compositeAggregation('my_buckets')\n *       .sources(\n *         esb.CompositeAggregation.termsValuesSource('product', 'product')\n *       )\n *   )\n *\n * NOTE: This query was added in elasticsearch v6.1.\n *\n * @param {string} name a valid aggregation name\n *\n * @extends Aggregation\n */\nclass CompositeAggregation extends Aggregation {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        super(name, 'composite');\n\n        this._aggsDef.sources = [];\n    }\n\n    /**\n     * Specifies the Composite Aggregation values sources to use in the\n     * aggregation.\n     *\n     * @example\n     * const { CompositeAggregation } = esb;\n     * const reqBody = esb.requestBodySearch()\n     *   .agg(\n     *     esb.compositeAggregation('my_buckets')\n     *       .sources(\n     *         CompositeAggregation.dateHistogramValuesSource(\n     *           'date',\n     *           'timestamp',\n     *           '1d'\n     *         ),\n     *         
CompositeAggregation.termsValuesSource('product', 'product')\n     *       )\n     *   );\n     *\n     * @param {...ValuesSourceBase} sources\n     * @returns {CompositeAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If any of the rest parameters `sources` is not an\n     * instance of `ValuesSourceBase`\n     */\n    sources(...sources) {\n        sources.forEach(valueSrc => checkType(valueSrc, ValuesSourceBase));\n\n        this._aggsDef.sources = this._aggsDef.sources.concat(sources);\n        return this;\n    }\n\n    /**\n     * Defines how many composite buckets should be returned. Each composite\n     * bucket is considered as a single bucket so setting a size of 10 will\n     * return the first 10 composite buckets created from the values source. The\n     * response contains the values for each composite bucket in an array\n     * containing the values extracted from each value source.\n     *\n     * @param {number} size\n     * @returns {CompositeAggregation} returns `this` so that calls can be chained\n     */\n    size(size) {\n        this._aggsDef.size = size;\n        return this;\n    }\n\n    /**\n     * The `after` parameter can be used to retrieve the composite buckets that\n     * are after the last composite buckets returned in a previous round.\n     *\n     * @example\n     * const { CompositeAggregation } = esb;\n     * const reqBody = esb.requestBodySearch().agg(\n     *   esb.compositeAggregation('my_buckets')\n     *     .size(2)\n     *     .sources(\n     *       CompositeAggregation.dateHistogramValuesSource(\n     *         'date',\n     *         'timestamp',\n     *         '1d'\n     *       ).order('desc'),\n     *       CompositeAggregation.termsValuesSource('product', 'product').order('asc')\n     *     )\n     *     .after({ date: 1494288000000, product: 'mad max' })\n     * );\n     *\n     * @param {Object} afterKey\n     * @returns {CompositeAggregation} returns `this` so that calls 
can be chained\n     */\n    after(afterKey) {\n        this._aggsDef.after = afterKey;\n        return this;\n    }\n}\n\nCompositeAggregation.TermsValuesSource = TermsValuesSource;\nCompositeAggregation.termsValuesSource = constructorWrapper(TermsValuesSource);\n\nCompositeAggregation.HistogramValuesSource = HistogramValuesSource;\nCompositeAggregation.histogramValuesSource = constructorWrapper(\n    HistogramValuesSource\n);\n\nCompositeAggregation.DateHistogramValuesSource = DateHistogramValuesSource;\nCompositeAggregation.dateHistogramValuesSource = constructorWrapper(\n    DateHistogramValuesSource\n);\n\nmodule.exports = CompositeAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/date-histogram-aggregation.js",
    "content": "'use strict';\n\nconst HistogramAggregationBase = require('./histogram-aggregation-base');\n\n/**\n * A multi-bucket aggregation similar to the histogram except it can only be applied on date values.\n * The interval can be specified by date/time expressions.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-datehistogram-aggregation.html#_scripts)\n *\n * @example\n * const agg = esb.dateHistogramAggregation('sales_over_time', 'date', 'month');\n *\n * @example\n * const agg = esb.dateHistogramAggregation(\n *     'sales_over_time',\n *     'date',\n *     '1M'\n * ).format('yyyy-MM-dd');\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n * @param {string=} interval Interval to generate histogram over.\n * Available expressions for interval: year, quarter, month, week, day, hour, minute, second\n *\n * @extends HistogramAggregationBase\n */\nclass DateHistogramAggregation extends HistogramAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, interval) {\n        super(name, 'date_histogram', field, interval);\n    }\n\n    /**\n     * Date-times are stored in Elasticsearch in UTC.\n     * By default, all bucketing and rounding is also done in UTC.\n     * The `time_zone` parameter can be used to indicate that bucketing should use a different time zone.\n     * Sets the date time zone\n     *\n     * @example\n     * const agg = esb.dateHistogramAggregation('by_day', 'date', 'day').timeZone(\n     *     '-01:00'\n     * );\n     *\n     * @param {string} tz Time zone. Time zones may either be specified\n     * as an ISO 8601 UTC offset (e.g. 
+01:00 or -08:00) or as a timezone id,\n     * an identifier used in the TZ database like America/Los_Angeles.\n     * @returns {DateHistogramAggregation} returns `this` so that calls can be chained\n     */\n    timeZone(tz) {\n        this._aggsDef.time_zone = tz;\n        return this;\n    }\n\n    /**\n     * Calendar-aware intervals are configured with the calendarInterval parameter.\n     * The combined interval field for date histograms is deprecated from ES 7.2.\n     *\n     * @example\n     * const agg = esb.dateHistogramAggregation('by_month', 'date').calendarInterval(\n     *     'month'\n     * );\n     *\n     * @param {string} interval Interval to generate histogram over.\n     * You can specify calendar intervals using the unit name, such as month, or as\n     * a single unit quantity, such as 1M. For example, day and 1d are equivalent.\n     * Multiple quantities, such as 2d, are not supported.\n     * @returns {DateHistogramAggregation} returns `this` so that calls can be chained\n     */\n    calendarInterval(interval) {\n        this._aggsDef.calendar_interval = interval;\n        return this;\n    }\n\n    /**\n     * Fixed intervals are configured with the fixedInterval parameter.\n     * The combined interval field for date histograms is deprecated from ES 7.2.\n     *\n     * @param {string} interval Interval to generate histogram over.\n     * Intervals are a fixed number of SI units and never deviate, regardless\n     * of where they fall on the calendar. 
However, it means fixed intervals\n     * cannot express other units such as months, since the duration of a\n     * month is not a fixed quantity.\n     *\n     * @example\n     * const agg = esb.dateHistogramAggregation('by_minute', 'date').fixedInterval(\n     *     '60s'\n     * );\n     *\n     * The accepted units for fixed intervals are:\n     * milliseconds (ms), seconds (s), minutes (m), hours (h) and days (d).\n     * @returns {DateHistogramAggregation} returns `this` so that calls can be chained\n     */\n    fixedInterval(interval) {\n        this._aggsDef.fixed_interval = interval;\n        return this;\n    }\n}\n\nmodule.exports = DateHistogramAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/date-range-aggregation.js",
    "content": "'use strict';\n\nconst RangeAggregationBase = require('./range-aggregation-base');\n\n/**\n * A range aggregation that is dedicated for date values. The main difference\n * between this aggregation and the normal range aggregation is that the from\n * and to values can be expressed in Date Math expressions, and it is also\n * possible to specify a date format by which the from and to response fields\n * will be returned.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html)\n *\n * @example\n * const agg = esb.dateRangeAggregation('range', 'date')\n *     .format('MM-yyy')\n *     .ranges([{ to: 'now-10M/M' }, { from: 'now-10M/M' }]);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends RangeAggregationBase\n */\nclass DateRangeAggregation extends RangeAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'date_range', field);\n    }\n\n    /**\n     * Sets the date time zone.\n     * Date-times are stored in Elasticsearch in UTC.\n     * By default, all bucketing and rounding is also done in UTC.\n     * The `time_zone` parameter can be used to indicate that\n     * bucketing should use a different time zone.\n     *\n     * @example\n     * const agg = esb.dateRangeAggregation('range', 'date')\n     *     .timeZone('CET')\n     *     .ranges([\n     *         { to: '2016/02/01' },\n     *         { from: '2016/02/01', to: 'now/d' },\n     *         { from: 'now/d' }\n     *     ]);\n     *\n     * @param {string} tz Time zone. Time zones may either be specified\n     * as an ISO 8601 UTC offset (e.g. 
+01:00 or -08:00) or as a timezone id,\n     * an identifier used in the TZ database like America/Los_Angeles.\n     * @returns {DateRangeAggregation} returns `this` so that calls can be chained\n     */\n    timeZone(tz) {\n        this._aggsDef.time_zone = tz;\n        return this;\n    }\n}\n\nmodule.exports = DateRangeAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/diversified-sampler-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { invalidParam },\n    consts: { EXECUTION_HINT_SET }\n} = require('../../core');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-diversified-sampler-aggregation.html';\n\nconst invalidExecutionHintParam = invalidParam(\n    ES_REF_URL,\n    'execution_hint',\n    EXECUTION_HINT_SET\n);\n\n/**\n * A filtering aggregation used to limit any sub aggregations' processing\n * to a sample of the top-scoring documents. Diversity settings\n * are used to limit the number of matches that share a common value such as an \"author\".\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-diversified-sampler-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.queryStringQuery('tags:elasticsearch'))\n *     .agg(\n *         esb.diversifiedSamplerAggregation('my_unbiased_sample', 'author')\n *             .shardSize(200)\n *             .agg(\n *                 esb.significantTermsAggregation(\n *                     'keywords',\n *                     'tags'\n *                 ).exclude(['elasticsearch'])\n *             )\n *     );\n *\n * @example\n * // Use a script to produce a hash of the multiple values in a tags field\n * // to ensure we don't have a sample that consists of the same repeated\n * // combinations of tags\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.queryStringQuery('tags:kibana'))\n *     .agg(\n *         esb.diversifiedSamplerAggregation('my_unbiased_sample')\n *             .shardSize(200)\n *             .maxDocsPerValue(3)\n *             .script(esb.script('inline', \"doc['tags'].values.hashCode()\"))\n *             .agg(\n *                 esb.significantTermsAggregation(\n *                
     'keywords',\n *                     'tags'\n *                 ).exclude(['kibana'])\n *             )\n *     );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends BucketAggregationBase\n */\nclass DiversifiedSamplerAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'diversified_sampler', field);\n    }\n\n    /**\n     * The shard_size parameter limits how many top-scoring documents\n     * are collected in the sample processed on each shard. The default value is 100.\n     *\n     * @param {number} size Maximum number of documents to return from each shard(Integer)\n     * @returns {DiversifiedSamplerAggregation} returns `this` so that calls can be chained\n     */\n    shardSize(size) {\n        this._aggsDef.shard_size = size;\n        return this;\n    }\n\n    /**\n     * Used to control the maximum number of documents collected\n     * on any one shard which share a common value.\n     * Applies on a per-shard basis only for the purposes of shard-local sampling.\n     *\n     * @param {number} maxDocsPerValue Default 1.(Integer)\n     * @returns {DiversifiedSamplerAggregation} returns `this` so that calls can be chained\n     */\n    maxDocsPerValue(maxDocsPerValue) {\n        this._aggsDef.max_docs_per_value = maxDocsPerValue;\n        return this;\n    }\n\n    /**\n     * This setting can influence the management of the values used\n     * for de-duplication. 
Each option will hold up to shard_size\n     * values in memory while performing de-duplication but\n     * the type of value held can be controlled\n     *\n     * @param {string} hint the possible values are `map`, `global_ordinals`,\n     * `global_ordinals_hash` and `global_ordinals_low_cardinality`\n     * @returns {DiversifiedSamplerAggregation} returns `this` so that calls can be chained\n     * @throws {Error} If Execution Hint is outside the accepted set.\n     */\n    executionHint(hint) {\n        if (_.isNil(hint)) invalidExecutionHintParam(hint);\n\n        const hintLower = hint.toLowerCase();\n        if (!EXECUTION_HINT_SET.has(hintLower)) {\n            invalidExecutionHintParam(hint);\n        }\n\n        this._aggsDef.execution_hint = hintLower;\n        return this;\n    }\n}\n\nmodule.exports = DiversifiedSamplerAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/filter-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType }\n} = require('../../core');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filter-aggregation.html';\n\n/**\n * Defines a single bucket of all the documents in the current document set\n * context that match a specified filter. Often this will be used to narrow down\n * the current aggregation context to a specific set of documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filter-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.filterAggregation(\n *             't_shirts',\n *             esb.termQuery('type', 't-shirt')\n *         ).agg(esb.avgAggregation('avg_price', 'price'))\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {Query=} filterQuery Query to filter on. 
Example - term query.\n *\n * @extends BucketAggregationBase\n */\nclass FilterAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, filterQuery) {\n        super(name, 'filter');\n\n        if (!_.isNil(filterQuery)) this.filter(filterQuery);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on FilterAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in FilterAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on FilterAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in FilterAggregation');\n    }\n\n    // NOTE: Special case. filter does not set a key inside\n    // this._aggsDef but sets the entire object itself\n    // Generic getOpt will fail for this.\n    // Just a simple override should handle it though\n\n    /**\n     * Set the filter query for Filter Aggregation.\n     *\n     * @param {Query} filterQuery Query to filter on. Example - term query.\n     * @returns {FilterAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `filterQuery` is not an instance of `Query`\n     */\n    filter(filterQuery) {\n        checkType(filterQuery, Query);\n        this._aggsDef = this._aggs[this.aggType] = filterQuery;\n        return this;\n    }\n}\n\nmodule.exports = FilterAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/filters-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType, setDefault }\n} = require('../../core');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filters-aggregation.html';\n\n/**\n * Defines a single bucket of all the documents in the current document set\n * context that match a specified filter. Often this will be used to narrow down\n * the current aggregation context to a specific set of documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filters-aggregation.html)\n *\n * @example\n * const agg = esb.filtersAggregation('messages')\n *     .filter('errors', esb.matchQuery('body', 'error'))\n *     .filter('warnings', esb.matchQuery('body', 'warning'));\n *\n *\n * @example\n * const agg = esb.filtersAggregation('messages')\n *     .anonymousFilters([\n *         esb.matchQuery('body', 'error'),\n *         esb.matchQuery('body', 'warning')\n *     ])\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n *\n * @extends BucketAggregationBase\n */\nclass FiltersAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        super(name, 'filters');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on FiltersAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in FiltersAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on FiltersAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in FiltersAggregation');\n    }\n\n    /**\n     * Print 
warning message to console namespaced by class name.\n     *\n     * @param {string} msg\n     * @private\n     */\n    _warn(msg) {\n        console.warn(`[FiltersAggregation] ${msg}`);\n    }\n\n    /**\n     * Check and puts an object for the `filters` key in\n     * internal aggregation representation object.\n     * If the key has a value but is not an object,\n     * a warning is printed.\n     * @private\n     */\n    _checkNamedFilters() {\n        if (\n            !setDefault(this._aggsDef, 'filters', {}) &&\n            Array.isArray(this._aggsDef.filters)\n        ) {\n            this._warn('Do not mix named and anonymous filters!');\n            this._warn('Overwriting anonymous filters.');\n            this._aggsDef.filters = {};\n        }\n    }\n\n    /**\n     * Check and puts an array for the `filters` key in\n     * internal aggregation representation object.\n     * If the key has a value but is not an array,\n     * a warning is printed.\n     * @private\n     */\n    _checkAnonymousFilters() {\n        if (\n            !setDefault(this._aggsDef, 'filters', []) &&\n            !Array.isArray(this._aggsDef.filters)\n        ) {\n            this._warn('Do not mix named and anonymous filters!');\n            this._warn('Overwriting named filters.');\n            this._aggsDef.filters = [];\n        }\n    }\n\n    /**\n     * Sets a named filter query.\n     * Does not mix with anonymous filters.\n     * If anonymous filters are present, they will be overwritten.\n     *\n     * @param {string} bucketName Name for bucket which will collect\n     * all documents that match its associated filter.\n     * @param {Query} filterQuery Query to filter on. 
Example - term query.\n     * @returns {FiltersAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `filterQuery` is not an instance of `Query`\n     */\n    filter(bucketName, filterQuery) {\n        checkType(filterQuery, Query);\n\n        this._checkNamedFilters();\n\n        this._aggsDef.filters[bucketName] = filterQuery;\n        return this;\n    }\n\n    /**\n     * Assigns filters to already added filters.\n     * Does not mix with anonymous filters.\n     * If anonymous filters are present, they will be overwritten.\n     *\n     * @param {Object} filterQueries Object with multiple key value pairs\n     * where bucket name is the key and filter query is the value.\n     * @returns {FiltersAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `filterQueries` is not an instance of object\n     */\n    filters(filterQueries) {\n        checkType(filterQueries, Object);\n\n        this._checkNamedFilters();\n\n        Object.assign(this._aggsDef.filters, filterQueries);\n        return this;\n    }\n\n    /**\n     * Appends an anonymous filter query.\n     * Does not mix with named filters.\n     * If named filters are present, they will be overwritten.\n     *\n     * @param {*} filterQuery Query to filter on. 
Example - term query.\n     * @returns {FiltersAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `filterQuery` is not an instance of `Query`\n     */\n    anonymousFilter(filterQuery) {\n        checkType(filterQuery, Query);\n\n        this._checkAnonymousFilters();\n\n        this._aggsDef.filters.push(filterQuery);\n        return this;\n    }\n\n    /**\n     * Appends an array of anonymous filters.\n     * Does not mix with named filters.\n     * If named filters are present, they will be overwritten.\n     *\n     * @param {*} filterQueries Array of queries to filter on and generate buckets.\n     * Example - term query.\n     * @returns {FiltersAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `filterQueries` is not an instance of Array\n     */\n    anonymousFilters(filterQueries) {\n        checkType(filterQueries, Array);\n\n        this._checkAnonymousFilters();\n\n        this._aggsDef.filters = this._aggsDef.filters.concat(filterQueries);\n        return this;\n    }\n\n    /**\n     * Adds a bucket to the response which will contain all documents\n     * that do not match any of the given filters.\n     * Returns the other bucket either in a bucket\n     * (named `_other_` by default) if named filters are being used,\n     * or as the last bucket if anonymous filters are being used\n     *\n     * @param {boolean} enable `True` to return `other` bucket with documents\n     * that do not match any filters and `False` to disable computation\n     * @param {string=} otherBucketKey Optional key for the other bucket.\n     * Default is `_other_`.\n     * @returns {FiltersAggregation} returns `this` so that calls can be chained\n     */\n    otherBucket(enable, otherBucketKey) {\n        this._aggsDef.other_bucket = enable;\n\n        !_.isEmpty(otherBucketKey) && this.otherBucketKey(otherBucketKey);\n\n        return this;\n    }\n\n    /**\n     * Sets the key for the other 
bucket to a value other than the default `_other_`.\n     * Setting this parameter will implicitly set the other_bucket parameter to true.\n     * If anonymous filters are being used, setting this parameter will not make sense.\n     *\n     * @example\n     * const agg = esb.filtersAggregation('messages')\n     *     .filter('errors', esb.matchQuery('body', 'error'))\n     *     .filter('warnings', esb.matchQuery('body', 'warning'))\n     *     .otherBucketKey('other_messages');\n     *\n     * @param {string} otherBucketKey\n     * @returns {FiltersAggregation} returns `this` so that calls can be chained\n     */\n    otherBucketKey(otherBucketKey) {\n        this._aggsDef.other_bucket_key = otherBucketKey;\n        return this;\n    }\n}\n\nmodule.exports = FiltersAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/geo-distance-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    GeoPoint,\n    util: { checkType, invalidParam },\n    consts: { UNIT_SET }\n} = require('../../core');\n\nconst RangeAggregationBase = require('./range-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geodistance-aggregation.html';\n\nconst invalidUnitParam = invalidParam(ES_REF_URL, 'unit', UNIT_SET);\nconst invalidDistanceTypeParam = invalidParam(\n    ES_REF_URL,\n    'distance_type',\n    \"'plane' or 'arc'\"\n);\n\n/**\n * A multi-bucket aggregation that works on geo_point fields and conceptually\n * works very similar to the range aggregation. The user can define a point of\n * origin and a set of distance range buckets. The aggregation evaluate the\n * distance of each document value from the origin point and determines the\n * buckets it belongs to based on the ranges (a document belongs to a bucket\n * if the distance between the document and the origin falls within the distance\n * range of the bucket).\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geodistance-aggregation.html)\n *\n * @example\n * const agg = esb.geoDistanceAggregation('rings_around_amsterdam', 'location')\n *     .origin(esb.geoPoint().string('52.3760, 4.894'))\n *     .ranges([{ to: 100000 }, { from: 100000, to: 300000 }, { from: 300000 }]);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends RangeAggregationBase\n */\nclass GeoDistanceAggregation extends RangeAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'geo_distance', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoDistanceAggregation\n     */\n    format() {\n 
       console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in GeoDistanceAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoDistanceAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in GeoDistanceAggregation');\n    }\n\n    /**\n     * Sets the point of origin from where distances will be measured.\n     *\n     * @param {GeoPoint} point A valid `GeoPoint` object.\n     * @returns {GeoDistanceAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `point` is not an instance of `GeoPoint`\n     */\n    origin(point) {\n        checkType(point, GeoPoint);\n\n        this._aggsDef.origin = point;\n        return this;\n    }\n\n    /**\n     * Sets the distance unit.  Valid values are:\n     * mi (miles), in (inches), yd (yards),\n     * km (kilometers), cm (centimeters), mm (millimeters),\n     * ft(feet), NM(nauticalmiles)\n     *\n     * @example\n     * const agg = esb.geoDistanceAggregation('rings_around_amsterdam', 'location')\n     *     .origin(esb.geoPoint().string('52.3760, 4.894'))\n     *     .unit('km')\n     *     .ranges([{ to: 100 }, { from: 100, to: 300 }, { from: 300 }]);\n     *\n     * @param {string} unit Distance unit, default is `m`(meters).\n     * @returns {GeoDistanceAggregation} returns `this` so that calls can be chained\n     * @throws {Error} If Unit is outside the accepted set.\n     */\n    unit(unit) {\n        if (!UNIT_SET.has(unit)) {\n            invalidUnitParam(unit);\n        }\n\n        this._aggsDef.unit = unit;\n        return this;\n    }\n\n    /**\n     * Sets the distance calculation mode, `arc` or `plane`.\n     * The `arc` calculation is the more accurate.\n     * The `plane` is the faster but least accurate.\n     *\n     * @example\n     * const agg = 
esb.geoDistanceAggregation('rings_around_amsterdam', 'location')\n     *     .origin(esb.geoPoint().string('52.3760, 4.894'))\n     *     .unit('km')\n     *     .distanceType('plane')\n     *     .ranges([{ to: 100 }, { from: 100, to: 300 }, { from: 300 }]);\n     *\n     * @param {string} type\n     * @returns {GeoDistanceAggregation} returns `this` so that calls can be chained\n     * @throws {Error} If `type` is neither `plane` nor `arc`.\n     */\n    distanceType(type) {\n        if (_.isNil(type)) invalidDistanceTypeParam(type);\n\n        const typeLower = type.toLowerCase();\n        if (typeLower !== 'plane' && typeLower !== 'arc')\n            invalidDistanceTypeParam(type);\n\n        this._aggsDef.distance_type = typeLower;\n        return this;\n    }\n}\n\nmodule.exports = GeoDistanceAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/geo-hash-grid-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geohashgrid-aggregation.html';\n\n/**\n * A multi-bucket aggregation that works on geo_point fields and groups points\n * into buckets that represent cells in a grid. The resulting grid can be sparse\n * and only contains cells that have matching data. Each cell is labeled using a\n * geohash which is of user-definable precision.\n\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geohashgrid-aggregation.html)\n *\n * @example\n * const agg = esb.geoHashGridAggregation('large-grid', 'location').precision(3);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends BucketAggregationBase\n */\nclass GeoHashGridAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'geohash_grid', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoHashGridAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in GeoHashGridAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoHashGridAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in GeoHashGridAggregation');\n    }\n\n    /**\n     * Sets the precision for the generated geohash.\n     *\n     * @param {number} precision Precision can be between 1 and 12\n     * @returns {GeoHashGridAggregation} returns `this` so that calls can be chained\n     * @throws 
{Error} If precision is not between 1 and 12.\n     */\n    precision(precision) {\n        if (_.isNil(precision) || precision < 1 || precision > 12) {\n            throw new Error('`precision` can only be value from 1 to 12.');\n        }\n\n        this._aggsDef.precision = precision;\n        return this;\n    }\n\n    /**\n     * Sets the maximum number of geohash buckets to return.\n     * When results are trimmed, buckets are prioritised\n     * based on the volumes of documents they contain.\n     *\n     * @param {number} size Optional. The maximum number of geohash\n     * buckets to return (defaults to 10,000).\n     * @returns {GeoHashGridAggregation} returns `this` so that calls can be chained\n     */\n    size(size) {\n        this._aggsDef.size = size;\n        return this;\n    }\n\n    /**\n     * Determines how many geohash_grid buckets the coordinating node\n     * will request from each shard.\n     *\n     * @param {number} shardSize Optional.\n     * @returns {GeoHashGridAggregation} returns `this` so that calls can be chained\n     */\n    shardSize(shardSize) {\n        this._aggsDef.shard_size = shardSize;\n        return this;\n    }\n}\n\nmodule.exports = GeoHashGridAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/geo-hex-grid-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geohexgrid-aggregation.html';\n\n/**\n * A multi-bucket aggregation that groups geo_point and geo_shape values into buckets\n * that represent a grid. The resulting grid can be sparse and only contains cells\n * that have matching data. Each cell corresponds to a H3 cell index and is labeled\n * using the H3Index representation.\n\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geohexgrid-aggregation.html)\n *\n * NOTE: This aggregation was added in elasticsearch v8.1.0.\n *\n * @example\n * const agg = esb.geoHexGridAggregation('hex-grid', 'location').precision(3);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends BucketAggregationBase\n */\nclass GeoHexGridAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'geohex_grid', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoHexGridAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in GeoHexGridAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoHexGridAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in GeoHexGridAggregation');\n    }\n\n    /**\n     * Sets the precision for the generated geohex.\n     *\n     * @param {number} precision Precision can be between 0 and 15\n     * @returns {GeoHexGridAggregation} 
returns `this` so that calls can be chained\n     * @throws {Error} If precision is not between 0 and 15.\n     */\n    precision(precision) {\n        if (_.isNil(precision) || precision < 0 || precision > 15) {\n            throw new Error('`precision` can only be value from 0 to 15.');\n        }\n\n        this._aggsDef.precision = precision;\n        return this;\n    }\n\n    /**\n     * Sets the maximum number of geohex buckets to return.\n     * When results are trimmed, buckets are prioritised\n     * based on the volumes of documents they contain.\n     *\n     * @param {number} size Optional. The maximum number of geohex\n     * buckets to return (defaults to 10,000).\n     * @returns {GeoHexGridAggregation} returns `this` so that calls can be chained\n     */\n    size(size) {\n        this._aggsDef.size = size;\n        return this;\n    }\n\n    /**\n     * Determines how many geohex_grid buckets the coordinating node\n     * will request from each shard.\n     *\n     * @param {number} shardSize Optional.\n     * @returns {GeoHexGridAggregation} returns `this` so that calls can be chained\n     */\n    shardSize(shardSize) {\n        this._aggsDef.shard_size = shardSize;\n        return this;\n    }\n}\n\nmodule.exports = GeoHexGridAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/geo-tile-grid-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    GeoPoint,\n    util: { checkType, setDefault }\n} = require('../../core');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geotilegrid-aggregation.html';\n\n/**\n * A multi-bucket aggregation that works on geo_point fields and groups points\n * into buckets that represent cells in a grid. The resulting grid can be sparse\n * and only contains cells that have matching data. Each cell corresponds to a\n * map tile as used by many online map sites. Each cell is labeled using a\n * \"{zoom}/{x}/{y}\" format, where zoom is equal to the user-specified precision.\n\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geotilegrid-aggregation.html)\n *\n * NOTE: This query was added in elasticsearch v7.0.\n *\n * @example\n * const agg = esb.geoTileGridAggregation('large-grid', 'location').precision(8);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends BucketAggregationBase\n */\nclass GeoTileGridAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'geotile_grid', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoTileGridAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in GeoTileGridAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoTileGridAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in 
GeoTileGridAggregation');\n    }\n\n    /**\n     * The integer zoom of the key used to define cells/buckets in the results.\n     * Defaults to 7.\n     *\n     * @param {number} precision Precision can be between 0 and 29\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained\n     * @throws {Error} If precision is not between 0 and 29.\n     */\n    precision(precision) {\n        if (_.isNil(precision) || precision < 0 || precision > 29) {\n            throw new Error('`precision` can only be value from 0 to 29.');\n        }\n\n        this._aggsDef.precision = precision;\n        return this;\n    }\n\n    /**\n     * Sets the maximum number of geotile buckets to return.\n     * When results are trimmed, buckets are prioritised\n     * based on the volumes of documents they contain.\n     *\n     * @param {number} size Optional. The maximum number of geotile\n     * buckets to return (defaults to 10,000).\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained\n     */\n    size(size) {\n        this._aggsDef.size = size;\n        return this;\n    }\n\n    /**\n     * Determines how many geotile_grid buckets the coordinating node\n     * will request from each shard. 
To allow for more accurate counting of the\n     * top cells returned in the final result the aggregation defaults to\n     * returning `max(10,(size x number-of-shards))` buckets from each shard.\n     * If this heuristic is undesirable, the number considered from each shard\n     * can be over-ridden using this parameter.\n     *\n     * @param {number} shardSize Optional.\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained\n     */\n    shardSize(shardSize) {\n        this._aggsDef.shard_size = shardSize;\n        return this;\n    }\n\n    /**\n     * Sets the top left coordinate for the bounding box used to filter the\n     * points in the bucket.\n     *\n     * @param {GeoPoint} point A valid `GeoPoint`\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained.\n     */\n    topLeft(point) {\n        checkType(point, GeoPoint);\n        setDefault(this._aggsDef, 'bounds', {});\n        this._aggsDef.bounds.top_left = point;\n        return this;\n    }\n\n    /**\n     * Sets the bottom right coordinate for the bounding box used to filter the\n     * points in the bucket.\n     *\n     * @param {GeoPoint} point A valid `GeoPoint`\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained.\n     */\n    bottomRight(point) {\n        checkType(point, GeoPoint);\n        setDefault(this._aggsDef, 'bounds', {});\n        this._aggsDef.bounds.bottom_right = point;\n        return this;\n    }\n\n    /**\n     * Sets the top right coordinate for the bounding box used to filter the\n     * points in the bucket.\n     *\n     * @param {GeoPoint} point A valid `GeoPoint`\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained.\n     */\n    topRight(point) {\n        checkType(point, GeoPoint);\n        setDefault(this._aggsDef, 'bounds', {});\n        this._aggsDef.bounds.top_right = point;\n        return this;\n    }\n\n    /**\n     * Sets the 
bottom left coordinate for the bounding box used to filter the\n     * points in the bucket.\n     *\n     * @param {GeoPoint} point A valid `GeoPoint`\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained.\n     */\n    bottomLeft(point) {\n        checkType(point, GeoPoint);\n        setDefault(this._aggsDef, 'bounds', {});\n        this._aggsDef.bounds.bottom_left = point;\n        return this;\n    }\n\n    /**\n     * Sets value for top of the bounding box.\n     *\n     * @param {number} val\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained.\n     */\n    top(val) {\n        setDefault(this._aggsDef, 'bounds', {});\n        this._aggsDef.bounds.top = val;\n        return this;\n    }\n\n    /**\n     * Sets value for left of the bounding box.\n     *\n     * @param {number} val\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained.\n     */\n    left(val) {\n        setDefault(this._aggsDef, 'bounds', {});\n        this._aggsDef.bounds.left = val;\n        return this;\n    }\n\n    /**\n     * Sets value for bottom of the bounding box.\n     *\n     * @param {number} val\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained.\n     */\n    bottom(val) {\n        setDefault(this._aggsDef, 'bounds', {});\n        this._aggsDef.bounds.bottom = val;\n        return this;\n    }\n\n    /**\n     * Sets value for right of the bounding box.\n     *\n     * @param {number} val\n     * @returns {GeoTileGridAggregation} returns `this` so that calls can be chained.\n     */\n    right(val) {\n        setDefault(this._aggsDef, 'bounds', {});\n        this._aggsDef.bounds.right = val;\n        return this;\n    }\n}\n\nmodule.exports = GeoTileGridAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/global-aggregation.js",
    "content": "'use strict';\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-global-aggregation.html';\n\n/**\n * Defines a single bucket of all the documents within the search execution\n * context. This context is defined by the indices and the document types you’re\n * searching on, but is not influenced by the search query itself.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-global-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchQuery('type', 't-shirt'))\n *     .agg(\n *         esb.globalAggregation('all_products').agg(\n *             esb.avgAggregation('avg_price', 'price')\n *         )\n *     )\n *     .agg(esb.avgAggregation('t_shirts', 'price'));\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n *\n * @extends BucketAggregationBase\n */\nclass GlobalAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        super(name, 'global');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GlobalAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in GlobalAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GlobalAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in GlobalAggregation');\n    }\n}\n\nmodule.exports = GlobalAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/histogram-aggregation-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { invalidParam }\n} = require('../../core');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst invalidDirectionParam = invalidParam('', 'direction', \"'asc' or 'desc'\");\n\n/**\n * The `HistogramAggregationBase` provides support for common options used across\n * various histogram `Aggregation` implementations like Histogram Aggregation,\n * Date Histogram aggregation.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string} aggType Type of aggregation\n * @param {string=} field The field to aggregate on\n * @param {string|number=} interval Interval to generate histogram over.\n *\n * @extends BucketAggregationBase\n */\nclass HistogramAggregationBase extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, aggType, field, interval) {\n        super(name, aggType, field);\n\n        if (!_.isNil(interval)) this._aggsDef.interval = interval;\n    }\n\n    /**\n     * Sets the histogram interval. 
Buckets are generated based on this interval value.\n     *\n     * @param {string} interval Interval to generate histogram over.\n     * For date histograms, available expressions for interval:\n     * year, quarter, month, week, day, hour, minute, second\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    interval(interval) {\n        this._aggsDef.interval = interval;\n        return this;\n    }\n\n    /**\n     * Sets the format expression for `key_as_string` in response buckets.\n     * If no format is specified, then it will use the first format specified in the field mapping.\n     *\n     * @example\n     * const agg = esb.dateHistogramAggregation(\n     *     'sales_over_time',\n     *     'date',\n     *     '1M'\n     * ).format('yyyy-MM-dd');\n     *\n     * @param {string} fmt Format mask to apply on aggregation response. Example: ####.00.\n     * For Date Histograms, supports expressive [date format pattern](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html#date-format-pattern)\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._aggsDef.format = fmt;\n        return this;\n    }\n\n    /**\n     * The offset parameter is used to change the start value of each bucket\n     * by the specified positive (+) or negative offset (-).\n     * Negative offset is not applicable on HistogramAggregation.\n     * In case of DateHistogramAggregation, duration can be\n     * a value such as 1h for an hour, or 1d for a day.\n     *\n     * @example\n     * const agg = esb.dateHistogramAggregation('by_day', 'date', 'day').offset('6h');\n     *\n     * @param {string} offset Time or bucket key offset for bucketing.\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    offset(offset) {\n        this._aggsDef.offset = offset;\n   
     return this;\n    }\n\n    /**\n     * Sets the ordering for buckets\n     *\n     * @example\n     * const agg = esb.histogramAggregation('prices', 'price', 50)\n     *     .order('_count', 'desc');\n     *\n     * @example\n     * const agg = esb.histogramAggregation('prices', 'price', 50)\n     *     .order('promoted_products>rating_stats.avg', 'desc')\n     *     .agg(\n     *         esb.filterAggregation('promoted_products')\n     *             .filter(esb.termQuery('promoted', 'true'))\n     *             .agg(esb.statsAggregation('rating_stats', 'rating'))\n     *     );\n     *\n     * @param {string} key\n     * @param {string} direction `asc` or `desc`\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    order(key, direction = 'desc') {\n        if (_.isNil(direction)) invalidDirectionParam(direction);\n\n        const directionLower = direction.toLowerCase();\n        if (directionLower !== 'asc' && directionLower !== 'desc') {\n            invalidDirectionParam(direction);\n        }\n\n        if (_.has(this._aggsDef, 'order')) {\n            if (!Array.isArray(this._aggsDef.order)) {\n                this._aggsDef.order = [this._aggsDef.order];\n            }\n\n            this._aggsDef.order.push({ [key]: directionLower });\n        } else {\n            this._aggsDef.order = { [key]: directionLower };\n        }\n\n        return this;\n    }\n\n    /**\n     * Sets the minimum number of matching documents in range to return the bucket.\n     *\n     * @example\n     * const agg = esb.histogramAggregation('prices', 'price', 50).minDocCount(1);\n     *\n     * @param {number} minDocCnt Integer value for minimum number of documents\n     * required to return bucket in response\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    minDocCount(minDocCnt) {\n        this._aggsDef.min_doc_count = minDocCnt;\n        return this;\n    }\n\n    /**\n     
* Set's the range/bounds for the histogram aggregation.\n     * Useful when you want to include buckets that might be\n     * outside the bounds of indexed documents.\n     *\n     * @example\n     * const agg = esb.histogramAggregation('prices', 'price', 50).extendedBounds(0, 500);\n     *\n     * @param {number|string} min Start bound / minimum bound value\n     * For histogram aggregation, Integer value can be used.\n     * For Date histogram, date expression can be used.\n     * Available expressions for interval:\n     * year, quarter, month, week, day, hour, minute, second\n     * @param {number|string} max End bound / maximum bound value\n     * For histogram aggregation, Integer value can be used.\n     * For Date histogram, date expression can be used.\n     * Available expressions for interval:\n     * year, quarter, month, week, day, hour, minute, second\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    extendedBounds(min, max) {\n        this._aggsDef.extended_bounds = { min, max };\n        return this;\n    }\n\n    /**\n     * Set's the range/bounds for the histogram aggregation.\n     * Useful when you want to limit the range of buckets in the histogram.\n     * It is particularly useful in the case of open data ranges that can result in a very large number of buckets.\n     * NOTE: Only available in Elasticsearch v7.10.0+\n     *\n     * @example\n     * const agg = esb.histogramAggregation('prices', 'price', 50).hardBounds(0, 500);\n     *\n     * @param {number|string} min Start bound / minimum bound value\n     * For histogram aggregation, Integer value can be used.\n     * For Date histogram, date expression can be used.\n     * Available expressions for interval:\n     * year, quarter, month, week, day, hour, minute, second\n     * @param {number|string} max End bound / maximum bound value\n     * For histogram aggregation, Integer value can be used.\n     * For Date histogram, date 
expression can be used.\n     * Available expressions for interval:\n     * year, quarter, month, week, day, hour, minute, second\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    hardBounds(min, max) {\n        this._aggsDef.hard_bounds = { min, max };\n        return this;\n    }\n\n    /**\n     * Sets the missing parameter which defines how documents\n     * that are missing a value should be treated.\n     *\n     * @example\n     * const agg = esb.histogramAggregation('quantity', 'quantity', 10).missing(0);\n     *\n     * @param {string} value\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    missing(value) {\n        this._aggsDef.missing = value;\n        return this;\n    }\n\n    /**\n     * Enable the response to be returned as a keyed object where the key is the\n     * bucket interval.\n     *\n     * @example\n     * const agg = esb.dateHistogramAggregation('sales_over_time', 'date', '1M')\n     *     .keyed(true)\n     *     .format('yyyy-MM-dd');\n     *\n     * @param {boolean} keyed To enable keyed response or not.\n     * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n     */\n    keyed(keyed) {\n        this._aggsDef.keyed = keyed;\n        return this;\n    }\n}\n\nmodule.exports = HistogramAggregationBase;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/histogram-aggregation.js",
    "content": "'use strict';\n\nconst HistogramAggregationBase = require('./histogram-aggregation-base');\n\n/**\n * A multi-bucket values source based aggregation that can be applied on\n * numeric values extracted from the documents. It dynamically builds fixed\n * size (a.k.a. interval) buckets over the values.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-histogram-aggregation.html)\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n * @param {number=} interval Interval to generate histogram over.\n *\n * @example\n * const agg = esb.histogramAggregation('prices', 'price', 50);\n *\n * @example\n * const agg = esb.histogramAggregation('prices', 'price', 50).minDocCount(1);\n *\n * @example\n * const agg = esb.histogramAggregation('prices', 'price', 50)\n *     .extendedBounds(0, 500);\n *\n * @example\n * const agg = esb.histogramAggregation('quantity', 'quantity', 10).missing(0);\n *\n * @extends HistogramAggregationBase\n */\nclass HistogramAggregation extends HistogramAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, interval) {\n        super(name, 'histogram', field, interval);\n    }\n}\n\nmodule.exports = HistogramAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/index.js",
    "content": "'use strict';\n\nexports.BucketAggregationBase = require('./bucket-aggregation-base');\nexports.HistogramAggregationBase = require('./histogram-aggregation-base');\nexports.RangeAggregationBase = require('./range-aggregation-base');\nexports.TermsAggregationBase = require('./terms-aggregation-base');\nexports.SignificantAggregationBase = require('./significant-aggregation-base');\n\nexports.AdjacencyMatrixAggregation = require('./adjacency-matrix-aggregation');\nexports.ChildrenAggregation = require('./children-aggregation');\nexports.CompositeAggregation = require('./composite-aggregation');\nexports.DateHistogramAggregation = require('./date-histogram-aggregation');\nexports.AutoDateHistogramAggregation = require('./auto-date-histogram-aggregation');\nexports.VariableWidthHistogramAggregation = require('./variable-width-histogram-aggregation');\nexports.DateRangeAggregation = require('./date-range-aggregation');\nexports.DiversifiedSamplerAggregation = require('./diversified-sampler-aggregation');\nexports.FilterAggregation = require('./filter-aggregation');\nexports.FiltersAggregation = require('./filters-aggregation');\nexports.GeoDistanceAggregation = require('./geo-distance-aggregation');\nexports.GeoHashGridAggregation = require('./geo-hash-grid-aggregation');\nexports.GeoHexGridAggregation = require('./geo-hex-grid-aggregation');\nexports.GeoTileGridAggregation = require('./geo-tile-grid-aggregation');\nexports.GlobalAggregation = require('./global-aggregation');\nexports.HistogramAggregation = require('./histogram-aggregation');\nexports.IpRangeAggregation = require('./ip-range-aggregation');\nexports.MissingAggregation = require('./missing-aggregation');\nexports.NestedAggregation = require('./nested-aggregation');\nexports.ParentAggregation = require('./parent-aggregation');\nexports.RangeAggregation = require('./range-aggregation');\nexports.RareTermsAggregation = require('./rare-terms-aggregation');\nexports.ReverseNestedAggregation = 
require('./reverse-nested-aggregation');\nexports.SamplerAggregation = require('./sampler-aggregation');\nexports.SignificantTermsAggregation = require('./significant-terms-aggregation');\nexports.SignificantTextAggregation = require('./significant-text-aggregation');\nexports.TermsAggregation = require('./terms-aggregation');\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/ip-range-aggregation.js",
    "content": "'use strict';\n\nconst RangeAggregationBase = require('./range-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-iprange-aggregation.html';\n\n/**\n * Dedicated range aggregation for IP typed fields.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-iprange-aggregation.html)\n *\n * @example\n * const agg = esb.ipRangeAggregation('ip_ranges', 'ip').ranges([\n *     { to: '10.0.0.5' },\n *     { from: '10.0.0.5' }\n * ]);\n *\n * @example\n * const agg = esb.ipRangeAggregation('ip_ranges', 'ip').ranges([\n *     { mask: '10.0.0.0/25' },\n *     { mask: '10.0.0.127/25' }\n * ]);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends RangeAggregationBase\n */\nclass IpRangeAggregation extends RangeAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'ip_range', field);\n        // Variable name is misleading. Only one of these needs to be present.\n        this._rangeRequiredKeys = ['from', 'to', 'mask'];\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on IpRangeAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in IpRangeAggregation');\n    }\n}\n\nmodule.exports = IpRangeAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/missing-aggregation.js",
    "content": "'use strict';\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-missing-aggregation.html';\n\n/**\n * A field data based single bucket aggregation, that creates a bucket of all\n * documents in the current document set context that are missing a field value\n * (effectively, missing a field or having the configured NULL value set).\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-missing-aggregation.html)\n *\n * @example\n * const agg = esb.missingAggregation('products_without_a_price', 'price');\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends BucketAggregationBase\n */\nclass MissingAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'missing', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on MissingAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in MissingAggregation');\n    }\n}\n\nmodule.exports = MissingAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/nested-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-nested-aggregation.html';\n\n/**\n * A special single bucket aggregation that enables aggregating nested\n * documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-nested-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchQuery('name', 'led tv'))\n *     .agg(\n *         esb.nestedAggregation('resellers', 'resellers').agg(\n *             esb.minAggregation('min_price', 'resellers.price')\n *         )\n *     );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} path `path` of the nested document\n *\n * @extends BucketAggregationBase\n */\nclass NestedAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, path) {\n        super(name, 'nested');\n\n        if (!_.isNil(path)) this._aggsDef.path = path;\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on NestedAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in NestedAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on NestedAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in NestedAggregation');\n    }\n\n    /**\n     * Sets the nested path\n     *\n     * @param {string} path `path` of the nested document\n     * @returns {NestedAggregation} returns `this` so that calls can be chained\n     */\n    path(path) {\n        this._aggsDef.path = path;\n     
   return this;\n    }\n}\n\nmodule.exports = NestedAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/parent-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-parent-aggregation.html';\n\n/**\n * A special single bucket aggregation that enables aggregating\n * from buckets on child document types to buckets on parent documents.\n *\n * This aggregation relies on the `_parent` field in the mapping.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-parent-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.termsAggregation('top-names', 'owner.display_name.keyword')\n *             .size(10)\n *             .agg(\n *                 esb.parentAggregation('to-questions')\n *                     .type('answer')\n *                     .agg(\n *                         esb.termsAggregation(\n *                             'top-tags',\n *                             'tags.keyword'\n *                         ).size(10)\n *                     )\n *             )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} type The type of the child document.\n *\n * @extends BucketAggregationBase\n */\nclass ParentAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, type) {\n        super(name, 'parent');\n\n        if (!_.isNil(type)) this.type(type);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ParentAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in ParentAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ParentAggregation\n  
   */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in ParentAggregation');\n    }\n\n    /**\n     * Sets the child type/mapping for aggregation.\n     *\n     * @param {string} type The child type that the buckets in the parent space should be mapped to.\n     * @returns {ParentAggregation} returns `this` so that calls can be chained\n     */\n    type(type) {\n        this._aggsDef.type = type;\n        return this;\n    }\n}\n\nmodule.exports = ParentAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/range-aggregation-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst hasOwnProp = Object.prototype.hasOwnProperty;\n\n/**\n * The `RangeAggregationBase` provides support for common options used across\n * various range `Aggregation` implementations like Range Aggregation and\n * Date Range aggregation.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string} aggType Type of aggregation\n * @param {string=} field The field to aggregate on\n *\n * @extends BucketAggregationBase\n */\nclass RangeAggregationBase extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, aggType, field) {\n        super(name, aggType, field);\n        // Variable name is misleading. Only one of these needs to be present.\n        this._rangeRequiredKeys = ['from', 'to'];\n\n        this._aggsDef.ranges = [];\n    }\n\n    /**\n     * Sets the format expression for `key_as_string` in response buckets.\n     * If no format is specified, then it will use the format specified in the field mapping.\n     *\n     * @param {string} fmt Supports expressive [date format pattern](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html#date-format-pattern) for Date Histograms\n     * @returns {RangeAggregationBase} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._aggsDef.format = fmt;\n        return this;\n    }\n\n    /**\n     * Adds a range to the list of existing range expressions.\n     *\n     * @param {Object} range Range to aggregate over. 
Valid keys are `from`, `to` and `key`\n     * @returns {RangeAggregationBase} returns `this` so that calls can be chained\n     *\n     * @throws {TypeError} If `range` is not an instance of object\n     * @throws {Error} If none of the required keys,\n     * `from`, `to` or `mask`(for IP range) is passed\n     */\n    range(range) {\n        checkType(range, Object);\n        if (!this._rangeRequiredKeys.some(hasOwnProp, range)) {\n            throw new Error(\n                `Invalid Range! Range must have at least one of ${this._rangeRequiredKeys}`\n            );\n        }\n\n        this._aggsDef.ranges.push(range);\n        return this;\n    }\n\n    /**\n     * Adds the list of ranges to the list of existing range expressions.\n     *\n     * @param {Array<Object>} ranges Ranges to aggregate over.\n     * Each item must be an object with keys `from`, `to` and `key`.\n     * @returns {RangeAggregationBase} returns `this` so that calls can be chained\n     *\n     * @throws {TypeError} If `ranges` is not an instance of an array or\n     * an item in the array is not an instance of object\n     * @throws {Error} If none of the required keys,\n     * `from`, `to` or `mask`(for IP range) is passed\n     */\n    ranges(ranges) {\n        checkType(ranges, Array);\n\n        ranges.forEach(range => this.range(range));\n        return this;\n    }\n\n    /**\n     * Sets the missing parameter which defines how documents\n     * that are missing a value should be treated.\n     *\n     * @param {string} value\n     * @returns {RangeAggregationBase} returns `this` so that calls can be chained\n     */\n    missing(value) {\n        this._aggsDef.missing = value;\n        return this;\n    }\n\n    /**\n     * Enable the response to be returned as a keyed object where the key is the\n     * bucket interval.\n     *\n     * @example\n     * const agg = esb.dateRangeAggregation('range', 'date')\n     *     .format('MM-yyy')\n     *     .ranges([{ to: 'now-10M/M' }, { 
from: 'now-10M/M' }])\n     *     .keyed(true);\n     *\n     * @example\n     * const agg = esb.geoDistanceAggregation('rings_around_amsterdam', 'location')\n     *     .origin(esb.geoPoint().string('52.3760, 4.894'))\n     *     .ranges([\n     *         { to: 100000, key: 'first_ring' },\n     *         { from: 100000, to: 300000, key: 'second_ring' },\n     *         { from: 300000, key: 'third_ring' }\n     *     ])\n     *     .keyed(true);\n     *\n     * @param {boolean} keyed To enable keyed response or not.\n     * @returns {RangeAggregationBase} returns `this` so that calls can be chained\n     */\n    keyed(keyed) {\n        this._aggsDef.keyed = keyed;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `aggregation` query.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        if (_.isEmpty(this._aggsDef.ranges)) {\n            throw new Error('`ranges` cannot be empty.');\n        }\n\n        return super.toJSON();\n    }\n}\n\nmodule.exports = RangeAggregationBase;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/range-aggregation.js",
    "content": "'use strict';\n\nconst RangeAggregationBase = require('./range-aggregation-base');\n\n/**\n * A multi-bucket value source based aggregation that enables the user to\n * define a set of ranges - each representing a bucket. During the aggregation\n * process, the values extracted from each document will be checked against each\n * bucket range and \"bucket\" the relevant/matching document.\n *\n * Note that this aggregation includes the from value and excludes the to\n * value for each range.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-range-aggregation.html)\n *\n * @example\n * const agg = esb.rangeAggregation('price_ranges', 'price').ranges([\n *     { to: 50 },\n *     { from: 50, to: 100 },\n *     { from: 100 }\n * ]);\n *\n * @example\n * const agg = esb.rangeAggregation('price_ranges')\n *     .script(esb.script('inline', \"doc['price'].value\").lang('painless'))\n *     .ranges([{ to: 50 }, { from: 50, to: 100 }, { from: 100 }]);\n *\n * @example\n * // Value script for on-the-fly conversion before aggregation\n * const agg = esb.rangeAggregation('price_ranges', 'price')\n *     .script(\n *         esb.script('inline', '_value * params.conversion_rate')\n *             .lang('painless')\n *             .params({ conversion_rate: 0.8 })\n *     )\n *     .ranges([{ to: 50 }, { from: 50, to: 100 }, { from: 100 }]);\n *\n * @example\n * // Compute statistics over the prices in each price range\n * const agg = esb.rangeAggregation('price_ranges', 'price')\n *     .ranges([{ to: 50 }, { from: 50, to: 100 }, { from: 100 }])\n *     // Passing price to Stats Aggregation is optional(same value source)\n *     .agg(esb.statsAggregation('price_stats', 'price'));\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends RangeAggregationBase\n */\nclass RangeAggregation 
extends RangeAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'range', field);\n    }\n}\n\nmodule.exports = RangeAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/rare-terms-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-rare-terms-aggregation.html';\n\n/**\n * A multi-bucket value source based aggregation which finds\n * \"rare\" terms — terms that are at the long-tail of the\n * distribution and are not frequent. Conceptually, this is like\n * a terms aggregation that is sorted by `_count` ascending.\n * As noted in the terms aggregation docs, actually ordering\n * a `terms` agg by count ascending has unbounded error.\n * Instead, you should use the `rare_terms` aggregation\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-rare-terms-aggregation.html)\n *\n * NOTE: Only available in Elasticsearch 7.3.0+.\n *\n * @example\n * const agg = esb.rareTermsAggregation('genres', 'genre');\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string} field The field we wish to find rare terms in\n *\n * @extends BucketAggregationBase\n */\nclass RareTermsAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'rare_terms', field);\n    }\n\n    /**\n     * Sets the maximum number of documents a term should appear in.\n     *\n     * @example\n     * const agg = esb.rareTermsAggregation('genres', 'genre').maxDocCount(2);\n     *\n     * @param {number} maxDocCnt Integer value for maximum number of documents a term should appear in.\n     * Max doc count can be between 1 and 100.\n     * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n     */\n    maxDocCount(maxDocCnt) {\n        if (_.isNil(maxDocCnt) || maxDocCnt < 1 || maxDocCnt > 100) {\n            throw new Error('`maxDocCount` can only be value from 1 
to 100.');\n        }\n\n        this._aggsDef.max_doc_count = maxDocCnt;\n        return this;\n    }\n\n    /**\n     * Sets the precision of the internal CuckooFilters. Smaller precision\n     * leads to better approximation, but higher memory usage.\n     * Cannot be smaller than 0.00001\n     *\n     * @example\n     * const agg = esb.rareTermsAggregation('genres', 'genre').precision(0.001);\n     *\n     * @param {number} precision Float value for precision of the internal CuckooFilters. Default is 0.01\n     * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n     */\n    precision(precision) {\n        if (precision < 0.00001) {\n            throw new Error('`precision` must be greater than 0.00001.');\n        }\n\n        this._aggsDef.precision = precision;\n        return this;\n    }\n\n    /**\n     * Sets terms that should be included in the aggregation\n     *\n     * @example\n     * const agg = esb.rareTermsAggregation('genres', 'genre').include('swi*');\n     *\n     * @param {string} include Regular expression that will determine what values\n     * are \"allowed\" to be aggregated\n     * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n     */\n    include(include) {\n        this._aggsDef.include = include;\n        return this;\n    }\n\n    /**\n     * Sets terms that should be excluded from the aggregation\n     *\n     * @example\n     * const agg = esb.rareTermsAggregation('genres', 'genre').exclude('electro*');\n     *\n     * @param {string} exclude Regular expression that will determine what values\n     * should not be aggregated\n     * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n     */\n    exclude(exclude) {\n        this._aggsDef.exclude = exclude;\n        return this;\n    }\n\n    /**\n     * Sets the missing parameter which defines how documents\n     * that are missing a value should be treated.\n     *\n     * @param {string} value\n    
 * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n     */\n    missing(value) {\n        this._aggsDef.missing = value;\n        return this;\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on RareTermsAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in RareTermsAggregation');\n    }\n}\n\nmodule.exports = RareTermsAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/reverse-nested-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-reverse-nested-aggregation.html';\n\n/**\n * A special single bucket aggregation that enables aggregating\n * on parent docs from nested documents. Effectively this\n * aggregation can break out of the nested block structure and\n * link to other nested structures or the root document,\n * which allows nesting other aggregations that aren’t part of\n * the nested object in a nested aggregation.\n *\n * The `reverse_nested` aggregation must be defined inside a nested aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-reverse-nested-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchQuery('name', 'led tv'))\n *     .agg(\n *         esb.nestedAggregation('comments', 'comments').agg(\n *             esb.termsAggregation('top_usernames', 'comments.username').agg(\n *                 esb.reverseNestedAggregation('comment_to_issue').agg(\n *                     esb.termsAggregation('top_tags_per_comment', 'tags')\n *                 )\n *             )\n *         )\n *     );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} path Defines to what nested object field should be joined back.\n * The default is empty, which means that it joins back to the root / main document\n * level.\n *\n * @extends BucketAggregationBase\n */\nclass ReverseNestedAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, path) {\n        super(name, 'reverse_nested');\n\n        if (!_.isNil(path)) this._aggsDef.path = path;\n    }\n\n    /**\n     * @override\n     * @throws {Error} This 
method cannot be called on ReverseNestedAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in ReverseNestedAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ReverseNestedAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in ReverseNestedAggregation');\n    }\n\n    /**\n     * Sets the level to join back for subsequent aggregations in a multiple\n     * layered nested object types\n     *\n     * @param {string} path Defines to what nested object field should be joined back.\n     * The default is empty, which means that it joins back to the root / main document\n     * level.\n     * @returns {ReverseNestedAggregation} returns `this` so that calls can be chained\n     */\n    path(path) {\n        this._aggsDef.path = path;\n        return this;\n    }\n}\n\nmodule.exports = ReverseNestedAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/sampler-aggregation.js",
    "content": "'use strict';\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-sampler-aggregation.html';\n\n/**\n * A filtering aggregation used to limit any sub aggregations'\n * processing to a sample of the top-scoring documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-sampler-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.queryStringQuery('tags:kibana OR tags:javascript'))\n *     .agg(\n *         esb.samplerAggregation('sample')\n *             .shardSize(200)\n *             .agg(\n *                 esb.significantTermsAggregation(\n *                     'keywords',\n *                     'tags'\n *                 ).exclude(['kibana', 'javascript'])\n *             )\n *     );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n *\n * @extends BucketAggregationBase\n */\nclass SamplerAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        super(name, 'sampler');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on SamplerAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in SamplerAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on SamplerAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in SamplerAggregation');\n    }\n\n    /**\n     * The shard_size parameter limits how many top-scoring documents\n     * are collected in the sample processed on each shard. 
The default value is 100.\n     *\n     * @param {number} size Maximum number of documents to return from each shard(Integer)\n     * @returns {SamplerAggregation} returns `this` so that calls can be chained\n     */\n    shardSize(size) {\n        this._aggsDef.shard_size = size;\n        return this;\n    }\n}\n\nmodule.exports = SamplerAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/significant-aggregation-base.js",
    "content": "'use strict';\n\nconst {\n    Query,\n    Script,\n    util: { checkType }\n} = require('../../core');\n\nconst TermsAggregationBase = require('./terms-aggregation-base');\n\n/**\n * The `SignificantAggregationBase` provides support for common options used\n * in `SignificantTermsAggregation` and `SignificantTextAggregation`.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @extends TermsAggregationBase\n */\nclass SignificantAggregationBase extends TermsAggregationBase {\n    /**\n     * Use JLH score as significance score.\n     *\n     * @returns {SignificantAggregationBase} returns `this` so that calls can be chained\n     */\n    jlh() {\n        // I am guessing here\n        // Reference is not clear on usage\n        this._aggsDef.jlh = {};\n        return this;\n    }\n\n    /**\n     * Use `mutual_information` as significance score\n     *\n     * @param {boolean=} includeNegatives Default `true`. If set to `false`,\n     * filters out the terms that appear less often in the subset than in\n     * documents outside the subset\n     * @param {boolean=} backgroundIsSuperset `true`(default) if the documents in the bucket\n     * are also contained in the background. If instead you defined a custom background filter\n     * that represents a different set of documents that you want to compare to, pass `false`\n     * @returns {SignificantAggregationBase} returns `this` so that calls can be chained\n     */\n    mutualInformation(includeNegatives = true, backgroundIsSuperset = true) {\n        this._aggsDef.mutual_information = {\n            include_negatives: includeNegatives,\n            background_is_superset: backgroundIsSuperset\n        };\n        return this;\n    }\n\n    /**\n     * Use `chi_square` as significance score\n     *\n     * @param {boolean} includeNegatives Default `true`. 
If set to `false`,\n     * filters out the terms that appear less often in the subset than in\n     * documents outside the subset\n     * @param {boolean} backgroundIsSuperset `true`(default) if the documents in the bucket\n     * are also contained in the background. If instead you defined a custom background filter\n     * that represents a different set of documents that you want to compare to, pass `false`\n     * @returns {SignificantAggregationBase} returns `this` so that calls can be chained\n     */\n    chiSquare(includeNegatives = true, backgroundIsSuperset = true) {\n        this._aggsDef.chi_square = {\n            include_negatives: includeNegatives,\n            background_is_superset: backgroundIsSuperset\n        };\n        return this;\n    }\n\n    /**\n     * Sets `gnd`, google normalized score to be used as significance score.\n     *\n     * @param {boolean} backgroundIsSuperset `true`(default) if the documents in the bucket\n     * are also contained in the background. If instead you defined a custom background filter\n     * that represents a different set of documents that you want to compare to, pass `false`\n     * @returns {SignificantAggregationBase} returns `this` so that calls can be chained\n     */\n    gnd(backgroundIsSuperset = true) {\n        this._aggsDef.gnd = {\n            background_is_superset: backgroundIsSuperset\n        };\n        return this;\n    }\n\n    /**\n     * Use a simple calculation of the number of documents in the foreground sample with a term\n     * divided by the number of documents in the background with the term. 
By default this\n     * produces a score greater than zero and less than one.\n     *\n     * @returns {SignificantAggregationBase} returns `this` so that calls can be chained\n     */\n    percentage() {\n        this._aggsDef.percentage = {};\n        return this;\n    }\n\n    /**\n     * Sets script for customized score calculation.\n     *\n     * @param {Script} script\n     * @returns {SignificantAggregationBase} returns `this` so that calls can be chained\n     */\n    scriptHeuristic(script) {\n        checkType(script, Script);\n\n        this._aggsDef.script_heuristic = { script };\n        return this;\n    }\n\n    /**\n     * Sets the `background_filter` to narrow the scope of statistical information\n     * for background term frequencies instead of using the entire index.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchQuery('text', 'madrid'))\n     *     .agg(\n     *         esb.significantAggregationBase('tags', 'tag').backgroundFilter(\n     *             esb.termQuery('text', 'spain')\n     *         )\n     *     );\n     *\n     * @param {Query} filterQuery Filter query\n     * @returns {SignificantAggregationBase} returns `this` so that calls can be chained\n     */\n    backgroundFilter(filterQuery) {\n        checkType(filterQuery, Query);\n\n        this._aggsDef.background_filter = filterQuery;\n        return this;\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on SignificantAggregationBase\n     */\n    script() {\n        console.log(`Please refer ${this._refUrl}`);\n        throw new Error(`script is not supported in ${this.constructor.name}`);\n    }\n}\n\nmodule.exports = SignificantAggregationBase;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/significant-terms-aggregation.js",
    "content": "'use strict';\n\nconst SignificantAggregationBase = require('./significant-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-significantterms-aggregation.html';\n\n/**\n * An aggregation that returns interesting or unusual occurrences of terms in\n * a set.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-significantterms-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.termsQuery('force', 'British Transport Police'))\n *     .agg(\n *         esb.significantTermsAggregation(\n *             'significantCrimeTypes',\n *             'crime_type'\n *         )\n *     );\n *\n * @example\n * // Use parent aggregation for segregated data analysis\n * const agg = esb.termsAggregation('forces', 'force').agg(\n *     esb.significantTermsAggregation('significantCrimeTypes', 'crime_type')\n * );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends SignificantAggregationBase\n */\nclass SignificantTermsAggregation extends SignificantAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'significant_terms', ES_REF_URL, field);\n    }\n}\n\nmodule.exports = SignificantTermsAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/significant-text-aggregation.js",
    "content": "'use strict';\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst SignificantAggregationBase = require('./significant-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-significanttext-aggregation.html';\n\n/**\n * An aggregation that returns interesting or unusual occurrences of free-text\n * terms in a set. It is like the `SignificantTermsAggregation` but differs in\n * that:\n *   - It is specifically designed for use on type `text` fields\n *   - It does not require field data or doc-values\n *   - It re-analyzes text content on-the-fly meaning it can also filter\n *     duplicate sections of noisy text that otherwise tend to skew statistics.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-significanttext-aggregation.html)\n *\n * NOTE: This query was added in elasticsearch v6.0.\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *   .query(esb.matchQuery('content', 'Bird flu'))\n *   .agg(\n *     esb.samplerAggregation('my_sample')\n *       .shardSize(100)\n *       .agg(esb.significantTextAggregation('keywords', 'content'))\n *   );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends SignificantAggregationBase\n */\nclass SignificantTextAggregation extends SignificantAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'significant_text', ES_REF_URL, field);\n    }\n\n    /**\n     * Control if duplicate paragraphs of text should try be filtered from the\n     * statistical text analysis. Can improve results but slows down analysis.\n     * Default is `false`.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *   .query(esb.matchQuery('content', 'elasticsearch'))\n     *   .agg(\n     *     esb.samplerAggregation('sample')\n     *       .shardSize(100)\n     *       .agg(\n     *         esb.significantTextAggregation('keywords', 'content')\n     *           .filterDuplicateText(true)\n     *       )\n     *   );\n     *\n     * @param {boolean} enable\n     * @returns {SignificantTextAggregation} returns `this` so that calls can be chained\n     */\n    filterDuplicateText(enable) {\n        this._aggsDef.filter_duplicate_text = enable;\n        return this;\n    }\n\n    /**\n     * Selects the fields to load from `_source` JSON and analyze. If none are\n     * specified, the indexed \"fieldName\" value is assumed to also be the name\n     * of the JSON field holding the value\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *   .query(esb.matchQuery('custom_all', 'elasticsearch'))\n     *   .agg(\n     *     esb.significantTextAggregation('tags', 'custom_all')\n     *       .sourceFields(['content', 'title'])\n     *   );\n     *\n     * @param {Array<string>} srcFields Array of fields\n     * @returns {SignificantTextAggregation} returns `this` so that calls can be chained\n     */\n    sourceFields(srcFields) {\n        checkType(srcFields, Array);\n\n        this._aggsDef.source_fields = srcFields;\n        return this;\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on SignificantTextAggregation\n     */\n    missing() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error(\n            'missing is not supported in SignificantTextAggregation'\n        );\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on SignificantTextAggregation\n     */\n    executionHint() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error(\n            'executionHint is not supported in SignificantTextAggregation'\n        );\n    }\n}\n\nmodule.exports = SignificantTextAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/terms-aggregation-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { invalidParam },\n    consts: { EXECUTION_HINT_SET }\n} = require('../../core');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\nconst invalidExecutionHintParam = invalidParam(\n    '',\n    'execution_hint',\n    EXECUTION_HINT_SET\n);\n\n/**\n * The `TermsAggregationBase` provides support for common options used across\n * various terms `Aggregation` implementations like Significant terms and\n * Terms aggregation.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string} aggType Type of aggregation\n * @param {string} refUrl Elasticsearch reference URL.\n * @param {string=} field The field to aggregate on\n *\n * @extends BucketAggregationBase\n */\nclass TermsAggregationBase extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, aggType, refUrl, field) {\n        super(name, aggType, field);\n\n        this._refUrl = refUrl;\n    }\n\n    /**\n     * Sets the format expression for `key_as_string` in response buckets.\n     * If no format is specified, then it will use the first format specified in the field mapping.\n     *\n     * @param {string} fmt Format mask to apply on aggregation response. Example: ####.00.\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._aggsDef.format = fmt;\n        return this;\n    }\n\n    /**\n     * Sets the minimum number of matching hits required to return the terms.\n     *\n     * @example\n     * const agg = esb.significantTermsAggregation('tags', 'tag').minDocCount(10);\n     *\n     * @param {number} minDocCnt Integer value for minimum number of documents\n     * required to return bucket in response\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     */\n    minDocCount(minDocCnt) {\n        this._aggsDef.min_doc_count = minDocCnt;\n        return this;\n    }\n\n    /**\n     * Sets the parameter which regulates the _certainty_ a shard has if the term\n     * should actually be added to the candidate list or not with respect to\n     * the `min_doc_count`.\n     * Terms will only be considered if their local shard frequency within\n     * the set is higher than the `shard_min_doc_count`.\n     *\n     * @param {number} minDocCnt Sets the `shard_min_doc_count` parameter. Default is 1\n     * and has no effect unless you explicitly set it.\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     */\n    shardMinDocCount(minDocCnt) {\n        this._aggsDef.shard_min_doc_count = minDocCnt;\n        return this;\n    }\n\n    /**\n     * Defines how many term buckets should be returned out of the overall terms list.\n     *\n     * @example\n     * const agg = esb.termsAggregation('products', 'product').size(5);\n     *\n     * @param {number} size\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     */\n    size(size) {\n        this._aggsDef.size = size;\n        return this;\n    }\n\n    /**\n     * Sets the `shard_size` parameter to control the volumes of candidate terms\n     * produced by each shard. For the default, -1, shard_size will be automatically\n     * estimated based on the number of shards and the size parameter.\n     *\n     * `shard_size` cannot be smaller than size (as it doesn’t make much sense).\n     * When it is, elasticsearch will override it and reset it to be equal to size.\n     *\n     * @param {number} size\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     */\n    shardSize(size) {\n        this._aggsDef.shard_size = size;\n        return this;\n    }\n\n    /**\n     * Sets the missing parameter which defines how documents\n     * that are missing a value should be treated.\n     *\n     * @param {string} value\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     */\n    missing(value) {\n        this._aggsDef.missing = value;\n        return this;\n    }\n\n    /**\n     * Filter the values for which buckets will be created.\n     *\n     * @example\n     * const agg = esb.termsAggregation('tags', 'tags')\n     *     .include('.*sport.*')\n     *     .exclude('water_.*');\n     *\n     * @example\n     * // Match on exact values\n     * const reqBody = esb.requestBodySearch()\n     *     .agg(\n     *         esb.termsAggregation('JapaneseCars', 'make').include([\n     *             'mazda',\n     *             'honda'\n     *         ])\n     *     )\n     *     .agg(\n     *         esb.termsAggregation('ActiveCarManufacturers', 'make').exclude([\n     *             'rover',\n     *             'jensen'\n     *         ])\n     *     );\n     *\n     * @param {RegExp|Array|string} clause Determine what values are \"allowed\" to be aggregated\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     */\n    include(clause) {\n        this._aggsDef.include = clause;\n        return this;\n    }\n\n    /**\n     * Filter the values for which buckets will be created.\n     *\n     * @example\n     * const agg = esb.termsAggregation('tags', 'tags')\n     *     .include('.*sport.*')\n     *     .exclude('water_.*');\n     *\n     * @example\n     * // Match on exact values\n     * const reqBody = esb.requestBodySearch()\n     *     .agg(\n     *         esb.termsAggregation('JapaneseCars', 'make').include([\n     *             'mazda',\n     *             'honda'\n     *         ])\n     *     )\n     *     .agg(\n     *         esb.termsAggregation('ActiveCarManufacturers', 'make').exclude([\n     *             'rover',\n     *             'jensen'\n     *         ])\n     *     );\n     *\n     * @param {RegExp|Array|string} clause Determine the values that should not be aggregated\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     */\n    exclude(clause) {\n        this._aggsDef.exclude = clause;\n        return this;\n    }\n\n    /**\n     * This setting can influence the management of the values used\n     * for de-duplication. Each option will hold up to shard_size\n     * values in memory while performing de-duplication but\n     * the type of value held can be controlled\n     *\n     * @example\n     * const agg = esb.significantTermsAggregation('tags', 'tag').executionHint('map');\n     *\n     * @example\n     * const agg = esb.termsAggregation('tags', 'tags').executionHint('map');\n     *\n     * @param {string} hint the possible values are `map`, `global_ordinals`,\n     * `global_ordinals_hash` and `global_ordinals_low_cardinality`\n     * @returns {TermsAggregationBase} returns `this` so that calls can be chained\n     * @throws {Error} If Execution Hint is outside the accepted set.\n     */\n    executionHint(hint) {\n        if (_.isNil(hint)) invalidExecutionHintParam(hint, this._refUrl);\n\n        const hintLower = hint.toLowerCase();\n        if (!EXECUTION_HINT_SET.has(hintLower)) {\n            invalidExecutionHintParam(hint, this._refUrl);\n        }\n\n        this._aggsDef.execution_hint = hint;\n        return this;\n    }\n}\n\nmodule.exports = TermsAggregationBase;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/terms-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { invalidParam }\n} = require('../../core');\n\nconst TermsAggregationBase = require('./terms-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-terms-aggregation.html';\n\nconst invalidDirectionParam = invalidParam(\n    ES_REF_URL,\n    'direction',\n    \"'asc' or 'desc'\"\n);\nconst invalidCollectModeParam = invalidParam(\n    ES_REF_URL,\n    'mode',\n    \"'breadth_first' or 'depth_first'\"\n);\n\n/**\n * A multi-bucket value source based aggregation where buckets are dynamically\n * built - one per unique value.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-terms-aggregation.html)\n *\n * @example\n * const agg = esb.termsAggregation('genres', 'genre');\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends TermsAggregationBase\n */\nclass TermsAggregation extends TermsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'terms', ES_REF_URL, field);\n    }\n\n    /**\n     * When set to `true`, shows an error value for each term returned by the aggregation\n     * which represents the _worst case error_ in the document count and can be useful\n     * when deciding on a value for the shard_size parameter.\n     *\n     * @param {boolean} enable\n     * @returns {TermsAggregation} returns `this` so that calls can be chained\n     */\n    showTermDocCountError(enable) {\n        this._aggsDef.show_term_doc_count_error = enable;\n        return this;\n    }\n\n    /**\n     * Break the analysis up into multiple requests by grouping the field’s values\n     * into a number of partitions at query-time and processing only one\n     * partition in each request.\n     *\n     * Note that this method is a special case as the name doesn't map to the\n     * elasticsearch parameter name. This is required because there is already\n     * a method for `include` applicable for Terms aggregations. However, this\n     * could change depending on community interest.\n     *\n     * @example\n     * const agg = esb.termsAggregation('expired_sessions', 'account_id')\n     *     .includePartition(0, 20)\n     *     .size(10000)\n     *     .order('last_access', 'asc')\n     *     .agg(esb.maxAggregation('last_access', 'access_date'));\n     *\n     * @param {number} partition\n     * @param {number} numPartitions\n     * @returns {TermsAggregation} returns `this` so that calls can be chained\n     */\n    includePartition(partition, numPartitions) {\n        // TODO: Print warning if include key is being overwritten\n        this._aggsDef.include = {\n            partition,\n            num_partitions: numPartitions\n        };\n        return this;\n    }\n\n    /**\n     * Can be used for deferring calculation of child aggregations by using\n     * `breadth_first` mode. In `depth_first` mode all branches of the aggregation\n     * tree are expanded in one depth-first pass and only then any pruning occurs.\n     *\n     * @example\n     * const agg = esb.termsAggregation('actors', 'actors')\n     *     .size(10)\n     *     .collectMode('breadth_first')\n     *     .agg(esb.termsAggregation('costars', 'actors').size(5));\n     *\n     * @param {string} mode The possible values are `breadth_first` and `depth_first`.\n     * @returns {TermsAggregation} returns `this` so that calls can be chained\n     */\n    collectMode(mode) {\n        if (_.isNil(mode)) invalidCollectModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (modeLower !== 'breadth_first' && modeLower !== 'depth_first') {\n            invalidCollectModeParam(mode);\n        }\n\n        this._aggsDef.collect_mode = modeLower;\n        return this;\n    }\n\n    /**\n     * Sets the ordering for buckets\n     *\n     * @example\n     * // Ordering the buckets by their doc `_count` in an ascending manner\n     * const agg = esb.termsAggregation('genres', 'genre').order('_count', 'asc');\n     *\n     * @example\n     * // Ordering the buckets alphabetically by their terms in an ascending manner\n     * const agg = esb.termsAggregation('genres', 'genre').order('_term', 'asc');\n     *\n     * @example\n     * // Ordering the buckets by single value metrics sub-aggregation\n     * // (identified by the aggregation name)\n     * const agg = esb.termsAggregation('genres', 'genre')\n     *     .order('max_play_count', 'asc')\n     *     .agg(esb.maxAggregation('max_play_count', 'play_count'));\n     *\n     * @example\n     * // Ordering the buckets by multi value metrics sub-aggregation\n     * // (identified by the aggregation name):\n     * const agg = esb.termsAggregation('genres', 'genre')\n     *     .order('playback_stats.max', 'desc')\n     *     .agg(esb.statsAggregation('playback_stats', 'play_count'));\n     *\n     * @example\n     * // Multiple order criteria\n     * const agg = esb.termsAggregation('countries')\n     *     .field('artist.country')\n     *     .order('rock>playback_stats.avg', 'desc')\n     *     .order('_count', 'desc')\n     *     .agg(\n     *         esb.filterAggregation('rock')\n     *             .filter(esb.termQuery('genre', 'rock'))\n     *             .agg(esb.statsAggregation('playback_stats', 'play_count'))\n     *     );\n     *\n     * @param {string} key\n     * @param {string} direction `asc` or `desc`\n     * @returns {TermsAggregation} returns `this` so that calls can be chained\n     */\n    order(key, direction = 'desc') {\n        if (_.isNil(direction)) invalidDirectionParam(direction);\n\n        const directionLower = direction.toLowerCase();\n        if (directionLower !== 'asc' && directionLower !== 'desc') {\n            invalidDirectionParam(direction);\n        }\n\n        if (_.has(this._aggsDef, 'order')) {\n            if (!Array.isArray(this._aggsDef.order)) {\n                this._aggsDef.order = [this._aggsDef.order];\n            }\n\n            this._aggsDef.order.push({ [key]: directionLower });\n        } else {\n            this._aggsDef.order = { [key]: directionLower };\n        }\n\n        return this;\n    }\n}\n\nmodule.exports = TermsAggregation;\n"
  },
  {
    "path": "src/aggregations/bucket-aggregations/variable-width-histogram-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst BucketAggregationBase = require('./bucket-aggregation-base');\n\n/**\n * This is a multi-bucket aggregation similar to Histogram.\n * However, the width of each bucket is not specified.\n * Rather, a target number of buckets is provided and bucket intervals are dynamically determined based on the document distribution.\n * This is done using a simple one-pass document clustering algorithm that aims to obtain low distances between bucket centroids.\n * Unlike other multi-bucket aggregations, the intervals will not necessarily have a uniform width.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-variablewidthhistogram-aggregation.html)\n *\n * NOTE: Only available in Elasticsearch v7.9.0+\n * @example\n * const agg = esb.variableWidthHistogramAggregation('price', 'lowestPrice', 10)\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string} [field] The field to aggregate on\n * @param {number} [buckets] Bucket count to generate histogram over.\n *\n * @extends BucketAggregationBase\n */\nclass VariableWidthHistogramAggregation extends BucketAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, buckets) {\n        super(name, 'variable_width_histogram', field);\n        if (!_.isNil(buckets)) this._aggsDef.buckets = buckets;\n    }\n\n    /**\n     * Sets the histogram bucket count. Buckets are generated based on this value.\n     *\n     * @param {number} buckets Bucket count to generate histogram over.\n     * @returns {VariableWidthHistogramAggregation} returns `this` so that calls can be chained\n     */\n    buckets(buckets) {\n        this._aggsDef.buckets = buckets;\n        return this;\n    }\n}\n\nmodule.exports = VariableWidthHistogramAggregation;\n"
  },
  {
    "path": "src/aggregations/index.js",
    "content": "'use strict';\n\n// Not used in favor of explicit exports.\n// IDE seems to handle those better\n\n// const _ = require('lodash');\n\n// const { util: { constructorWrapper } } = require('../core');\n\n// const metricsAggs = require('./metrics-aggregations'),\n//     bucketAggs = require('./bucket-aggregations');\n\n// /* === Metrics Aggregations === */\n// for (const clsName in metricsAggs) {\n//     if (!has(metricsAggs, clsName)) continue;\n\n//     exports[clsName] = metricsAggs[clsName];\n//     exports[_.lowerFirst(clsName)] = constructorWrapper(metricsAggs[clsName]);\n// }\n\n// /* === Bucket Aggregations === */\n// for (const clsName in bucketAggs) {\n//     if (!has(bucketAggs, clsName)) continue;\n\n//     exports[clsName] = bucketAggs[clsName];\n//     exports[_.lowerFirst(clsName)] = constructorWrapper(bucketAggs[clsName]);\n// }\n\nexports.metricsAggregations = require('./metrics-aggregations');\n\nexports.bucketAggregations = require('./bucket-aggregations');\n\nexports.pipelineAggregations = require('./pipeline-aggregations');\n\nexports.matrixAggregations = require('./matrix-aggregations');\n"
  },
  {
    "path": "src/aggregations/matrix-aggregations/index.js",
    "content": "'use strict';\n\nexports.MatrixStatsAggregation = require('./matrix-stats-aggregation');\n"
  },
  {
    "path": "src/aggregations/matrix-aggregations/matrix-stats-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Aggregation,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * The `matrix_stats` aggregation is a numeric aggregation that computes\n * statistics over a set of document fields\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-matrix-stats-aggregation.html)\n *\n * @example\n * const agg = esb.matrixStatsAggregation('matrixstats', ['poverty', 'income']);\n *\n * @param {string} name A valid aggregation name\n * @param {Array=} fields Array of fields\n *\n * @extends Aggregation\n */\nclass MatrixStatsAggregation extends Aggregation {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, fields) {\n        super(name, 'matrix_stats');\n\n        if (!_.isNil(fields)) this.fields(fields);\n    }\n\n    /**\n     * The `fields` setting defines the set of fields (as an array) for computing\n     * the statistics.\n     *\n     * @example\n     * const agg = esb.matrixStatsAggregation('matrixstats')\n     *     .fields(['poverty', 'income']);\n     *\n     * @param {Array<string>} fields Array of fields\n     * @returns {MatrixStatsAggregation} returns `this` so that calls can be chained\n     */\n    fields(fields) {\n        checkType(fields, Array);\n\n        this._aggsDef.fields = fields;\n        return this;\n    }\n\n    /**\n     * The `mode` parameter controls what array value the aggregation will use for\n     * array or multi-valued fields\n     * @param {string} mode One of `avg`, `min`, `max`, `sum` and `median`\n     * @returns {MatrixStatsAggregation} returns `this` so that calls can be chained\n     */\n    mode(mode) {\n        // TODO: Add a set in consts and validate input\n        this._aggsDef.mode = mode;\n        return this;\n    }\n\n    /**\n     * The missing parameter defines how documents that are missing a value should\n     * be treated. By default they will be ignored but it is also possible to treat\n     * them as if they had a value.\n     *\n     * @example\n     * const agg = esb.matrixStatsAggregation('matrixstats')\n     *     .fields(['poverty', 'income'])\n     *     .missing({ income: 50000 });\n     *\n     * @param {Object} missing Set of fieldname : value mappings to specify default\n     * values per field\n     * @returns {MatrixStatsAggregation} returns `this` so that calls can be chained\n     */\n    missing(missing) {\n        this._aggsDef.missing = missing;\n        return this;\n    }\n}\n\nmodule.exports = MatrixStatsAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/avg-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\n/**\n * A single-value metrics aggregation that computes the average of numeric\n * values that are extracted from the aggregated documents. These values can be\n * extracted either from specific numeric fields in the documents, or be\n * generated by a provided script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-avg-aggregation.html)\n *\n * Aggregation that computes the average of numeric values that are extracted\n * from the aggregated documents.\n *\n * @example\n * // Compute the average grade over all documents\n * const agg = esb.avgAggregation('avg_grade', 'grade');\n *\n * @example\n * // Compute the average grade based on a script\n * const agg = esb.avgAggregation('avg_grade').script(\n *     esb.script('inline', \"doc['grade'].value\").lang('painless')\n * );\n *\n * @example\n * // Value script, apply grade correction\n * const agg = esb.avgAggregation('avg_grade', 'grade').script(\n *     esb.script('inline', '_value * params.correction')\n *         .lang('painless')\n *         .params({ correction: 1.2 })\n * );\n *\n * @example\n * // Missing value\n * const agg = esb.avgAggregation('avg_grade', 'grade').missing(10);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass AvgAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'avg', field);\n    }\n}\n\nmodule.exports = AvgAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/cardinality-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-cardinality-aggregation.html';\n\n/**\n * A single-value metrics aggregation that calculates an approximate count of\n * distinct values. Values can be extracted either from specific fields in the\n * document or generated by a script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-cardinality-aggregation.html)\n *\n * Aggregation that calculates an approximate count of distinct values.\n *\n * @example\n * const agg = esb.cardinalityAggregation('author_count', 'author');\n *\n * @example\n * const agg = esb.cardinalityAggregation('author_count').script(\n *     esb.script(\n *         'inline',\n *         \"doc['author.first_name'].value + ' ' + doc['author.last_name'].value\"\n *     ).lang('painless')\n * );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass CardinalityAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'cardinality', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on CardinalityAggregation\n     */\n    format() {\n        // Not 100% sure about this.\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in CardinalityAggregation');\n    }\n\n    /**\n     * The `precision_threshold` options allows to trade memory for accuracy,\n     * and defines a unique count below which counts are expected to be close to accurate.\n     *\n     * @example\n     * const agg = esb.cardinalityAggregation(\n     *     'author_count',\n     *     'author_hash'\n     * ).precisionThreshold(100);\n     *\n     * @param {number} threshold The threshold value.\n     * The maximum supported value is 40000, thresholds above this number\n     * will have the same effect as a threshold of 40000. The default values is 3000.\n     * @returns {CardinalityAggregation} returns `this` so that calls can be chained\n     */\n    precisionThreshold(threshold) {\n        // TODO: Use validation and warning here\n        this._aggsDef.precision_threshold = threshold;\n        return this;\n    }\n}\n\nmodule.exports = CardinalityAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/extended-stats-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\n/**\n * A multi-value metrics aggregation that computes stats over numeric values\n * extracted from the aggregated documents. These values can be extracted either\n * from specific numeric fields in the documents, or be generated by a provided\n * script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-extendedstats-aggregation.html)\n *\n * Aggregation that computes extra stats over numeric values extracted from\n * the aggregated documents.\n *\n * @example\n * const agg = esb.extendedStatsAggregation('grades_stats', 'grade');\n *\n * @example\n * // Compute the grade stats based on a script\n * const agg = esb.extendedStatsAggregation('grades_stats').script(\n *     esb.script('inline', \"doc['grade'].value\").lang('painless')\n * );\n *\n * @example\n * // Value script, apply grade correction\n * const agg = esb.extendedStatsAggregation('grades_stats', 'grade').script(\n *     esb.script('inline', '_value * params.correction')\n *         .lang('painless')\n *         .params({ correction: 1.2 })\n * );\n *\n * @example\n * // Missing value\n * const agg = esb.extendedStatsAggregation('grades_stats', 'grade').missing(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass ExtendedStatsAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'extended_stats', field);\n    }\n\n    /**\n     * Set sigma in the request for getting custom boundary.\n     * sigma controls how many standard deviations +/- from the mean should be displayed\n     *\n     * @example\n     * const agg = esb.extendedStatsAggregation('grades_stats', 'grade').sigma(3);\n     *\n     * @param {number} sigma sigma can be any non-negative double,\n     * meaning you can request non-integer values such as 1.5.\n     * A value of 0 is valid, but will simply return the average for both upper and lower bounds.\n     * @returns {ExtendedStatsAggregation} returns `this` so that calls can be chained\n     */\n    sigma(sigma) {\n        this._aggsDef.sigma = sigma;\n        return this;\n    }\n}\n\nmodule.exports = ExtendedStatsAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/geo-bounds-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-geobounds-aggregation.html';\n\n/**\n * A metric aggregation that computes the bounding box\n * containing all geo_point values for a field.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-geobounds-aggregation.html)\n *\n * @example\n * const agg = esb.geoBoundsAggregation('viewport', 'location').wrapLongitude(true);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass GeoBoundsAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'geo_bounds', field);\n    }\n\n    // TODO: Override missing and take only GeoPoint as parameter\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoBoundsAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in GeoBoundsAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoBoundsAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in GeoBoundsAggregation');\n    }\n\n    /**\n     *\n     * @param {boolean} allowOverlap Optional parameter which specifies whether\n     * the bounding box should be allowed to overlap the international date line.\n     * The default value is true\n     * @returns {GeoBoundsAggregation} returns `this` so that calls can be chained\n     */\n    wrapLongitude(allowOverlap) {\n        this._aggsDef.wrap_longitude = allowOverlap;\n 
       return this;\n    }\n}\n\nmodule.exports = GeoBoundsAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/geo-centroid-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-geocentroid-aggregation.html';\n\n/**\n * A metric aggregation that computes the weighted centroid\n * from all coordinate values for a Geo-point datatype field.\n *\n * [Elasticsearchreference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-geocentroid-aggregation.html)\n *\n * @example\n * const agg = esb.geoCentroidAggregation('centroid', 'location');\n *\n * @example\n * // Combined as a sub-aggregation to other bucket aggregations\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchQuery('crime', 'burglary'))\n *     .agg(\n *         esb.termsAggregation('towns', 'town').agg(\n *             esb.geoCentroidAggregation('centroid', 'location')\n *         )\n *     );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on. field must be a Geo-point datatype type\n *\n * @extends MetricsAggregationBase\n */\nclass GeoCentroidAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'geo_centroid', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoCentroidAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in GeoCentroidAggregation');\n    }\n}\n\nmodule.exports = GeoCentroidAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/index.js",
    "content": "'use strict';\n\nexports.MetricsAggregationBase = require('./metrics-aggregation-base');\n\nexports.AvgAggregation = require('./avg-aggregation');\nexports.CardinalityAggregation = require('./cardinality-aggregation');\nexports.ExtendedStatsAggregation = require('./extended-stats-aggregation');\nexports.GeoBoundsAggregation = require('./geo-bounds-aggregation');\nexports.GeoCentroidAggregation = require('./geo-centroid-aggregation');\nexports.MaxAggregation = require('./max-aggregation');\nexports.MinAggregation = require('./min-aggregation');\nexports.PercentilesAggregation = require('./percentiles-aggregation');\nexports.PercentileRanksAggregation = require('./percentile-ranks-aggregation');\nexports.ScriptedMetricAggregation = require('./scripted-metric-aggregation');\nexports.StatsAggregation = require('./stats-aggregation');\nexports.SumAggregation = require('./sum-aggregation');\nexports.TopHitsAggregation = require('./top-hits-aggregation');\nexports.ValueCountAggregation = require('./value-count-aggregation');\nexports.WeightedAverageAggregation = require('./weighted-average-aggregation');\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/max-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\n/**\n * A single-value metrics aggregation that keeps track and returns the\n * maximum value among the numeric values extracted from the aggregated\n * documents. These values can be extracted either from specific numeric fields\n * in the documents, or be generated by a provided script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-max-aggregation.html)\n *\n * Aggregation that keeps track and returns the maximum value among the\n * numeric values extracted from the aggregated documents.\n *\n * @example\n * const agg = esb.maxAggregation('max_price', 'price');\n *\n * @example\n * // Use a file script\n * const agg = esb.maxAggregation('max_price').script(\n *     esb.script('file', 'my_script').params({ field: 'price' })\n * );\n *\n * @example\n * // Value script to apply the conversion rate to every value\n * // before it is aggregated\n * const agg = esb.maxAggregation('max_price').script(\n *     esb.script('inline', '_value * params.conversion_rate').params({\n *         conversion_rate: 1.2\n *     })\n * );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass MaxAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'max', field);\n    }\n}\n\nmodule.exports = MaxAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/metrics-aggregation-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Aggregation,\n    Script,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * The `MetricsAggregationBase` provides support for common options used across\n * various metrics `Aggregation` implementations.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} name a valid aggregation name\n * @param {string} aggType type of aggregation\n * @param {string=} field The field to aggregate on\n *\n * @extends Aggregation\n */\nclass MetricsAggregationBase extends Aggregation {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, aggType, field) {\n        super(name, aggType);\n\n        if (!_.isNil(field)) this._aggsDef.field = field;\n    }\n\n    // TODO: Investigate whether Metrics Aggregations can have sub aggregations\n    // Hide setters for `aggs` and `aggregations` if required\n\n    // TODO: Investigate case when getters will be required\n\n    /**\n     * Sets field to run aggregation on.\n     *\n     * @param {string} field a valid field name\n     * @returns {MetricsAggregationBase} returns `this` so that calls can be chained\n     */\n    field(field) {\n        this._aggsDef.field = field;\n        return this;\n    }\n\n    /**\n     * Sets script parameter for aggregation.\n     *\n     * @example\n     * // Compute the average grade based on a script\n     * const agg = esb.avgAggregation('avg_grade').script(\n     *     esb.script('inline', \"doc['grade'].value\").lang('painless')\n     * );\n     *\n     * @example\n     * // Value script, apply grade correction\n     * const agg = esb.avgAggregation('avg_grade', 'grade').script(\n     *     esb.script('inline', '_value * params.correction')\n     *         .lang('painless')\n     *         .params({ correction: 1.2 })\n     * );\n     *\n     
* @param {Script} script\n     * @returns {MetricsAggregationBase} returns `this` so that calls can be chained\n     * @throws {TypeError} If `script` is not an instance of `Script`\n     */\n    script(script) {\n        checkType(script, Script);\n\n        this._aggsDef.script = script;\n        return this;\n    }\n\n    /**\n     * Sets the missing parameter which defines how documents\n     * that are missing a value should be treated.\n     *\n     * @example\n     * const agg = esb.avgAggregation('avg_grade', 'grade').missing(10);\n     *\n     * @param {string} value\n     * @returns {MetricsAggregationBase} returns `this` so that calls can be chained\n     */\n    missing(value) {\n        this._aggsDef.missing = value;\n        return this;\n    }\n\n    /**\n     * Sets the format expression if applicable.\n     *\n     * @param {string} fmt Format mask to apply on aggregation response. Example: ####.00\n     * @returns {MetricsAggregationBase} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._aggsDef.format = fmt;\n        return this;\n    }\n}\n\nmodule.exports = MetricsAggregationBase;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/min-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\n/**\n * A single-value metrics aggregation that keeps track and returns the\n * minimum value among the numeric values extracted from the aggregated\n * documents. These values can be extracted either from specific numeric fields\n * in the documents, or be generated by a provided script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-min-aggregation.html)\n *\n * Aggregation that keeps track and returns the minimum value among numeric\n * values extracted from the aggregated documents.\n *\n * @example\n * const agg = esb.minAggregation('min_price', 'price');\n *\n * @example\n * // Use a file script\n * const agg = esb.minAggregation('min_price').script(\n *     esb.script('file', 'my_script').params({ field: 'price' })\n * );\n *\n * @example\n * // Value script to apply the conversion rate to every value\n * // before it is aggregated\n * const agg = esb.minAggregation('min_price').script(\n *     esb.script('inline', '_value * params.conversion_rate').params({\n *         conversion_rate: 1.2\n *     })\n * );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass MinAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'min', field);\n    }\n}\n\nmodule.exports = MinAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/percentile-ranks-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-percentile-rank-aggregation.html';\n\n/**\n * A multi-value metrics aggregation that calculates one or more percentile ranks\n * over numeric values extracted from the aggregated documents. These values can\n * be extracted either from specific numeric fields in the documents, or be\n * generated by a provided script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-percentile-rank-aggregation.html)\n *\n * Aggregation that calculates one or more percentiles ranks over numeric values\n * extracted from the aggregated documents.\n *\n * @example\n * const agg = esb.percentileRanksAggregation(\n *     'load_time_outlier',\n *     'load_time',\n *     [15, 30]\n * );\n *\n * @example\n * // Convert load time from mills to seconds on-the-fly using script\n * const agg = esb.percentileRanksAggregation('load_time_outlier')\n *     .values([3, 5])\n *     .script(\n *         esb.script('inline', \"doc['load_time'].value / params.timeUnit\")\n *             .lang('painless')\n *             .params({ timeUnit: 1000 })\n *     );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on. 
It must be a numeric field\n * @param {Array=} values Values to compute percentiles from.\n *\n * @throws {TypeError} If `values` is not an instance of Array\n *\n * @extends MetricsAggregationBase\n */\nclass PercentileRanksAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, values) {\n        super(name, 'percentile_ranks', field);\n\n        if (!_.isNil(values)) this.values(values);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on PercentileRanksAggregation\n     */\n    format() {\n        // Not 100% sure about this.\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error(\n            'format is not supported in PercentileRanksAggregation'\n        );\n    }\n\n    /**\n     * Enable the response to be returned as a keyed object where the key is the\n     * bucket interval.\n     *\n     * @example\n     * // Return the ranges as an array rather than a hash\n     * const agg = esb.percentileRanksAggregation('balance_outlier', 'balance')\n     *     .values([25000, 50000])\n     *     .keyed(false);\n     *\n     * @param {boolean} keyed To enable keyed response or not.\n     * @returns {PercentileRanksAggregation} returns `this` so that calls can be chained\n     */\n    keyed(keyed) {\n        this._aggsDef.keyed = keyed;\n        return this;\n    }\n\n    /**\n     * Specifies the values to compute percentiles from.\n     *\n     * @param {Array<number>} values Values to compute percentiles from.\n     * @returns {PercentileRanksAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `values` is not an instance of Array\n     */\n    values(values) {\n        checkType(values, Array);\n        this._aggsDef.values = values;\n        return this;\n    }\n\n    /**\n     * Compression controls memory usage and approximation error. 
The compression\n     * value limits the maximum number of nodes to 100 * compression. By\n     * increasing the compression value, you can increase the accuracy of your\n     * percentiles at the cost of more memory. Larger compression values also make\n     * the algorithm slower since the underlying tree data structure grows in\n     * size, resulting in more expensive operations. The default compression\n     * value is 100.\n     *\n     * @param {number} compression Parameter to balance memory utilization with estimation accuracy.\n     * @returns {PercentileRanksAggregation} returns `this` so that calls can be chained\n     */\n    tdigest(compression) {\n        this._aggsDef.tdigest = { compression };\n        return this;\n    }\n\n    /**\n     * Compression controls memory usage and approximation error. The compression\n     * value limits the maximum number of nodes to 100 * compression. By\n     * increasing the compression value, you can increase the accuracy of your\n     * percentiles at the cost of more memory. Larger compression values also make\n     * the algorithm slower since the underlying tree data structure grows in\n     * size, resulting in more expensive operations. 
The default compression\n     * value is 100.\n     *\n     * Alias for `tdigest`\n     *\n     * @param {number} compression Parameter to balance memory utilization with estimation accuracy.\n     * @returns {PercentileRanksAggregation} returns `this` so that calls can be chained\n     */\n    compression(compression) {\n        return this.tdigest(compression);\n    }\n\n    /**\n     * HDR Histogram (High Dynamic Range Histogram) is an alternative implementation\n     * that can be useful when calculating percentiles for latency measurements\n     * as it can be faster than the t-digest implementation\n     * with the trade-off of a larger memory footprint.\n     *\n     * The HDR Histogram can be used by specifying the method parameter in the request.\n     *\n     * @example\n     * const agg = esb.percentileRanksAggregation(\n     *     'load_time_outlier',\n     *     'load_time',\n     *     [15, 30]\n     * ).hdr(3);\n     *\n     * @param {number} numberOfSigDigits The resolution of values\n     * for the histogram in number of significant digits\n     * @returns {PercentileRanksAggregation} returns `this` so that calls can be chained\n     */\n    hdr(numberOfSigDigits) {\n        this._aggsDef.hdr = {\n            number_of_significant_value_digits: numberOfSigDigits\n        };\n        return this;\n    }\n}\n\nmodule.exports = PercentileRanksAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/percentiles-aggregation.js",
    "content": "'use strict';\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\n/**\n * A multi-value metrics aggregation that calculates one or more percentiles\n * over numeric values extracted from the aggregated documents. These values can\n * be extracted either from specific numeric fields in the documents, or be\n * generated by a provided script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-percentile-aggregation.html)\n *\n * Aggregation that calculates one or more percentiles over numeric values\n * extracted from the aggregated documents.\n *\n * @example\n * const agg = esb.percentilesAggregation('load_time_outlier', 'load_time');\n *\n * @example\n * // Convert load time from mills to seconds on-the-fly using script\n * const agg = esb.percentilesAggregation('load_time_outlier').script(\n *     esb.script('inline', \"doc['load_time'].value / params.timeUnit\")\n *         .lang('painless')\n *         .params({ timeUnit: 1000 })\n * );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass PercentilesAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'percentiles', field);\n    }\n\n    /**\n     * Enable the response to be returned as a keyed object where the key is the\n     * bucket interval.\n     *\n     * @example\n     * // Return the ranges as an array rather than a hash\n     * const agg = esb.percentilesAggregation('balance_outlier', 'balance').keyed(\n     *     false\n     * );\n     *\n     * @param {boolean} keyed To enable keyed response or not. 
True by default\n     * @returns {PercentilesAggregation} returns `this` so that calls can be chained\n     */\n    keyed(keyed) {\n        this._aggsDef.keyed = keyed;\n        return this;\n    }\n\n    /**\n     * Specifies the percents of interest.\n     * Requested percentiles must be a value between 0-100 inclusive\n     *\n     * @example\n     * // Specify particular percentiles to calculate\n     * const agg = esb.percentilesAggregation(\n     *     'load_time_outlier',\n     *     'load_time'\n     * ).percents([95, 99, 99.9]);\n     *\n     * @param {Array<number>} percents Parameter to specify particular percentiles to calculate\n     * @returns {PercentilesAggregation} returns `this` so that calls can be chained\n     * @throws {TypeError} If `percents` is not an instance of Array\n     */\n    percents(percents) {\n        checkType(percents, Array);\n        this._aggsDef.percents = percents;\n        return this;\n    }\n\n    /**\n     * Compression controls memory usage and approximation error. The compression\n     * value limits the maximum number of nodes to 100 * compression. By\n     * increasing the compression value, you can increase the accuracy of your\n     * percentiles at the cost of more memory. Larger compression values also make\n     * the algorithm slower since the underlying tree data structure grows in\n     * size, resulting in more expensive operations. 
The default compression\n     * value is 100.\n     *\n     * @example\n     * const agg = esb.percentilesAggregation(\n     *     'load_time_outlier',\n     *     'load_time'\n     * ).tdigest(200);\n     *\n     * @param {number} compression Parameter to balance memory utilization with estimation accuracy.\n     * @returns {PercentilesAggregation} returns `this` so that calls can be chained\n     */\n    tdigest(compression) {\n        this._aggsDef.tdigest = { compression };\n        return this;\n    }\n\n    /**\n     * Compression controls memory usage and approximation error. The compression\n     * value limits the maximum number of nodes to 100 * compression. By\n     * increasing the compression value, you can increase the accuracy of your\n     * percentiles at the cost of more memory. Larger compression values also make\n     * the algorithm slower since the underlying tree data structure grows in\n     * size, resulting in more expensive operations. The default compression\n     * value is 100.\n     *\n     * Alias for `tdigest`\n     *\n     * @example\n     * const agg = esb.percentilesAggregation(\n     *     'load_time_outlier',\n     *     'load_time'\n     * ).compression(200);\n     *\n     * @param {number} compression Parameter to balance memory utilization with estimation accuracy.\n     * @returns {PercentilesAggregation} returns `this` so that calls can be chained\n     */\n    compression(compression) {\n        this._aggsDef.tdigest = { compression };\n        return this;\n    }\n\n    /**\n     * HDR Histogram (High Dynamic Range Histogram) is an alternative implementation\n     * that can be useful when calculating percentiles for latency measurements\n     * as it can be faster than the t-digest implementation\n     * with the trade-off of a larger memory footprint.\n     *\n     * The HDR Histogram can be used by specifying the method parameter in the request.\n     *\n     * @example\n     * const agg = 
esb.percentilesAggregation('load_time_outlier', 'load_time')\n     *     .percents([95, 99, 99.9])\n     *     .hdr(3);\n     *\n     * @param {number} numberOfSigDigits The resolution of values\n     * for the histogram in number of significant digits\n     * @returns {PercentilesAggregation} returns `this` so that calls can be chained\n     */\n    hdr(numberOfSigDigits) {\n        this._aggsDef.hdr = {\n            number_of_significant_value_digits: numberOfSigDigits\n        };\n        return this;\n    }\n}\n\nmodule.exports = PercentilesAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/scripted-metric-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-scripted-metric-aggregation.html';\n\n/**\n * A metric aggregation that executes using scripts to provide a metric output.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-scripted-metric-aggregation.html)\n *\n * Aggregation that keeps track and returns the minimum value among numeric\n * values extracted from the aggregated documents.\n *\n * @example\n * const agg = esb.scriptedMetricAggregation('profit')\n *     .initScript('params._agg.transactions = []')\n *     .mapScript(\n *         \"params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)\"\n *     )\n *     .combineScript(\n *         'double profit = 0; for (t in params._agg.transactions) { profit += t } return profit'\n *     )\n *     .reduceScript(\n *         'double profit = 0; for (a in params._aggs) { profit += a } return profit'\n *     );\n *\n * @example\n * // Specify using file scripts\n * const agg = esb.scriptedMetricAggregation('profit')\n *     .initScript(esb.script('file', 'my_init_script'))\n *     .mapScript(esb.script('file', 'my_map_script'))\n *     .combineScript(esb.script('file', 'my_combine_script'))\n *     // script parameters for `init`, `map` and `combine` scripts must be\n *     // specified in a global params object so that\n *     // it can be shared between the scripts\n *     .params({ field: 'amount', _agg: {} })\n *     .reduceScript(esb.script('file', 'my_reduce_script'));\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n *\n * @extends MetricsAggregationBase\n */\nclass ScriptedMetricAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    
constructor(name) {\n        super(name, 'scripted_metric');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ScriptedMetricAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in ScriptedMetricAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ScriptedMetricAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in ScriptedMetricAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ScriptedMetricAggregation\n     */\n    missing() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error(\n            'missing is not supported in ScriptedMetricAggregation'\n        );\n    }\n\n    /**\n     * Sets the initialization script.\n     *\n     * Executed prior to any collection of documents. Allows the aggregation to set up any initial state.\n     *\n     * @param {string|Script} initScript The initialization script. Can be a string or an Script instance\n     * @returns {ScriptedMetricAggregation} returns `this` so that calls can be chained\n     */\n    initScript(initScript) {\n        this._aggsDef.init_script = initScript;\n        return this;\n    }\n\n    /**\n     * Sets the map script. This is the only required script.\n     *\n     * Executed once per document collected.\n     * If no combine_script is specified, the resulting state needs to be stored in an object named _agg.\n     *\n     * @param {string|Script} mapScript The map script. 
Can be a string or an Script instance\n     * @returns {ScriptedMetricAggregation} returns `this` so that calls can be chained\n     */\n    mapScript(mapScript) {\n        this._aggsDef.map_script = mapScript;\n        return this;\n    }\n\n    /**\n     * Sets the combine phase script.\n     *\n     * Executed once on each shard after document collection is complete.\n     * Allows the aggregation to consolidate the state returned from each shard.\n     * If a combine_script is not provided the combine phase will return the aggregation variable.\n     *\n     * @param {string|Script} combineScript The combine script. Can be a string or an Script instance\n     * @returns {ScriptedMetricAggregation} returns `this` so that calls can be chained\n     */\n    combineScript(combineScript) {\n        this._aggsDef.combine_script = combineScript;\n        return this;\n    }\n\n    /**\n     * Sets the reduce phase script.\n     *\n     * Executed once on the coordinating node after all shards have returned their results.\n     * The script is provided with access to a variable _aggs\n     * which is an array of the result of the combine_script on each shard.\n     * If a reduce_script is not provided the reduce phase will return the _aggs variable.\n     *\n     * @param {string|Script} reduceScript The reduce script. Can be a string or an Script instance\n     * @returns {ScriptedMetricAggregation} returns `this` so that calls can be chained\n     */\n    reduceScript(reduceScript) {\n        this._aggsDef.reduce_script = reduceScript;\n        return this;\n    }\n\n    /**\n     * Sets the params for scripts.\n     *\n     * Optional object whose contents will be passed as variables to\n     * the init_script, map_script and combine_script\n     *\n     * If you specify script parameters then you must specify `\"_agg\": {}`.\n     *\n     * @param {Object} params Object passed to init, map and combine script. 
Default value - `{ \"_agg\": {} }`\n     * @returns {ScriptedMetricAggregation} returns `this` so that calls can be chained\n     */\n    params(params) {\n        // TODO: If sure, add validation to see that _agg: {} is present in params\n        this._aggsDef.params = params;\n        return this;\n    }\n}\n\nmodule.exports = ScriptedMetricAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/stats-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\n/**\n * A multi-value metrics aggregation that computes stats over numeric values\n * extracted from the aggregated documents. These values can be extracted either\n * from specific numeric fields in the documents, or be generated by a provided\n * script.\n *\n * The stats that are returned consist of: min, max, sum, count and avg.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-stats-aggregation.html)\n *\n * Aggregation that computes stats over numeric values extracted from the\n * aggregated documents.\n *\n * @example\n * const agg = esb.statsAggregation('grades_stats', 'grade');\n *\n *\n * @example\n * // Use a file script\n * const agg = esb.statsAggregation('grades_stats').script(\n *     esb.script('file', 'my_script').params({ field: 'price' })\n * );\n *\n * @example\n * // Value script to apply the conversion rate to every value\n * // before it is aggregated\n * const agg = esb.statsAggregation('grades_stats').script(\n *     esb.script('inline', '_value * params.conversion_rate').params({\n *         conversion_rate: 1.2\n *     })\n * );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass StatsAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'stats', field);\n    }\n}\n\nmodule.exports = StatsAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/sum-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\n/**\n * A single-value metrics aggregation that sums up numeric values that are\n * extracted from the aggregated documents. These values can be extracted either\n * from specific numeric fields in the documents, or be generated by a\n * provided script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-stats-aggregation.html)\n *\n * Aggregation that sums up numeric values that are extracted from the\n * aggregated documents.\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.constantScoreQuery(esb.matchQuery('type', 'hat')))\n *     .agg(esb.sumAggregation('hat_prices', 'price'));\n *\n * @example\n * // Script to fetch the sales price\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.constantScoreQuery(esb.matchQuery('type', 'hat')))\n *     .agg(\n *         esb.sumAggregation('hat_prices').script(\n *             esb.script('inline', 'doc.price.value')\n *         )\n *     );\n *\n * @example\n * // Access the field value from the script using `_value`\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.constantScoreQuery(esb.matchQuery('type', 'hat')))\n *     .agg(\n *         esb.sumAggregation('square_hats', 'price').script(\n *             esb.script('inline', '_value * _value')\n *         )\n *     );\n *\n * @example\n * // Treat documents missing price as if they had a value\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.constantScoreQuery(esb.matchQuery('type', 'hat')))\n *     .agg(esb.sumAggregation('hat_prices', 'price').missing(100));\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass SumAggregation extends MetricsAggregationBase {\n    /**\n     * Creates an 
instance of `SumAggregation`\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    constructor(name, field) {\n        super(name, 'sum', field);\n    }\n}\n\nmodule.exports = SumAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/top-hits-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base'),\n    {\n        Highlight,\n        Sort,\n        util: { checkType, setDefault }\n    } = require('../../core');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-top-hits-aggregation.html';\n\n/**\n * A `top_hits` metric aggregator keeps track of the most relevant document being\n * aggregated. This aggregator is intended to be used as a sub aggregator, so that\n * the top matching documents can be aggregated per bucket.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-top-hits-aggregation.html)\n *\n * `top_hits` metric aggregator keeps track of the most relevant document being\n * aggregated.\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.termsAggregation('top_tags', 'type')\n *             .size(3)\n *             .agg(\n *                 esb.topHitsAggregation('top_sales_hits')\n *                     .sort(esb.sort('date', 'desc'))\n *                     .source({ includes: ['date', 'price'] })\n *                     .size(1)\n *             )\n *     )\n *     .size(0);\n *\n * @example\n * // Field collapsing(logically groups a result set into\n * // groups and per group returns top documents)\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchQuery('body', 'elections'))\n *     .agg(\n *         esb.termsAggregation('top-sites', 'domain')\n *             .order('top_hit', 'desc')\n *             .agg(esb.topHitsAggregation('top_tags_hits'))\n *             .agg(\n *                 esb.maxAggregation('top_hit').script(\n *                     esb.script('inline', '_score')\n *                 )\n *             )\n *     );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n *\n * @extends 
MetricsAggregationBase\n */\nclass TopHitsAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        super(name, 'top_hits');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on TopHitsAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in TopHitsAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on TopHitsAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('script is not supported in TopHitsAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on TopHitsAggregation\n     */\n    missing() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('missing is not supported in TopHitsAggregation');\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on TopHitsAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in TopHitsAggregation');\n    }\n\n    /**\n     * Sets the offset for fetching result.\n     *\n     * @param {number} from The offset from the first result you want to fetch.\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    from(from) {\n        this._aggsDef.from = from;\n        return this;\n    }\n\n    /**\n     * Sets the maximum number of top matching hits to return per bucket.\n     *\n     * @param {number} size The number of aggregation entries to be returned per bucket.\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    size(size) {\n        this._aggsDef.size = size;\n        return this;\n    }\n\n    /**\n     * How the top matching hits should be sorted. 
Allows to add sort on specific field.\n     * The sort can be reversed as well. The sort is defined on a per field level,\n     * with special field name for `_score` to sort by score, and `_doc` to sort by\n     * index order.\n     *\n     * @param {Sort} sort How the top matching hits should be sorted.\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained.\n     * @throws {TypeError} If parameter `sort` is not an instance of `Sort`.\n     */\n    sort(sort) {\n        checkType(sort, Sort);\n\n        setDefault(this._aggsDef, 'sort', []);\n\n        this._aggsDef.sort.push(sort);\n        return this;\n    }\n\n    /**\n     * Allows to add multiple sort on specific fields. Each sort can be reversed as well.\n     * The sort is defined on a per field level, with special field name for _score to\n     * sort by score, and _doc to sort by index order.\n     *\n     * @param {Array<Sort>} sorts Array of `Sort` objects defining how the top matching hits should be sorted.\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained.\n     * @throws {TypeError} If any item in parameter `sorts` is not an instance of `Sort`.\n     */\n    sorts(sorts) {\n        sorts.forEach(sort => this.sort(sort));\n        return this;\n    }\n\n    /**\n     * Enables score computation and tracking during sorting.\n     * By default, sorting scores are not computed.\n     *\n     * @param {boolean} trackScores If scores should be computed and tracked. 
Defaults to false.\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    trackScores(trackScores) {\n        this._aggsDef.track_scores = trackScores;\n        return this;\n    }\n\n    /**\n     * Enable/Disable returning version number for each hit.\n     *\n     * @param {boolean} version true to enable, false to disable\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    version(version) {\n        this._aggsDef.version = version;\n        return this;\n    }\n\n    /**\n     * Enable/Disable explanation of score for each hit.\n     *\n     * @param {boolean} explain true to enable, false to disable\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    explain(explain) {\n        this._aggsDef.explain = explain;\n        return this;\n    }\n\n    /**\n     * Performs highlighting based on the `Highlight` settings.\n     *\n     * @param {Highlight} highlight\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    highlight(highlight) {\n        checkType(highlight, Highlight);\n\n        this._aggsDef.highlight = highlight;\n        return this;\n    }\n\n    /**\n     * Allows to control how the `_source` field is returned with every hit.\n     * You can turn off `_source` retrieval by passing `false`.\n     * It also accepts one(string) or more wildcard(array) patterns to control\n     * what parts of the `_source` should be returned\n     * An object can also be used to specify the wildcard patterns for `includes` and `excludes`.\n     *\n     * @param {boolean|string|Array|Object} source\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    source(source) {\n        this._aggsDef._source = source;\n        return this;\n    }\n\n    /**\n     * The stored_fields parameter is about fields that are explicitly marked as stored in the mapping.\n     * 
Selectively load specific stored fields for each document represented by a search hit\n     * using array of stored fields.\n     * An empty array will cause only the _id and _type for each hit to be returned.\n     * To disable the stored fields (and metadata fields) entirely use: '_none_'\n     *\n     * @param {Array|string} fields\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    storedFields(fields) {\n        this._aggsDef.stored_fields = fields;\n        return this;\n    }\n\n    /**\n     * Computes a document property dynamically based on the supplied `Script`.\n     *\n     * @param {string} scriptFieldName\n     * @param {string|Script} script string or instance of `Script`\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    scriptField(scriptFieldName, script) {\n        setDefault(this._aggsDef, 'script_fields', {});\n\n        this._aggsDef.script_fields[scriptFieldName] = { script };\n        return this;\n    }\n\n    /**\n     * Sets given dynamic document properties to be computed using supplied `Script`s.\n     *\n     * Object should have `scriptFieldName` as key and `script` as the value.\n     *\n     * @param {Object} scriptFields Object with `scriptFieldName` as key and `script` as the value.\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    scriptFields(scriptFields) {\n        checkType(scriptFields, Object);\n\n        Object.keys(scriptFields).forEach(scriptFieldName =>\n            this.scriptField(scriptFieldName, scriptFields[scriptFieldName])\n        );\n\n        return this;\n    }\n\n    /**\n     * Allows to return the doc value representation of a field for each hit.\n     * Doc value fields can work on fields that are not stored.\n     *\n     * @param {Array<string>} fields\n     * @returns {TopHitsAggregation} returns `this` so that calls can be chained\n     */\n    docvalueFields(fields) 
{\n        this._aggsDef.docvalue_fields = fields;\n        return this;\n    }\n}\n\nmodule.exports = TopHitsAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/value-count-aggregation.js",
    "content": "'use strict';\n\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-valuecount-aggregation.html';\n\n/**\n * A single-value metrics aggregation that counts the number of values that\n * are extracted from the aggregated documents. These values can be extracted\n * either from specific fields in the documents, or be generated by a provided\n * script. Typically, this aggregator will be used in conjunction with other\n * single-value aggregations.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-valuecount-aggregation.html)\n *\n * Aggregation that counts the number of values that are extracted from the\n * aggregated documents.\n *\n * @example\n * const agg = esb.valueCountAggregation('types_count', 'type');\n *\n * @example\n * const agg = esb.valueCountAggregation('types_count').script(\n *     esb.script('inline', \"doc['type'].value\")\n * );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} field The field to aggregate on\n *\n * @extends MetricsAggregationBase\n */\nclass ValueCountAggregation extends MetricsAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super(name, 'value_count', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on ValueCountAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in ValueCountAggregation');\n    }\n}\n\nmodule.exports = ValueCountAggregation;\n"
  },
  {
    "path": "src/aggregations/metrics-aggregations/weighted-average-aggregation.js",
    "content": "'use strict';\n\nconst { Script } = require('../../core');\nconst MetricsAggregationBase = require('./metrics-aggregation-base');\nconst _ = require('../../_');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-weight-avg-aggregation.html';\n\n/**\n * A single-value metrics aggregation that computes the weighted average of numeric values that are extracted from the aggregated documents.\n * These values can be extracted either from specific numeric fields in the documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-weight-avg-aggregation.html)\n *\n * Added in Elasticsearch v6.4.0\n * [Release notes](https://www.elastic.co/guide/en/elasticsearch/reference/6.4/release-notes-6.4.0.html)\n *\n * As a formula, a weighted average is ∑(value * weight) / ∑(weight)\n *\n * @example\n * // Compute the average grade over all documents, weighing by teacher score.\n * const agg = esb.weightedAverageAggregation('avg_grade', 'grade', 'teacher_score');\n *\n * @example\n * // Compute the average grade where the weight is calculated by a script.\n * // Filling in missing values as '10'.\n * const agg = esb.weightedAverageAggregation('avg_grade', 'grade')\n *      .weight(esb.script('inline', \"doc['teacher_score'].value\").lang('painless'), 10)\n * );\n *\n * @example\n * // Compute the average grade, weighted by teacher score, filling in missing values.\n * const agg = esb.weightedAverageAggregation('avg_grade').value('grade', 5).weight('teacher_score', 10));\n *\n * @example\n * // Compute the average grade over all documents, weighing by teacher score.\n * const agg = esb.weightedAverageAggregation('avg_grade').value('grade').weight('teacher_score');\n *\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} value The field or script to use as the value\n * 
@param {string=} weight The field or script to use as the weight\n *\n * @extends MetricsAggregationBase\n */\nclass WeightedAverageAggregation extends MetricsAggregationBase {\n    /**\n     * Creates an instance of `WeightedAverageAggregation`\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} value The field or script to be used as the value.\n     * @param {string=} weight The field or script to be used as the weighting.\n     */\n    constructor(name, value, weight) {\n        super(name, 'weighted_avg');\n\n        this._aggsDef.value = {};\n        this._aggsDef.weight = {};\n\n        if (!_.isNil(value)) {\n            this.value(value);\n        }\n\n        if (!_.isNil(weight)) {\n            this.weight(weight);\n        }\n    }\n\n    /**\n     * Sets the value\n     *\n     * @param {string | Script} value Field name or script to use as the value.\n     *\n     * @param {number=} missing Sets the missing parameter which defines how documents\n     * that are missing a value should be treated.\n     * @returns {WeightedAverageAggregation} returns `this` so that calls can be chained\n     */\n    value(value, missing) {\n        if (typeof value !== 'string' && !(value instanceof Script)) {\n            throw new TypeError(\n                'Value must be either a string or instanceof Script'\n            );\n        }\n\n        if (value instanceof Script) {\n            if (this._aggsDef.value.field) {\n                delete this._aggsDef.value.field;\n            }\n            this._aggsDef.value.script = value;\n        } else {\n            if (this._aggsDef.value.script) {\n                delete this._aggsDef.value.script;\n            }\n            this._aggsDef.value.field = value;\n        }\n\n        if (!_.isNil(missing)) {\n            this._aggsDef.value.missing = missing;\n        }\n\n        return this;\n    }\n\n    /**\n     * Sets the weight\n     *\n     * 
@param {string | Script} weight Field name or script to use as the weight.\n     * @param {number=} missing Sets the missing parameter which defines how documents\n     * that are missing a value should be treated.\n     * @returns {WeightedAverageAggregation} returns `this` so that calls can be chained\n     */\n    weight(weight, missing) {\n        if (typeof weight !== 'string' && !(weight instanceof Script)) {\n            throw new TypeError(\n                'Weight must be either a string or instanceof Script'\n            );\n        }\n\n        if (weight instanceof Script) {\n            if (this._aggsDef.weight.field) {\n                delete this._aggsDef.weight.field;\n            }\n            this._aggsDef.weight.script = weight;\n        } else {\n            if (this._aggsDef.weight.script) {\n                delete this._aggsDef.weight.script;\n            }\n            this._aggsDef.weight.field = weight;\n        }\n\n        if (!_.isNil(missing)) {\n            this._aggsDef.weight.missing = missing;\n        }\n\n        return this;\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on WeightedAverageAggregation\n     */\n    script() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error(\n            'script is not supported in WeightedAverageAggregation'\n        );\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on WeightedAverageAggregation\n     */\n    missing() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error(\n            'missing is not supported in WeightedAverageAggregation'\n        );\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on WeightedAverageAggregation\n     */\n    field() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('field is not supported in WeightedAverageAggregation');\n    }\n}\n\nmodule.exports = 
WeightedAverageAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/avg-bucket-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-avg-bucket-aggregation.html';\n\n/**\n * A sibling pipeline aggregation which calculates the (mean) average value\n * of a specified metric in a sibling aggregation. The specified metric must\n * be numeric and the sibling aggregation must be a multi-bucket aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-avg-bucket-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *     )\n *     .agg(\n *         esb.avgBucketAggregation(\n *             'avg_monthly_sales',\n *             'sales_per_month>sales'\n *         )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass AvgBucketAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'avg_bucket', ES_REF_URL, bucketsPath);\n    }\n}\n\nmodule.exports = AvgBucketAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/bucket-script-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-script-aggregation.html';\n\n/**\n * A parent pipeline aggregation which executes a script which can perform\n * per bucket computations on specified metrics in the parent multi-bucket\n * aggregation. The specified metric must be numeric and the script must\n * return a numeric value.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-script-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date', 'month')\n *             .agg(esb.sumAggregation('total_sales', 'price'))\n *             .agg(\n *                 esb.filterAggregation('t-shirts')\n *                     .filter(esb.termQuery('type', 't-shirt'))\n *                     .agg(esb.sumAggregation('sales', 'price'))\n *             )\n *             .agg(\n *                 esb.bucketScriptAggregation('t-shirt-percentage')\n *                     .bucketsPath({\n *                         tShirtSales: 't-shirts>sales',\n *                         totalSales: 'total_sales'\n *                     })\n *                     .script('params.tShirtSales / params.totalSales * 100')\n *             )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass BucketScriptAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'bucket_script', ES_REF_URL, bucketsPath);\n    }\n\n    /**\n     * Sets script parameter for 
aggregation.\n     *\n     * @param {Script|string} script\n     * @returns {BucketScriptAggregation} returns `this` so that calls can be chained\n     */\n    script(script) {\n        this._aggsDef.script = script;\n        return this;\n    }\n}\n\nmodule.exports = BucketScriptAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/bucket-selector-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-selector-aggregation.html';\n\n/**\n * A parent pipeline aggregation which executes a script which determines whether\n * the current bucket will be retained in the parent multi-bucket aggregation.\n * The specified metric must be numeric and the script must return a boolean value.\n * If the script language is expression then a numeric return value is permitted.\n * In this case 0.0 will be evaluated as false and all other values will evaluate to true.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-selector-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('histo', 'date')\n *             .interval('day')\n *             .agg(esb.termsAggregation('categories', 'category'))\n *             .agg(\n *                 esb.bucketSelectorAggregation('min_bucket_selector')\n *                     .bucketsPath({ count: 'categories._bucket_count' })\n *                     .script(esb.script('inline', 'params.count != 0'))\n *             )\n *     )\n *     .size(0);\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *             .agg(\n *                 esb.bucketSelectorAggregation('sales_bucket_filter')\n *                     .bucketsPath({ totalSales: 'total_sales' })\n *                     .script('params.totalSales > 200')\n *             )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} 
bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass BucketSelectorAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'bucket_selector', ES_REF_URL, bucketsPath);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on BucketSelectorAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in BucketSelectorAggregation');\n    }\n\n    /**\n     * Sets script parameter for aggregation. Required.\n     *\n     * @param {Script|string} script\n     * @returns {BucketSelectorAggregation} returns `this` so that calls can be chained\n     */\n    script(script) {\n        this._aggsDef.script = script;\n        return this;\n    }\n}\n\nmodule.exports = BucketSelectorAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/bucket-sort-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-sort-aggregation.html';\n\n/**\n * A parent pipeline aggregation which sorts the buckets of its parent\n * multi-bucket aggregation. Zero or more sort fields may be specified\n * together with the corresponding sort order. Each bucket may be sorted\n * based on its _key, _count or its sub-aggregations. In addition, parameters\n * from and size may be set in order to truncate the result buckets.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-sort-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.bucketSortAggregation('sort')\n *             .sort([\n *                  esb.sort('user', 'desc')\n *              ])\n *              .from(5)\n *              .size(10)\n *         )\n *     );\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n *\n * @extends PipelineAggregationBase\n */\nclass BucketSortAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        super(name, 'bucket_sort', ES_REF_URL);\n    }\n\n    /**\n     * Sets the list of fields to sort on. Optional.\n     *\n     * @param {Array<Sort>} sort The list of fields to sort on\n     * @returns {BucketSortAggregation} returns `this` so that calls can be chained\n     */\n    sort(sort) {\n        this._aggsDef.sort = sort;\n        return this;\n    }\n\n    /**\n     * Sets the value buckets in positions prior to which will be truncated. 
Optional.\n     *\n     * @param {number} from Buckets in positions prior to the set value will be truncated.\n     * @returns {BucketSortAggregation} returns `this` so that calls can be chained\n     */\n    from(from) {\n        this._aggsDef.from = from;\n        return this;\n    }\n\n    /**\n     * Sets the number of buckets to return. Optional.\n     *\n     * @param {number} size The number of buckets to return.\n     * @returns {BucketSortAggregation} returns `this` so that calls can be chained\n     */\n    size(size) {\n        this._aggsDef.size = size;\n        return this;\n    }\n}\n\nmodule.exports = BucketSortAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/cumulative-sum-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-cumulative-sum-aggregation.html';\n\n/**\n * A parent pipeline aggregation which calculates the cumulative sum of\n * a specified metric in a parent histogram (or date_histogram) aggregation.\n * The specified metric must be numeric and the enclosing histogram must\n * have min_doc_count set to 0 (default for histogram aggregations).\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-cumulative-sum-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date', 'month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *             .agg(esb.cumulativeSumAggregation('cumulative_sales', 'sales'))\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass CumulativeSumAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'cumulative_sum', ES_REF_URL, bucketsPath);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on CumulativeSumAggregation\n     */\n    gapPolicy() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error(\n            'gapPolicy is not supported in CumulativeSumAggregation'\n        );\n    }\n}\n\nmodule.exports = CumulativeSumAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/derivative-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-derivative-aggregation.html';\n\n/**\n * A parent pipeline aggregation which calculates the derivative of a\n * specified metric in a parent histogram (or date_histogram) aggregation.\n * The specified metric must be numeric and the enclosing histogram must\n * have min_doc_count set to 0 (default for histogram aggregations).\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-derivative-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *             .agg(esb.derivativeAggregation('sales_deriv', 'sales'))\n *     )\n *     .size(0);\n *\n * @example\n * // First and second order derivative of the monthly sales\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *             .agg(esb.derivativeAggregation('sales_deriv', 'sales'))\n *             .agg(esb.derivativeAggregation('sales_2nd_deriv', 'sales_deriv'))\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass DerivativeAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'derivative', ES_REF_URL, bucketsPath);\n    }\n\n    /**\n     * Set the units of the 
derivative values. `unit` specifies what unit to use for\n     * the x-axis of the derivative calculation\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .agg(\n     *         esb.dateHistogramAggregation('sales_per_month', 'date')\n     *             .interval('month')\n     *             .agg(esb.sumAggregation('sales', 'price'))\n     *             .agg(esb.derivativeAggregation('sales_deriv', 'sales').unit('day'))\n     *     )\n     *     .size(0);\n     *\n     * @param {string} unit `unit` specifies what unit to use for\n     * the x-axis of the derivative calculation\n     * @returns {DerivativeAggregation} returns `this` so that calls can be chained\n     */\n    unit(unit) {\n        this._aggsDef.unit = unit;\n        return this;\n    }\n}\n\nmodule.exports = DerivativeAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/extended-stats-bucket-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-extended-stats-bucket-aggregation.html';\n\n/**\n * A sibling pipeline aggregation which calculates a variety of stats across\n * all bucket of a specified metric in a sibling aggregation. The specified\n * metric must be numeric and the sibling aggregation must be a multi-bucket\n * aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-extended-stats-bucket-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *     )\n *     .agg(\n *         // Calculates extended stats for monthly sales\n *         esb.extendedStatsBucketAggregation(\n *             'stats_monthly_sales',\n *             'sales_per_month>sales'\n *         )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass ExtendedStatsBucketAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'extended_stats_bucket', ES_REF_URL, bucketsPath);\n    }\n\n    /**\n     * Sets the number of standard deviations above/below the mean to display.\n     * Optional.\n     *\n     * @param {number} sigma Default is 2.\n     * @returns {ExtendedStatsBucketAggregation} returns `this` so that calls can be chained\n     */\n    sigma(sigma) {\n        this._aggsDef.sigma = sigma;\n        return this;\n    }\n}\n\nmodule.exports 
= ExtendedStatsBucketAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/index.js",
    "content": "'use strict';\n\nexports.PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nexports.AvgBucketAggregation = require('./avg-bucket-aggregation');\nexports.DerivativeAggregation = require('./derivative-aggregation');\nexports.MaxBucketAggregation = require('./max-bucket-aggregation');\nexports.MinBucketAggregation = require('./min-bucket-aggregation');\nexports.SumBucketAggregation = require('./sum-bucket-aggregation');\nexports.StatsBucketAggregation = require('./stats-bucket-aggregation');\nexports.ExtendedStatsBucketAggregation = require('./extended-stats-bucket-aggregation');\nexports.PercentilesBucketAggregation = require('./percentiles-bucket-aggregation');\nexports.MovingAverageAggregation = require('./moving-average-aggregation');\nexports.MovingFunctionAggregation = require('./moving-function-aggregation');\nexports.CumulativeSumAggregation = require('./cumulative-sum-aggregation');\nexports.BucketScriptAggregation = require('./bucket-script-aggregation');\nexports.BucketSelectorAggregation = require('./bucket-selector-aggregation');\nexports.SerialDifferencingAggregation = require('./serial-differencing-aggregation');\nexports.BucketSortAggregation = require('./bucket-sort-aggregation');\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/max-bucket-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-max-bucket-aggregation.html';\n\n/**\n * A sibling pipeline aggregation which identifies the bucket(s) with\n * the maximum value of a specified metric in a sibling aggregation and\n * outputs both the value and the key(s) of the bucket(s). The specified\n * metric must be numeric and the sibling aggregation must be a multi-bucket\n * aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-max-bucket-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *     )\n *     .agg(\n *         // Metric embedded in sibling aggregation\n *         // Get the maximum value of `sales` aggregation in\n *         // `sales_per_month` histogram\n *         esb.maxBucketAggregation(\n *             'max_monthly_sales',\n *             'sales_per_month>sales'\n *         )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass MaxBucketAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'max_bucket', ES_REF_URL, bucketsPath);\n    }\n}\n\nmodule.exports = MaxBucketAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/min-bucket-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-min-bucket-aggregation.html';\n\n/**\n * A sibling pipeline aggregation which identifies the bucket(s) with\n * the minimum value of a specified metric in a sibling aggregation and\n * outputs both the value and the key(s) of the bucket(s). The specified\n * metric must be numeric and the sibling aggregation must be a multi-bucket\n * aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-min-bucket-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *     )\n *     .agg(\n *         // Metric embedded in sibling aggregation\n *         // Get the minimum value of `sales` aggregation in\n *         // `sales_per_month` histogram\n *         esb.minBucketAggregation(\n *             'min_monthly_sales',\n *             'sales_per_month>sales'\n *         )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass MinBucketAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'min_bucket', ES_REF_URL, bucketsPath);\n    }\n}\n\nmodule.exports = MinBucketAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/moving-average-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { invalidParam },\n    consts: { MODEL_SET }\n} = require('../../core');\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-movavg-aggregation.html';\n\nconst invalidModelParam = invalidParam(ES_REF_URL, 'model', MODEL_SET);\n\n/**\n * Given an ordered series of data, the Moving Average aggregation will\n * slide a window across the data and emit the average value of that window.\n *\n * `moving_avg` aggregations must be embedded inside of a histogram or\n * date_histogram aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-movavg-aggregation.html)\n *\n * @example\n * const agg = esb.movingAverageAggregation('the_movavg', 'the_sum')\n *     .model('holt')\n *     .window(5)\n *     .gapPolicy('insert_zeros')\n *     .settings({ alpha: 0.8 });\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('my_date_histo', 'timestamp')\n *             .interval('day')\n *             .agg(esb.sumAggregation('the_sum', 'lemmings'))\n *             // Relative path to sibling metric `the_sum`\n *             .agg(esb.movingAverageAggregation('the_movavg', 'the_sum'))\n *     )\n *     .size(0);\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('my_date_histo', 'timestamp')\n *             .interval('day')\n *             // Use the document count as it's input\n *             .agg(esb.movingAverageAggregation('the_movavg', '_count'))\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * 
@extends PipelineAggregationBase\n */\nclass MovingAverageAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'moving_avg', ES_REF_URL, bucketsPath);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on MovingAverageAggregation\n     */\n    format() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('format is not supported in MovingAverageAggregation');\n    }\n\n    /**\n     * Sets the moving average weighting model that we wish to use. Optional.\n     *\n     * @example\n     * const agg = esb.movingAverageAggregation('the_movavg', 'the_sum')\n     *     .model('simple')\n     *     .window(30);\n     *\n     * @example\n     * const agg = esb.movingAverageAggregation('the_movavg', 'the_sum')\n     *     .model('ewma')\n     *     .window(30)\n     *     .settings({ alpha: 0.8 });\n     *\n     * @param {string} model Can be `simple`, `linear`,\n     * `ewma` (aka \"single-exponential\"), `holt` (aka \"double exponential\")\n     * or `holt_winters` (aka \"triple exponential\").\n     * Default is `simple`\n     * @returns {MovingAverageAggregation} returns `this` so that calls can be chained\n     */\n    model(model) {\n        if (_.isNil(model)) invalidModelParam(model);\n\n        const modelLower = model.toLowerCase();\n        if (!MODEL_SET.has(modelLower)) invalidModelParam(model);\n\n        this._aggsDef.model = modelLower;\n        return this;\n    }\n\n    /**\n     * Sets the size of window to \"slide\" across the histogram. 
Optional.\n     *\n     * @example\n     * const agg = esb.movingAverageAggregation('the_movavg', 'the_sum')\n     *     .model('simple')\n     *     .window(30)\n     *\n     * @param {number} window Default is 5\n     * @returns {MovingAverageAggregation} returns `this` so that calls can be chained\n     */\n    window(window) {\n        this._aggsDef.window = window;\n        return this;\n    }\n\n    /**\n     * If the model should be algorithmically minimized. Optional.\n     * Applicable on EWMA, Holt-Linear, Holt-Winters.\n     * Minimization is disabled by default for `ewma` and `holt_linear`,\n     * while it is enabled by default for `holt_winters`.\n     *\n     * @example\n     * const agg = esb.movingAverageAggregation('the_movavg', 'the_sum')\n     *     .model('holt_winters')\n     *     .window(30)\n     *     .minimize(true)\n     *     .settings({ period: 7 });\n     *\n     * @param {boolean} enable `false` for most models\n     * @returns {MovingAverageAggregation} returns `this` so that calls can be chained\n     */\n    minimize(enable) {\n        this._aggsDef.minimize = enable;\n        return this;\n    }\n\n    /**\n     * Model-specific settings, contents which differ depending on the model specified.\n     * Optional.\n     *\n     * @example\n     * const agg = esb.movingAverageAggregation('the_movavg', 'the_sum')\n     *     .model('ewma')\n     *     .window(30)\n     *     .settings({ alpha: 0.8 });\n     *\n     * @param {Object} settings\n     * @returns {MovingAverageAggregation} returns `this` so that calls can be chained\n     */\n    settings(settings) {\n        this._aggsDef.settings = settings;\n        return this;\n    }\n\n    /**\n     * Enable \"prediction\" mode, which will attempt to extrapolate into the future given\n     * the current smoothed, moving average\n     *\n     * @example\n     * const agg = esb.movingAverageAggregation('the_movavg', 'the_sum')\n     *     .model('simple')\n     *     .window(30)\n     
*     .predict(10);\n     *\n     * @param {number} predict the number of predictions you would like appended to the\n     * end of the series\n     * @returns {MovingAverageAggregation} returns `this` so that calls can be chained\n     */\n    predict(predict) {\n        this._aggsDef.predict = predict;\n        return this;\n    }\n}\n\nmodule.exports = MovingAverageAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/moving-function-aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-movfn-aggregation.html';\n\n/**\n * Given an ordered series of data, the Moving Function aggregation\n * will slide a window across the data and allow the user to specify\n * a custom script that is executed on each window of data.\n * For convenience, a number of common functions are predefined such as min/max, moving averages, etc.\n *\n * `moving_fn` aggregations must be embedded inside of a histogram or\n * date_histogram aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-movfn-aggregation.html)\n *\n * NOTE: Only available in Elasticsearch 6.4.0+.\n *\n * @example\n * const agg = esb.movingFunctionAggregation('the_movfn', 'the_sum')\n *     .model('holt')\n *     .window(5)\n *     .gapPolicy('insert_zeros')\n *     .settings({ alpha: 0.8 });\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('my_date_histo', 'timestamp')\n *             .interval('day')\n *             .agg(esb.sumAggregation('the_sum', 'lemmings'))\n *             // Relative path to sibling metric `the_sum`\n *             .agg(esb.movingFunctionAggregation('the_movfn', 'the_sum'))\n *     )\n *     .size(0);\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('my_date_histo', 'timestamp')\n *             .interval('day')\n *             // Use the document count as it's input\n *             .agg(esb.movingFunctionAggregation('the_movfn', '_count'))\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric 
to aggregate over.\n * @param {number=} window The size of window to \"slide\" across the histogram.\n * @param {string=} script The script that should be executed on each window of data.\n *\n * @extends PipelineAggregationBase\n */\nclass MovingFunctionAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath, window, script) {\n        super(name, 'moving_fn', ES_REF_URL, bucketsPath);\n\n        if (!_.isNil(window)) this._aggsDef.window = window;\n        if (!_.isNil(script)) this._aggsDef.script = script;\n    }\n\n    /**\n     * Sets the size of window to \"slide\" across the histogram. Optional.\n     *\n     * @example\n     * const agg = esb.movingFunctionAggregation('the_movfn', 'the_sum')\n     *     .window(30)\n     *\n     * @param {number} window Default is 5\n     * @returns {MovingFunctionAggregation} returns `this` so that calls can be chained\n     */\n    window(window) {\n        this._aggsDef.window = window;\n        return this;\n    }\n\n    /**\n     * Sets shift of window position. Optional.\n     *\n     * @example\n     * const agg = esb.movingFunctionAggregation('the_movfn', 'the_sum')\n     *     .shift(30)\n     *\n     * @param {number} shift Default is 0\n     * @returns {MovingFunctionAggregation} returns `this` so that calls can be chained\n     */\n    shift(shift) {\n        this._aggsDef.shift = shift;\n        return this;\n    }\n\n    /**\n     * Sets the script that should be executed on each window of data. 
Required.\n     *\n     * @example\n     * const agg = esb.movingFunctionAggregation('the_movfn', 'the_sum')\n     *     .script(\"MovingFunctions.unweightedAvg(values)\")\n     *\n     * @param {string} script\n     * @returns {MovingFunctionAggregation} returns `this` so that calls can be chained\n     */\n    script(script) {\n        this._aggsDef.script = script;\n        return this;\n    }\n}\n\nmodule.exports = MovingFunctionAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/percentiles-bucket-aggregation.js",
    "content": "'use strict';\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-percentiles-bucket-aggregation.html';\n\n/**\n * A sibling pipeline aggregation which calculates percentiles across all\n * bucket of a specified metric in a sibling aggregation. The specified\n * metric must be numeric and the sibling aggregation must be a multi-bucket\n * aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-percentiles-bucket-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *     )\n *     .agg(\n *         // Calculates stats for monthly sales\n *         esb.percentilesBucketAggregation(\n *             'percentiles_monthly_sales',\n *             'sales_per_month>sales'\n *         ).percents([25.0, 50.0, 75.0])\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass PercentilesBucketAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'percentiles_bucket', ES_REF_URL, bucketsPath);\n    }\n\n    /**\n     * Sets the list of percentiles to calculate\n     *\n     * @param {Array<number>} percents The list of percentiles to calculate\n     * @returns {PercentilesBucketAggregation} returns `this` so that calls can be chained\n     */\n    percents(percents) {\n        checkType(percents, 
Array);\n\n        this._aggsDef.percents = percents;\n        return this;\n    }\n}\n\nmodule.exports = PercentilesBucketAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/pipeline-aggregation-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Aggregation,\n    util: { invalidParam }\n} = require('../../core');\n\nconst invalidGapPolicyParam = invalidParam(\n    '',\n    'gap_policy',\n    \"'skip' or 'insert_zeros'\"\n);\n\n/**\n * The `PipelineAggregationBase` provides support for common options used across\n * various pipeline `Aggregation` implementations.\n *\n * Pipeline aggregations cannot have sub-aggregations but depending on the type\n * it can reference another pipeline in the buckets_path allowing pipeline\n * aggregations to be chained. For example, you can chain together two derivatives\n * to calculate the second derivative (i.e. a derivative of a derivative).\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} name a valid aggregation name\n * @param {string} aggType type of aggregation\n * @param {string} refUrl Elasticsearch reference URL\n * @param {string|Object=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends Aggregation\n */\nclass PipelineAggregationBase extends Aggregation {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, aggType, refUrl, bucketsPath) {\n        super(name, aggType);\n\n        this._refUrl = refUrl;\n\n        if (!_.isNil(bucketsPath)) this._aggsDef.buckets_path = bucketsPath;\n    }\n\n    /**\n     * Sets the relative path, `buckets_path`, which refers to the metric to aggregate over.\n     * Required.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html#buckets-path-syntax)\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .agg(\n     *         esb.dateHistogramAggregation('histo', 'date')\n     *             .interval('day')\n     *             
.agg(esb.termsAggregation('categories', 'category'))\n     *             .agg(\n     *                 esb.bucketSelectorAggregation('min_bucket_selector')\n     *                     .bucketsPath({ count: 'categories._bucket_count' })\n     *                     .script(esb.script('inline', 'params.count != 0'))\n     *             )\n     *     )\n     *     .size(0);\n     *\n     * @param {string|Object} path\n     * @returns {PipelineAggregationBase} returns `this` so that calls can be chained\n     */\n    bucketsPath(path) {\n        this._aggsDef.buckets_path = path;\n        return this;\n    }\n\n    /**\n     * Set policy for missing data. Optional.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html#gap-policy)\n     *\n     * @param {string} policy Can be `skip` or `insert_zeros`\n     * @returns {PipelineAggregationBase} returns `this` so that calls can be chained\n     */\n    gapPolicy(policy) {\n        if (_.isNil(policy)) invalidGapPolicyParam(policy, this._refUrl);\n\n        const policyLower = policy.toLowerCase();\n        if (policyLower !== 'skip' && policyLower !== 'insert_zeros') {\n            invalidGapPolicyParam(policy, this._refUrl);\n        }\n\n        this._aggsDef.gap_policy = policyLower;\n        return this;\n    }\n\n    /**\n     * Sets the format expression if applicable. Optional.\n     *\n     * @param {string} fmt Format mask to apply on aggregation response. Example: ####.00\n     * @returns {PipelineAggregationBase} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._aggsDef.format = fmt;\n        return this;\n    }\n}\n\nmodule.exports = PipelineAggregationBase;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/serial-differencing-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-serialdiff-aggregation.html';\n\n/**\n * Serial differencing is a technique where values in a time series are\n * subtracted from itself at different time lags or periods.\n *\n * Serial differences are built by first specifying a `histogram` or `date_histogram` over a field.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-serialdiff-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('my_date_histo', 'timestamp')\n *             .interval('day')\n *             .agg(esb.sumAggregation('the_sum', 'lemmings'))\n *             .agg(\n *                 esb.serialDifferencingAggregation(\n *                     'thirtieth_difference',\n *                     'the_sum'\n *                 ).lag(30)\n *             )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass SerialDifferencingAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'serial_diff', ES_REF_URL, bucketsPath);\n    }\n\n    /**\n     * The historical bucket to subtract from the current value.\n     * Optional.\n     *\n     * @param {number} lag Default is 1.\n     * @returns {SerialDifferencingAggregation} returns `this` so that calls can be chained\n     */\n    lag(lag) {\n        this._aggsDef.lag = lag;\n        return this;\n    }\n}\n\nmodule.exports = SerialDifferencingAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/stats-bucket-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-stats-bucket-aggregation.html';\n\n/**\n * A sibling pipeline aggregation which calculates a variety of stats across\n * all bucket of a specified metric in a sibling aggregation. The specified\n * metric must be numeric and the sibling aggregation must be a multi-bucket\n * aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-stats-bucket-aggregation.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *     )\n *     .agg(\n *         // Calculates stats for monthly sales\n *         esb.statsBucketAggregation(\n *             'stats_monthly_sales',\n *             'sales_per_month>sales'\n *         )\n *     )\n *     .size(0);\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @extends PipelineAggregationBase\n */\nclass StatsBucketAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'stats_bucket', ES_REF_URL, bucketsPath);\n    }\n}\n\nmodule.exports = StatsBucketAggregation;\n"
  },
  {
    "path": "src/aggregations/pipeline-aggregations/sum-bucket-aggregation.js",
    "content": "'use strict';\n\nconst PipelineAggregationBase = require('./pipeline-aggregation-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-sum-bucket-aggregation.html';\n\n/**\n * A sibling pipeline aggregation which calculates the sum across all bucket\n * of a specified metric in a sibling aggregation. The specified metric must\n * be numeric and the sibling aggregation must be a multi-bucket aggregation.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-sum-bucket-aggregation.html)\n *\n * @param {string} name The name which will be used to refer to this aggregation.\n * @param {string=} bucketsPath The relative path of metric to aggregate over\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .agg(\n *         esb.dateHistogramAggregation('sales_per_month', 'date')\n *             .interval('month')\n *             .agg(esb.sumAggregation('sales', 'price'))\n *     )\n *     .agg(\n *         // Get the sum of all the total monthly `sales` buckets\n *         esb.sumBucketAggregation(\n *             'sum_monthly_sales',\n *             'sales_per_month>sales'\n *         )\n *     )\n *     .size(0);\n *\n * @extends PipelineAggregationBase\n */\nclass SumBucketAggregation extends PipelineAggregationBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, bucketsPath) {\n        super(name, 'sum_bucket', ES_REF_URL, bucketsPath);\n    }\n}\n\nmodule.exports = SumBucketAggregation;\n"
  },
  {
    "path": "src/core/aggregation.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst { checkType, recursiveToJSON } = require('./util');\n\n/**\n * Base class implementation for all aggregation types.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class should be extended and used, as validation against the class\n * type is present in various places.\n *\n * @param {string} name\n * @param {string} aggType Type of aggregation\n *\n * @throws {Error} if `name` is empty\n * @throws {Error} if `aggType` is empty\n */\nclass Aggregation {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, aggType) {\n        if (_.isEmpty(aggType))\n            throw new Error('Aggregation `aggType` cannot be empty');\n\n        this._name = name;\n        this.aggType = aggType;\n\n        this._aggs = {};\n        this._aggsDef = this._aggs[aggType] = {};\n        this._nestedAggs = [];\n    }\n\n    // TODO: Investigate case when getter for aggregation will be required\n\n    /**\n     * Sets name for aggregation.\n     *\n     * @param {string} name returns `this` so that calls can be chained.\n     * @returns {Aggregation}\n     */\n    name(name) {\n        this._name = name;\n        return this;\n    }\n\n    /**\n     * Sets nested aggregations.\n     * This method can be called multiple times in order to set multiple nested aggregations.\n     *\n     * @param {Aggregation} agg Any valid {@link Aggregation}\n     * @returns {Aggregation} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `agg` is not an instance of `Aggregation`\n     */\n    aggregation(agg) {\n        checkType(agg, Aggregation);\n\n        // Possible to check for Global aggregation?\n        // Global aggregation can only be at the top level.\n\n        this._nestedAggs.push(agg);\n\n        return this;\n    }\n\n    /**\n     * Sets nested aggregation.\n     * This 
method can be called multiple times in order to set multiple nested aggregations.\n     *\n     * @param {Aggregation} agg Any valid {@link Aggregation}\n     * @returns {Aggregation} returns `this` so that calls can be chained.\n     */\n    agg(agg) {\n        return this.aggregation(agg);\n    }\n\n    /**\n     * Sets multiple nested aggregation items.\n     * This method accepts an array to set multiple nested aggregations in one call.\n     *\n     * @param {Array<Aggregation>} aggs Array of valid {@link Aggregation} items\n     * @returns {Aggregation} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `aggs` is not an instance of `Array`\n     * @throws {TypeError} If `aggs` contains instances not of type `Aggregation`\n     */\n    aggregations(aggs) {\n        checkType(aggs, Array);\n\n        aggs.forEach(agg => this.aggregation(agg));\n\n        return this;\n    }\n\n    /**\n     * Sets multiple nested aggregation items.\n     * Alias for method `aggregations`\n     *\n     * @param {Array<Aggregation>} aggs Array of valid {@link Aggregation} items\n     * @returns {Aggregation} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `aggs` is not an instance of `Array`\n     * @throws {TypeError} If `aggs` contains instances not of type `Aggregation`\n     */\n    aggs(aggs) {\n        return this.aggregations(aggs);\n    }\n\n    /**\n     * You can associate a piece of metadata with individual aggregations at request time\n     * that will be returned in place at response time.\n     *\n     * @param {Object} meta\n     * @returns {Aggregation} returns `this` so that calls can be chained.\n     */\n    meta(meta) {\n        this._aggs.meta = meta;\n        return this;\n    }\n\n    /**\n     * Internal helper function for determining the aggregation name.\n     *\n     * @returns {string} Aggregation name\n     * @private\n     */\n    _aggsName() {\n        if (!_.isEmpty(this._name)) return 
this._name;\n\n        if (_.has(this._aggsDef, 'field')) {\n            return `agg_${this.aggType}_${this._aggsDef.field}`;\n        }\n\n        // At this point, it would be difficult to construct a unique\n        // aggregation name. Error out.\n        throw new Error('Aggregation name could not be determined');\n    }\n\n    /**\n     * Build and returns DSL representation of the `Aggregation` class instance.\n     *\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    getDSL() {\n        return this.toJSON();\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `aggregation` query.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        const mainAggs = recursiveToJSON(this._aggs);\n\n        if (!_.isEmpty(this._nestedAggs)) {\n            mainAggs.aggs = Object.assign(\n                {},\n                ...recursiveToJSON(this._nestedAggs)\n            );\n        }\n\n        return { [this._aggsName()]: mainAggs };\n    }\n}\n\nmodule.exports = Aggregation;\n"
  },
  {
    "path": "src/core/consts.js",
    "content": "'use strict';\n\n// Used in Diversified Sampler aggregation\nexports.EXECUTION_HINT_SET = new Set([\n    'map',\n    'global_ordinals',\n    'global_ordinals_hash',\n    'global_ordinals_low_cardinality'\n]);\n\n// Used in Geo Point Aggregation\n// prettier-ignore\nexports.UNIT_SET = new Set(\n    [\n        'in', 'inch',\n        'yd', 'yards',\n        'ft', 'feet',\n        'km', 'kilometers',\n        'NM', 'nmi', 'nauticalmiles',\n        'mm', 'millimeters',\n        'cm', 'centimeters',\n        'mi', 'miles',\n        'm', 'meters'\n    ]\n);\n\nexports.MODEL_SET = new Set([\n    'simple',\n    'linear',\n    'ewma',\n    'holt',\n    'holt_winters'\n]);\n\nexports.SORT_MODE_SET = new Set(['min', 'max', 'sum', 'avg', 'median']);\n\nexports.RESCORE_MODE_SET = new Set(['total', 'multiply', 'min', 'max', 'avg']);\n\nexports.REWRITE_METHOD_SET = new Set([\n    'constant_score',\n    'constant_score_auto',\n    'constant_score_filter',\n    'scoring_boolean',\n    'constant_score_boolean',\n    'top_terms_N',\n    'top_terms_boost_N',\n    'top_terms_blended_freqs_N'\n]);\n\nexports.MULTI_MATCH_TYPE = new Set([\n    'best_fields',\n    'most_fields',\n    'cross_fields',\n    'phrase',\n    'phrase_prefix',\n    'bool_prefix'\n]);\n\nexports.SCORE_MODE_SET = new Set([\n    'multiply',\n    'sum',\n    'first',\n    'min',\n    'max',\n    'avg'\n]);\n\nexports.BOOST_MODE_SET = new Set([\n    'multiply',\n    'sum',\n    'replace',\n    'min',\n    'max',\n    'avg'\n]);\n\nexports.FIELD_MODIFIER_SET = new Set([\n    'none',\n    'log',\n    'log1p',\n    'log2p',\n    'ln',\n    'ln1p',\n    'ln2p',\n    'square',\n    'sqrt',\n    'reciprocal'\n]);\n\nexports.NESTED_SCORE_MODE_SET = new Set(['none', 'sum', 'min', 'max', 'avg']);\n\nexports.GEO_SHAPE_TYPES = new Set([\n    'point',\n    'linestring',\n    'polygon',\n    'multipoint',\n    'multilinestring',\n    'multipolygon',\n    'geometrycollection',\n    'envelope',\n    
'circle'\n]);\n\nexports.GEO_RELATION_SET = new Set([\n    'WITHIN',\n    'CONTAINS',\n    'DISJOINT',\n    'INTERSECTS'\n]);\n\nexports.SUGGEST_MODE_SET = new Set(['missing', 'popular', 'always']);\n\nexports.STRING_DISTANCE_SET = new Set([\n    'internal',\n    'damerau_levenshtein',\n    'levenstein',\n    'jarowinkler',\n    'ngram'\n]);\n\nexports.SMOOTHING_MODEL_SET = new Set([\n    'stupid_backoff',\n    'laplace',\n    'linear_interpolation'\n]);\n"
  },
  {
    "path": "src/core/geo-point.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst { checkType } = require('./util');\n\n/**\n * A `GeoPoint` object that can be used in queries and filters that\n * take a `GeoPoint`.  `GeoPoint` supports various input formats.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/geo-point.html)\n */\nclass GeoPoint {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        // Take optional parameter and call appropriate method?\n        // Will have to check for string, object and array.\n        // this will be set depending on subsequent method called\n        this._point = null;\n    }\n\n    /**\n     * Print warning message to console namespaced by class name.\n     *\n     * @param {string} msg\n     * @private\n     */\n    _warn(msg) {\n        console.warn(`[GeoPoint] ${msg}`);\n    }\n\n    /**\n     * Print warning messages to not mix Geo Point representations\n     * @private\n     */\n    _warnMixedRepr() {\n        this._warn('Do not mix with other representation!');\n        this._warn('Overwriting.');\n    }\n\n    /**\n     * Check the instance for object representation of Geo Point.\n     * If representation is null, new object is initialised.\n     * If it is not null, warning is logged and point is overwritten.\n     * @private\n     */\n    _checkObjRepr() {\n        if (_.isNil(this._point)) this._point = {};\n        else if (!_.isObject(this._point)) {\n            this._warnMixedRepr();\n            this._point = {};\n        }\n    }\n\n    /**\n     * Sets the latitude for the object representation.\n     *\n     * @param {number} lat Latitude\n     * @returns {GeoPoint} returns `this` so that calls can be chained\n     */\n    lat(lat) {\n        this._checkObjRepr();\n\n        this._point.lat = lat;\n        return this;\n    }\n\n    /**\n     * Sets the longitude for the object representation.\n     *\n     * @param {number} lon Longitude\n     * 
@returns {GeoPoint} returns `this` so that calls can be chained\n     */\n    lon(lon) {\n        this._checkObjRepr();\n\n        this._point.lon = lon;\n        return this;\n    }\n\n    /**\n     * Sets the Geo Point value expressed as an object,\n     * with `lat` and `lon` keys.\n     *\n     * @param {Object} point\n     * @returns {GeoPoint} returns `this` so that calls can be chained\n     * @throws {TypeError} If `point` is not an instance of object\n     */\n    object(point) {\n        checkType(point, Object);\n\n        !_.isNil(this._point) && this._warnMixedRepr();\n\n        this._point = point;\n        return this; // This doesn't make much sense. What else are you gonna call?\n    }\n\n    /**\n     * Sets the Geo Point value expressed as an array\n     * with the format: `[ lon, lat ]`.\n     *\n     * @param {Array<number>} point Array in format `[ lon, lat ]`(`GeoJson` standard)\n     * @returns {GeoPoint} returns `this` so that calls can be chained\n     * @throws {TypeError} If `point` is not an instance of Array\n     */\n    array(point) {\n        checkType(point, Array);\n\n        !_.isNil(this._point) && this._warnMixedRepr();\n\n        this._point = point;\n        return this; // This doesn't make much sense. What else are you gonna call?\n    }\n\n    /**\n     * Sets Geo-point expressed as a string with the format: `\"lat,lon\"`\n     * or as a geo hash\n     *\n     * @param {string} point\n     * @returns {GeoPoint} returns `this` so that calls can be chained\n     */\n    string(point) {\n        !_.isNil(this._point) && this._warnMixedRepr();\n\n        this._point = point;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `GeoPoint`\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return this._point;\n    }\n}\n\nmodule.exports = GeoPoint;\n"
  },
  {
    "path": "src/core/geo-shape.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst { checkType, invalidParam } = require('./util');\nconst { GEO_SHAPE_TYPES } = require('./consts');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/geo-shape.html';\n\nconst invalidTypeParam = invalidParam(ES_REF_URL, 'type', GEO_SHAPE_TYPES);\n\n/**\n * Shape object that can be used in queries and filters that\n * take a Shape. Shape uses the GeoJSON format.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/geo-shape.html)\n *\n * @example\n * // Pass options using method\n * const shape = esb.geoShape()\n *     .type('linestring')\n *     .coordinates([[-77.03653, 38.897676], [-77.009051, 38.889939]]);\n *\n * @example\n * // Pass parameters using constructor\n * const shape = esb.geoShape('multipoint', [[102.0, 2.0], [103.0, 2.0]])\n *\n * @param {string=} type A valid shape type.\n * Can be one of `point`, `linestring`, `polygon`, `multipoint`, `multilinestring`,\n * `multipolygon`, `geometrycollection`, `envelope` and `circle`\n * @param {Array=} coords A valid coordinate definition for the given shape.\n */\nclass GeoShape {\n    // eslint-disable-next-line require-jsdoc\n    constructor(type, coords) {\n        this._body = {};\n\n        if (!_.isNil(type)) this.type(type);\n        if (!_.isNil(coords)) this.coordinates(coords);\n    }\n\n    /**\n     * Sets the GeoJSON format type used to represent shape.\n     *\n     * @example\n     * const shape = esb.geoShape()\n     *     .type('envelope')\n     *     .coordinates([[-45.0, 45.0], [45.0, -45.0]])\n     *\n     * @param {string} type A valid shape type.\n     * Can be one of `point`, `linestring`, `polygon`, `multipoint`, `multilinestring`,\n     * `multipolygon`, `geometrycollection`, `envelope`, `circle`\n     * @returns {GeoShape} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        if (_.isNil(type)) 
invalidTypeParam(type);\n\n        const typeLower = type.toLowerCase();\n        if (!GEO_SHAPE_TYPES.has(typeLower)) invalidTypeParam(type);\n\n        this._body.type = typeLower;\n        return this;\n    }\n\n    /**\n     * Sets the coordinates for the shape definition. Note, the coordinates\n     * are not validated in this api. Please see [GeoJSON](http://geojson.org/geojson-spec.html#geometry-objects)\n     * and [ElasticSearch documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/geo-shape.html#input-structure) for correct coordinate definitions.\n     *\n     * @example\n     * const shape = esb.geoShape()\n     *     .type('point')\n     *     .coordinates([-77.03653, 38.897676])\n     *\n     * @param {Array<Array<number>>|Array<number>} coords\n     * @returns {GeoShape} returns `this` so that calls can be chained.\n     */\n    coordinates(coords) {\n        checkType(coords, Array);\n\n        this._body.coordinates = coords;\n        return this;\n    }\n\n    /**\n     * Sets the radius for parsing a circle `GeoShape`.\n     *\n     * @example\n     * const shape = esb.geoShape()\n     *     .type('circle')\n     *     .coordinates([-45.0, 45.0])\n     *     .radius('100m')\n     *\n     * @param {string|number} radius The radius for shape circle.\n     * @returns {GeoShape} returns `this` so that calls can be chained.\n     */\n    radius(radius) {\n        // Should this have a validation for circle shape type?\n        this._body.radius = radius;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the geo shape\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        if (!_.has(this._body, 'type') || !_.has(this._body, 'coordinates')) {\n            throw new Error(\n                'For all types, both the inner `type` and `coordinates` fields are 
required.'\n            );\n        }\n        return this._body;\n    }\n}\n\nmodule.exports = GeoShape;\n"
  },
  {
    "path": "src/core/highlight.js",
    "content": "'use strict';\n\nconst _ = require('../_');\nconst Query = require('./query');\nconst { checkType, invalidParam, recursiveToJSON } = require('./util');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-highlighting.html';\n\nconst invalidEncoderParam = invalidParam(\n    ES_REF_URL,\n    'encoder',\n    \"'default' or 'html'\"\n);\nconst invalidTypeParam = invalidParam(\n    ES_REF_URL,\n    'type',\n    \"'plain', 'postings', 'unified' or 'fvh'\"\n);\nconst invalidFragmenterParam = invalidParam(\n    ES_REF_URL,\n    'fragmenter',\n    \"'simple' or 'span'\"\n);\n\n/**\n * Allows to highlight search results on one or more fields. In order to\n * perform highlighting, the actual content of the field is required. If the\n * field in question is stored (has store set to yes in the mapping), it will\n * be used, otherwise, the actual _source will be loaded and the relevant\n * field will be extracted from it.\n *\n * If no term_vector information is provided (by setting it to\n * `with_positions_offsets` in the mapping), then the plain highlighter will be\n * used. 
If it is provided, then the fast vector highlighter will be used.\n * When term vectors are available, highlighting will be performed faster at\n * the cost of bigger index size.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-highlighting.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchAllQuery())\n *     .highlight(esb.highlight('content'));\n *\n * @example\n * const highlight = esb.highlight()\n *     .numberOfFragments(3)\n *     .fragmentSize(150)\n *     .fields(['_all', 'bio.title', 'bio.author', 'bio.content'])\n *     .preTags('<em>', '_all')\n *     .postTags('</em>', '_all')\n *     .numberOfFragments(0, 'bio.title')\n *     .numberOfFragments(0, 'bio.author')\n *     .numberOfFragments(5, 'bio.content')\n *     .scoreOrder('bio.content');\n *\n * highlight.toJSON()\n * {\n *     \"number_of_fragments\" : 3,\n *     \"fragment_size\" : 150,\n *     \"fields\" : {\n *         \"_all\" : { \"pre_tags\" : [\"<em>\"], \"post_tags\" : [\"</em>\"] },\n *         \"bio.title\" : { \"number_of_fragments\" : 0 },\n *         \"bio.author\" : { \"number_of_fragments\" : 0 },\n *         \"bio.content\" : { \"number_of_fragments\" : 5, \"order\" : \"score\" }\n *     }\n *  }\n *\n * @param {string|Array=} fields An optional field or array of fields to highlight.\n */\nclass Highlight {\n    // eslint-disable-next-line require-jsdoc\n    constructor(fields) {\n        this._fields = {};\n        this._highlight = { fields: this._fields };\n\n        // Does this smell?\n        if (_.isNil(fields)) return;\n\n        if (_.isString(fields)) this.field(fields);\n        else this.fields(fields);\n    }\n\n    /**\n     * Private function to set field option\n     *\n     * @param {string|null} field\n     * @param {string} option\n     * @param {string} val\n     * @private\n     */\n    _setFieldOption(field, option, val) {\n        if (_.isNil(field)) {\n   
         this._highlight[option] = val;\n            return;\n        }\n\n        this.field(field);\n        this._fields[field][option] = val;\n    }\n\n    /**\n     * Allows you to set a field that will be highlighted. The field is\n     * added to the current list of fields.\n     *\n     * @param {string} field A field name.\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    field(field) {\n        if (!_.isNil(field) && !_.has(this._fields, field)) {\n            this._fields[field] = {};\n        }\n\n        return this;\n    }\n\n    /**\n     * Allows you to set the fields that will be highlighted. All fields are\n     * added to the current list of fields.\n     *\n     * @param {Array<string>} fields Array of field names.\n     * @returns {Highlight} returns `this` so that calls can be chained\n     * @throws {TypeError} If `fields` is not an instance of Array\n     */\n    fields(fields) {\n        checkType(fields, Array);\n\n        fields.forEach(field => this.field(field));\n        return this;\n    }\n\n    /**\n     * Sets the pre tags for highlighted fragments. You can apply the\n     * tags to a specific field by passing the optional field name parameter.\n     *\n     * @example\n     * const highlight = esb.highlight('_all')\n     *     .preTags('<tag1>')\n     *     .postTags('</tag1>');\n     *\n     * @example\n     * const highlight = esb.highlight('_all')\n     *     .preTags(['<tag1>', '<tag2>'])\n     *     .postTags(['</tag1>', '</tag2>']);\n     *\n     * @param {string|Array} tags\n     * @param {string=} field\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    preTags(tags, field) {\n        this._setFieldOption(\n            field,\n            'pre_tags',\n            _.isString(tags) ? [tags] : tags\n        );\n        return this;\n    }\n\n    /**\n     * Sets the post tags for highlighted fragments. 
You can apply the\n     * tags to a specific field by passing the optional field name parameter.\n     *\n     * @example\n     * const highlight = esb.highlight('_all')\n     *     .preTags('<tag1>')\n     *     .postTags('</tag1>');\n     *\n     * @example\n     * const highlight = esb.highlight('_all')\n     *     .preTags(['<tag1>', '<tag2>'])\n     *     .postTags(['</tag1>', '</tag2>']);\n     *\n     * @param {string|Array} tags\n     * @param {string=} field\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    postTags(tags, field) {\n        this._setFieldOption(\n            field,\n            'post_tags',\n            _.isString(tags) ? [tags] : tags\n        );\n        return this;\n    }\n\n    /**\n     * Sets the styled schema to be used for the tags.\n     *\n     * styled - 10 `<em>` pre tags with css class of hltN, where N is 1-10\n     *\n     * @example\n     * const highlight = esb.highlight('content').styledTagsSchema();\n     *\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    styledTagsSchema() {\n        // This is a special case as it does not map directly to elasticsearch DSL\n        // This is written this way for ease of use\n        this._highlight.tags_schema = 'styled';\n        return this;\n    }\n\n    /**\n     * Sets the order of highlight fragments to be sorted by score. 
You can apply the\n     * score order to a specific field by passing the optional field name parameter.\n     *\n     * @example\n     * const highlight = esb.highlight('content').scoreOrder()\n     *\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    scoreOrder(field) {\n        // This is a special case as it does not map directly to elasticsearch DSL\n        // It is written this way for ease of use\n        this._setFieldOption(field, 'order', 'score');\n        return this;\n    }\n\n    /**\n     * Sets the size of each highlight fragment in characters. You can apply the\n     * option to a specific field by passing the optional field name parameter.\n     *\n     * @example\n     * const highlight = esb.highlight('content')\n     *     .fragmentSize(150, 'content')\n     *     .numberOfFragments(3, 'content');\n     *\n     * @param {number} size The fragment size in characters. Defaults to 100.\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    fragmentSize(size, field) {\n        this._setFieldOption(field, 'fragment_size', size);\n        return this;\n    }\n    /**\n     * Sets the maximum number of fragments to return. 
You can apply the\n     * option to a specific field by passing the optional field name parameter.\n     *\n     * @example\n     * const highlight = esb.highlight('content')\n     *     .fragmentSize(150, 'content')\n     *     .numberOfFragments(3, 'content');\n     *\n     * @example\n     * const highlight = esb.highlight(['_all', 'bio.title'])\n     *     .numberOfFragments(0, 'bio.title');\n     *\n     * @param {number} count The maximum number of fragments to return\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    numberOfFragments(count, field) {\n        this._setFieldOption(field, 'number_of_fragments', count);\n        return this;\n    }\n\n    /**\n     * If `no_match_size` is set, in the case where there is no matching fragment\n     * to highlight, a snippet of text, with the specified length, from the beginning\n     * of the field will be returned.\n     *\n     * The actual length may be shorter than specified as it tries to break on a word boundary.\n     *\n     * Default is `0`.\n     *\n     * @example\n     * const highlight = esb.highlight('content')\n     *     .fragmentSize(150, 'content')\n     *     .numberOfFragments(3, 'content')\n     *     .noMatchSize(150, 'content');\n     *\n     * @param {number} size\n     * @param {string} field\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    noMatchSize(size, field) {\n        this._setFieldOption(field, 'no_match_size', size);\n        return this;\n    }\n\n    /**\n     * Highlight against a query other than the search query.\n     * Useful if you use a rescore query because those\n     * are not taken into account by highlighting by default.\n     *\n     * @example\n     * const highlight = esb.highlight('content')\n     *     .fragmentSize(150, 'content')\n     *     .numberOfFragments(3, 'content')\n     *     .highlightQuery(\n     *         
esb.boolQuery()\n     *             .must(esb.matchQuery('content', 'foo bar'))\n     *             .should(\n     *                 esb.matchPhraseQuery('content', 'foo bar').slop(1).boost(10)\n     *             )\n     *             .minimumShouldMatch(0),\n     *         'content'\n     *     );\n     *\n     * @param {Query} query\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     * @throws {TypeError} If `query` is not an instance of `Query`\n     */\n    highlightQuery(query, field) {\n        checkType(query, Query);\n\n        this._setFieldOption(field, 'highlight_query', query);\n        return this;\n    }\n\n    /**\n     * Combine matches on multiple fields to highlight a single field.\n     * Useful for multifields that analyze the same string in different ways.\n     * Sets the highlight type to Fast Vector Highlighter(`fvh`).\n     *\n     * @example\n     * const highlight = esb.highlight('content')\n     *     .scoreOrder('content')\n     *     .matchedFields(['content', 'content.plain'], 'content');\n     *\n     * highlight.toJSON();\n     * {\n     *     \"order\": \"score\",\n     *     \"fields\": {\n     *         \"content\": {\n     *             \"matched_fields\": [\"content\", \"content.plain\"],\n     *             \"type\" : \"fvh\"\n     *         }\n     *     }\n     * }\n     *\n     * @param {Array<string>} fields\n     * @param {string} field Field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     * @throws {Error} field parameter should be valid field name\n     * @throws {TypeError} If `fields` is not an instance of Array\n     */\n    matchedFields(fields, field) {\n        checkType(fields, Array);\n        if (_.isEmpty(field)) {\n            throw new Error(\n                '`matched_fields` requires field name to be passed'\n            );\n        }\n\n        this.type('fvh', field);\n        
this._setFieldOption(field, 'matched_fields', fields);\n        return this;\n    }\n\n    /**\n     * The fast vector highlighter has a phrase_limit parameter that prevents\n     * it from analyzing too many phrases and eating tons of memory. It defaults\n     * to 256 so only the first 256 matching phrases in the document scored\n     * considered. You can raise the limit with the phrase_limit parameter.\n     *\n     * If using `matched_fields`, `phrase_limit` phrases per matched field\n     * are considered.\n     *\n     * @param {number} limit Defaults to 256.\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    phraseLimit(limit) {\n        this._highlight.phrase_limit = limit;\n        return this;\n    }\n\n    /**\n     * Can be used to define how highlighted text will be encoded.\n     *\n     * @param {string} encoder It can be either default (no encoding)\n     * or `html` (will escape `html`, if you use html highlighting tags)\n     * @returns {Highlight} returns `this` so that calls can be chained\n     * @throws {Error} Encoder can be either `default` or `html`\n     */\n    encoder(encoder) {\n        if (_.isNil(encoder)) invalidEncoderParam(encoder);\n\n        const encoderLower = encoder.toLowerCase();\n        if (encoderLower !== 'default' && encoderLower !== 'html') {\n            invalidEncoderParam(encoder);\n        }\n\n        this._highlight.encoder = encoderLower;\n        return this;\n    }\n\n    /**\n     * By default only fields that hold a query match will be highlighted.\n     * This can be set to false to highlight the field regardless of whether\n     * the query matched specifically on them. 
You can apply the\n     * option to a specific field by passing the optional field name parameter.\n     *\n     * @example\n     * const highlight = esb.highlight('_all')\n     *     .preTags('<em>', '_all')\n     *     .postTags('</em>', '_all')\n     *     .requireFieldMatch(false);\n     *\n     * @param {boolean} requireFieldMatch\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    requireFieldMatch(requireFieldMatch, field) {\n        this._setFieldOption(field, 'require_field_match', requireFieldMatch);\n        return this;\n    }\n\n    /**\n     * Allows to control how far to look for boundary characters, and defaults to 20.\n     * You can apply the option to a specific field by passing the optional field name parameter.\n     *\n     * @param {number} count The max characters to scan.\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    boundaryMaxScan(count, field) {\n        this._setFieldOption(field, 'boundary_max_scan', count);\n        return this;\n    }\n\n    /**\n     * Defines what constitutes a boundary for highlighting.\n     * It is a single string with each boundary character defined in it.\n     * It defaults to `.,!? \\t\\n`. 
You can apply the\n     * option to a specific field by passing the optional field name parameter.\n     *\n     * @param {string} charStr\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    boundaryChars(charStr, field) {\n        this._setFieldOption(field, 'boundary_chars', charStr);\n        return this;\n    }\n\n    /**\n     * Allows to force a specific highlighter type.\n     * This is useful for instance when needing to use\n     * the plain highlighter on a field that has term_vectors enabled.\n     * You can apply the option to a specific field by passing the optional\n     * field name parameter.\n     *\n     * Note: The `postings` highlighter has been removed in elasticsearch 6.0.\n     * The `unified` highlighter outputs the same highlighting when\n     * `index_options` is set to `offsets`.\n     *\n     * Note: The `unified` highlighter is only supported in elasticsearch 6.0\n     * and later.\n     *\n     * @example\n     * const highlight = esb.highlight('content').type('plain', 'content');\n     *\n     * @param {string} type The allowed values are: `plain`, `postings`, `unified` and `fvh`.\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     * @throws {Error} Type can be one of `plain`, `postings`, `unified` or `fvh`.\n     */\n    type(type, field) {\n        if (_.isNil(type)) invalidTypeParam(type);\n\n        const typeLower = type.toLowerCase();\n        if (\n            typeLower !== 'plain' &&\n            typeLower !== 'postings' &&\n            typeLower !== 'unified' &&\n            typeLower !== 'fvh'\n        ) {\n            invalidTypeParam(type);\n        }\n\n        this._setFieldOption(field, 'type', typeLower);\n        return this;\n    }\n\n    /**\n     * Forces the highlighting to highlight fields based on the source\n     * even if fields are stored 
separately. Defaults to false.\n     *\n     * @example\n     * const highlight = esb.highlight('content').forceSource(true, 'content');\n     *\n     * @param {boolean} forceSource\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     */\n    forceSource(forceSource, field) {\n        this._setFieldOption(field, 'force_source', forceSource);\n        return this;\n    }\n\n    /**\n     * Sets the fragmenter type. You can apply the\n     * option to a specific field by passing the optional field name parameter.\n     * Valid values for order are:\n     *  - `simple` - breaks text up into same-size fragments with no concerns\n     *      over spotting sentence boundaries.\n     *  - `span` - breaks text up into same-size fragments but does not split\n     *      up Spans.\n     *\n     * @example\n     * const highlight = esb.highlight('message')\n     *     .fragmentSize(15, 'message')\n     *     .numberOfFragments(3, 'message')\n     *     .fragmenter('simple', 'message');\n     *\n     * @param {string} fragmenter The fragmenter.\n     * @param {string=} field An optional field name\n     * @returns {Highlight} returns `this` so that calls can be chained\n     * @throws {Error} Fragmenter can be either `simple` or `span`\n     */\n    fragmenter(fragmenter, field) {\n        if (_.isNil(fragmenter)) invalidFragmenterParam(fragmenter);\n\n        const fragmenterLower = fragmenter.toLowerCase();\n        if (fragmenterLower !== 'simple' && fragmenterLower !== 'span') {\n            invalidFragmenterParam(fragmenter);\n        }\n\n        this._setFieldOption(field, 'fragmenter', fragmenterLower);\n        return this;\n    }\n\n    // TODO: Support Explicit field order\n    // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-highlighting.html#explicit-field-order\n\n    /**\n     * Override default `toJSON` to return DSL representation for the 
`highlight` request\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return recursiveToJSON(this._highlight);\n    }\n}\n\nmodule.exports = Highlight;\n"
  },
  {
    "path": "src/core/index.js",
    "content": "'use strict';\n\n// Base classes\n\nexports.RequestBodySearch = require('./request-body-search');\n\nexports.Aggregation = require('./aggregation');\n\nexports.Query = require('./query');\n\nexports.KNN = require('./knn');\n\nexports.Suggester = require('./suggester');\n\nexports.Script = require('./script');\n\nexports.Highlight = require('./highlight');\n\nexports.GeoPoint = require('./geo-point');\n\nexports.GeoShape = require('./geo-shape');\n\nexports.IndexedShape = require('./indexed-shape');\n\nexports.Sort = require('./sort');\n\nexports.Rescore = require('./rescore');\n\nexports.InnerHits = require('./inner-hits');\n\nexports.SearchTemplate = require('./search-template');\n\nexports.consts = require('./consts');\n\nexports.util = require('./util');\n\nexports.RuntimeField = require('./runtime-field');\n"
  },
  {
    "path": "src/core/indexed-shape.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\n/**\n * A shape which has already been indexed in another index and/or index\n * type. This is particularly useful for when you have a pre-defined list of\n * shapes which are useful to your application and you want to reference this\n * using a logical name (for example 'New Zealand') rather than having to\n * provide their coordinates each time.\n *\n * @example\n * const shape = esb.indexedShape('DEU', 'countries')\n *     .index('shapes')\n *     .path('location');\n *\n * const shape = esb.indexedShape()\n *     .id('DEU')\n *     .type('countries')\n *     .index('shapes')\n *     .path('location');\n *\n * @param {string=} id The document id of the shape.\n * @param {string=} type The name of the type where the shape is indexed.\n */\nclass IndexedShape {\n    // eslint-disable-next-line require-jsdoc\n    constructor(id, type) {\n        this._body = {};\n\n        if (!_.isNil(id)) this._body.id = id;\n        if (!_.isNil(type)) this._body.type = type;\n    }\n\n    /**\n     * Sets the ID of the document that containing the pre-indexed shape.\n     *\n     * @param {string} id The document id of the shape.\n     * @returns {IndexedShape} returns `this` so that calls can be chained.\n     */\n    id(id) {\n        this._body.id = id;\n        return this;\n    }\n\n    /**\n     * Sets the index type where the pre-indexed shape is.\n     *\n     * @param {string} type The name of the type where the shape is indexed.\n     * @returns {IndexedShape} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        this._body.type = type;\n        return this;\n    }\n\n    /**\n     * Sets the name of the index where the pre-indexed shape is. 
Defaults to `shapes`.\n     *\n     * @param {string} index A valid index name\n     * @returns {IndexedShape} returns `this` so that calls can be chained.\n     */\n    index(index) {\n        this._body.index = index;\n        return this;\n    }\n\n    /**\n     * Sets the field specified as path containing the pre-indexed shape.\n     * Defaults to `shape`.\n     *\n     * @param {string} path field name.\n     * @returns {IndexedShape} returns `this` so that calls can be chained.\n     */\n    path(path) {\n        this._body.path = path;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the geo shape\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return this._body;\n    }\n}\n\nmodule.exports = IndexedShape;\n"
  },
  {
    "path": "src/core/inner-hits.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst Sort = require('./sort');\nconst Highlight = require('./highlight');\n\nconst { checkType, setDefault, recursiveToJSON } = require('./util');\n\n/**\n * Inner hits returns per search hit in the search response additional\n * nested hits that caused a search hit to match in a different scope.\n * Inner hits can be used by defining an `inner_hits` definition on a\n * `nested`, `has_child` or `has_parent` query and filter.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-inner-hits.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch().query(\n *     esb.nestedQuery(\n *         esb.matchQuery('comments.message', '[actual query]')\n *     ).innerHits(\n *         esb.innerHits().source(false).storedFields(['comments.text'])\n *     )\n * );\n *\n * @param {string=} name The name to be used for the particular inner hit definition\n * in the response. Useful when multiple inner hits have been defined in a single\n * search request. The default depends in which query the inner hit is defined.\n */\nclass InnerHits {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        // Maybe accept some optional parameter?\n        this._body = {};\n\n        if (!_.isNil(name)) this._body.name = name;\n    }\n\n    /**\n     * The name to be used for the particular inner hit definition\n     * in the response. Useful when multiple inner hits have been defined in a single\n     * search request. 
The default depends in which query the inner hit is defined.\n     *\n     * @param {number} name\n     * @returns {InnerHits} returns `this` so that calls can be chained.\n     */\n    name(name) {\n        this._body.name = name;\n        return this;\n    }\n\n    /**\n     * The offset from where the first hit to fetch for each `inner_hits` in the returned\n     * regular search hits.\n     *\n     * @param {number} from\n     * @returns {InnerHits} returns `this` so that calls can be chained.\n     */\n    from(from) {\n        this._body.from = from;\n        return this;\n    }\n\n    /**\n     * The maximum number of hits to return per inner_hits.\n     * By default the top three matching hits are returned.\n     *\n     * @param {number} size Defaults to 10.\n     * @returns {InnerHits} returns `this` so that calls can be chained.\n     */\n    size(size) {\n        this._body.size = size;\n        return this;\n    }\n\n    /**\n     * How the inner hits should be sorted per inner_hits.\n     * By default the hits are sorted by the score.\n     *\n     * @param {Sort} sort\n     * @returns {InnerHits} returns `this` so that calls can be chained.\n     * @throws {TypeError} If parameter `sort` is not an instance of `Sort`.\n     */\n    sort(sort) {\n        checkType(sort, Sort);\n        setDefault(this._body, 'sort', []);\n\n        this._body.sort.push(sort);\n        return this;\n    }\n\n    /**\n     * Allows to add multiple sort on specific fields. 
Each sort can be reversed as well.\n     * The sort is defined on a per field level, with special field name for _score to\n     * sort by score, and _doc to sort by index order.\n     *\n     * @param {Array<Sort>} sorts Array of sort\n     * @returns {InnerHits} returns `this` so that calls can be chained.\n     * @throws {TypeError} If any item in parameter `sorts` is not an instance of `Sort`.\n     */\n    sorts(sorts) {\n        sorts.forEach(sort => this.sort(sort));\n        return this;\n    }\n\n    /**\n     * Allows to highlight search results on one or more fields. The implementation\n     * uses either the lucene `plain` highlighter, the fast vector highlighter (`fvh`)\n     * or `postings` highlighter.\n     *\n     * Note: The `postings` highlighter has been removed in elasticsearch 6.0.\n     * The `unified` highlighter outputs the same highlighting when\n     * `index_options` is set to `offsets`.\n     *\n     * @param {Highlight} highlight\n     * @returns {InnerHits} returns `this` so that calls can be chained\n     */\n    highlight(highlight) {\n        checkType(highlight, Highlight);\n\n        this._body.highlight = highlight;\n        return this;\n    }\n\n    /**\n     * Enables explanation for each hit on how its score was computed.\n     *\n     * @param {boolean} enable\n     * @returns {InnerHits} returns `this` so that calls can be chained\n     */\n    explain(enable) {\n        this._body.explain = enable;\n        return this;\n    }\n\n    /**\n     * Allows to control how the `_source` field is returned with every hit.\n     * You can turn off `_source` retrieval by passing `false`.\n     * It also accepts one(string) or more wildcard(array) patterns to control\n     * what parts of the `_source` should be returned\n     * An object can also be used to specify the wildcard patterns for `includes` and `excludes`.\n     *\n     * @param {boolean|string|Array|Object} source\n     * @returns {InnerHits} returns `this` so that 
calls can be chained\n     */\n    source(source) {\n        this._body._source = source;\n        return this;\n    }\n\n    /**\n     * Include specific stored fields\n     *\n     * @param {Array|string} fields\n     * @returns {InnerHits} returns `this` so that calls can be chained\n     */\n    storedFields(fields) {\n        this._body.stored_fields = fields;\n        return this;\n    }\n\n    /**\n     * Computes a document property dynamically based on the supplied `Script`.\n     *\n     * @param {string} scriptFieldName\n     * @param {string|Script} script string or instance of `Script`\n     * @returns {InnerHits} returns `this` so that calls can be chained\n     */\n    scriptField(scriptFieldName, script) {\n        setDefault(this._body, 'script_fields', {});\n\n        this._body.script_fields[scriptFieldName] = { script };\n        return this;\n    }\n\n    /**\n     * Sets given dynamic document properties to be computed using supplied `Script`s.\n     *\n     * Object should have `scriptFieldName` as key and `script` as the value.\n     *\n     * @param {Object} scriptFields Object with `scriptFieldName` as key and `script` as the value.\n     * @returns {InnerHits} returns `this` so that calls can be chained\n     */\n    scriptFields(scriptFields) {\n        checkType(scriptFields, Object);\n\n        Object.keys(scriptFields).forEach(scriptFieldName =>\n            this.scriptField(scriptFieldName, scriptFields[scriptFieldName])\n        );\n\n        return this;\n    }\n\n    /**\n     * Allows to return the doc value representation of a field for each hit.\n     * Doc value fields can work on fields that are not stored.\n     *\n     * @param {Array<string>} fields\n     * @returns {InnerHits} returns `this` so that calls can be chained\n     */\n    docvalueFields(fields) {\n        this._body.docvalue_fields = fields;\n        return this;\n    }\n\n    /**\n     * Returns a version for each search hit.\n     *\n     * @param {boolean} 
enable\n     * @returns {InnerHits} returns `this` so that calls can be chained.\n     */\n    version(enable) {\n        this._body.version = enable;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the inner hits request\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return recursiveToJSON(this._body);\n    }\n}\n\nmodule.exports = InnerHits;\n"
  },
  {
    "path": "src/core/inspect.js",
    "content": "/* istanbul ignore file */\n/* eslint-disable max-lines */\n'use strict';\n\nconst _ = require('../_');\n\n/**\n * Echos the value of a value. Trys to print the value out\n * in the best way possible given the different types.\n *\n * @param {Object} obj The object to print out.\n * @param {Object} opts Optional options object that alters the output.\n * @returns {string}\n */\nfunction inspect(obj, opts) {\n    /* eslint-disable prefer-rest-params */\n    // default options\n    const ctx = {\n        seen: [],\n        stylize: stylizeNoColor\n    };\n    // legacy...\n    if (arguments.length >= 3) ctx.depth = arguments[2];\n    if (arguments.length >= 4) ctx.colors = arguments[3];\n    if (isBoolean(opts)) {\n        // legacy...\n        ctx.showHidden = opts;\n    } else if (opts) {\n        // got an \"options\" object\n        exports._extend(ctx, opts);\n    }\n    // set default options\n    if (isUndefined(ctx.showHidden)) ctx.showHidden = false;\n    if (isUndefined(ctx.depth)) ctx.depth = 2;\n    if (isUndefined(ctx.colors)) ctx.colors = false;\n    if (isUndefined(ctx.customInspect)) ctx.customInspect = true;\n    if (ctx.colors) ctx.stylize = stylizeWithColor;\n    return formatValue(ctx, obj, ctx.depth);\n    /* eslint-enable prefer-rest-params */\n}\n\nmodule.exports = inspect;\n\n/* eslint-disable require-jsdoc */\n\nfunction stylizeNoColor(str) {\n    return str;\n}\n\nfunction stylizeWithColor(str, styleType) {\n    const style = inspect.styles[styleType];\n\n    if (style) {\n        return `\\u001B[${inspect.colors[style][0]}m${str}\\u001B[${inspect.colors[style][1]}m`;\n    }\n    return str;\n}\n\n// eslint-disable-next-line complexity, max-statements\nfunction formatValue(ctx, value, recurseTimes) {\n    // Provide a hook for user-specified inspect functions.\n    // Check that value is an object with an inspect function on it\n    if (\n        ctx.customInspect &&\n        value &&\n        isFunction(value.inspect) &&\n   
     // Filter out the util module, it's inspect function is special\n        value.inspect !== exports.inspect &&\n        // Also filter out any prototype objects using the circular check.\n        !(value.constructor && value.constructor.prototype === value)\n    ) {\n        let ret = value.inspect(recurseTimes, ctx);\n        if (!_.isString(ret)) {\n            ret = formatValue(ctx, ret, recurseTimes);\n        }\n        return ret;\n    }\n\n    // Primitive types cannot have properties\n    const primitive = formatPrimitive(ctx, value);\n    if (primitive) {\n        return primitive;\n    }\n\n    // Look up the keys of the object.\n    let keys = Object.keys(value);\n    const visibleKeys = arrayToHash(keys);\n\n    if (ctx.showHidden) {\n        keys = Object.getOwnPropertyNames(value);\n    }\n\n    // IE doesn't make error fields non-enumerable\n    // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx\n    if (\n        isError(value) &&\n        (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)\n    ) {\n        return formatError(value);\n    }\n\n    // Some type of object without properties can be shortcutted.\n    if (keys.length === 0) {\n        if (isFunction(value)) {\n            const name = value.name ? 
`: ${value.name}` : '';\n            return ctx.stylize(`[Function${name}]`, 'special');\n        }\n        if (isRegExp(value)) {\n            return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');\n        }\n        if (isDate(value)) {\n            return ctx.stylize(Date.prototype.toString.call(value), 'date');\n        }\n        if (isError(value)) {\n            return formatError(value);\n        }\n        if (isSet(value)) {\n            return formatSet(ctx, value);\n        }\n    }\n\n    let base = '',\n        array = false,\n        braces = ['{', '}'];\n\n    // Make Array say that they are Array\n    if (isArray(value)) {\n        array = true;\n        braces = ['[', ']'];\n    }\n\n    // Make functions say that they are functions\n    if (isFunction(value)) {\n        const n = value.name ? `: ${value.name}` : '';\n        base = ` [Function${n}]`;\n    }\n\n    // Make RegExps say that they are RegExps\n    if (isRegExp(value)) {\n        base = ` ${RegExp.prototype.toString.call(value)}`;\n    }\n\n    // Make dates with properties first say the date\n    if (isDate(value)) {\n        base = ` ${Date.prototype.toUTCString.call(value)}`;\n    }\n\n    // Make error with message first say the error\n    if (isError(value)) {\n        base = ` ${formatError(value)}`;\n    }\n\n    if (keys.length === 0 && (!array || value.length === 0)) {\n        return braces[0] + base + braces[1];\n    }\n\n    if (recurseTimes < 0) {\n        if (isRegExp(value)) {\n            return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');\n        }\n        return ctx.stylize('[Object]', 'special');\n    }\n\n    ctx.seen.push(value);\n\n    let output;\n    if (array) {\n        output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);\n    } else {\n        output = keys.map(key =>\n            formatProperty(ctx, value, recurseTimes, visibleKeys, key, array)\n        );\n    }\n\n    ctx.seen.pop();\n\n    return 
reduceToSingleString(output, base, braces);\n}\n\nfunction isArray(ar) {\n    return Array.isArray(ar);\n}\n\nfunction isBoolean(arg) {\n    return typeof arg === 'boolean';\n}\n\nfunction isNull(arg) {\n    return arg === null;\n}\n\nfunction isNumber(arg) {\n    return typeof arg === 'number';\n}\n\nfunction isUndefined(arg) {\n    return arg === undefined;\n}\n\nfunction isRegExp(re) {\n    return _.isObject(re) && objectToString(re) === '[object RegExp]';\n}\n\nfunction isDate(d) {\n    return _.isObject(d) && objectToString(d) === '[object Date]';\n}\n\nfunction isError(e) {\n    return (\n        _.isObject(e) &&\n        (objectToString(e) === '[object Error]' || e instanceof Error)\n    );\n}\n\nfunction isFunction(arg) {\n    return typeof arg === 'function';\n}\n\nfunction isSet(s) {\n    return _.isObject(s) && objectToString(s) === '[object Set]';\n}\n\nfunction arrayToHash(array) {\n    const hash = {};\n\n    array.forEach(val => {\n        hash[val] = true;\n    });\n\n    return hash;\n}\n\nfunction formatError(value) {\n    return `[${Error.prototype.toString.call(value)}]`;\n}\n\nfunction formatSet(ctx, set) {\n    if (set.size === 0) {\n        return ctx.stylize('{}', 'special');\n    }\n\n    // Convert Set to sorted array and format each value\n    const sortedValues = Array.from(set).sort();\n    const formattedValues = sortedValues.map(\n        val => formatPrimitive(ctx, val) || formatValue(ctx, val, ctx.depth)\n    );\n\n    return formattedValues.join(', ');\n}\n\n// eslint-disable-next-line consistent-return\nfunction formatPrimitive(ctx, value) {\n    if (isUndefined(value)) return ctx.stylize('undefined', 'undefined');\n    if (_.isString(value)) {\n        const simple = `'${JSON.stringify(value)\n            .replace(/^\"|\"$/g, '')\n            .replace(/'/g, \"\\\\'\")\n            .replace(/\\\\\"/g, '\"')}'`;\n        return ctx.stylize(simple, 'string');\n    }\n    if (isNumber(value)) return ctx.stylize(`${value}`, 
'number');\n    if (isBoolean(value)) return ctx.stylize(`${value}`, 'boolean');\n    // For some reason typeof null is \"object\", so special case here.\n    if (isNull(value)) return ctx.stylize('null', 'null');\n}\n\nfunction formatArray(ctx, value, recurseTimes, visibleKeys, keys) {\n    const output = [];\n    for (let i = 0, l = value.length; i < l; ++i) {\n        if (hasOwnProperty(value, String(i))) {\n            output.push(\n                formatProperty(\n                    ctx,\n                    value,\n                    recurseTimes,\n                    visibleKeys,\n                    String(i),\n                    true\n                )\n            );\n        } else {\n            output.push('');\n        }\n    }\n    keys.forEach(key => {\n        if (!key.match(/^\\d+$/)) {\n            output.push(\n                formatProperty(ctx, value, recurseTimes, visibleKeys, key, true)\n            );\n        }\n    });\n    return output;\n}\n\nfunction formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {\n    let name, str;\n    const desc = Object.getOwnPropertyDescriptor(value, key) || {\n        value: value[key]\n    };\n    if (desc.get) {\n        if (desc.set) {\n            str = ctx.stylize('[Getter/Setter]', 'special');\n        } else {\n            str = ctx.stylize('[Getter]', 'special');\n        }\n    } else if (desc.set) {\n        str = ctx.stylize('[Setter]', 'special');\n    }\n    if (!hasOwnProperty(visibleKeys, key)) {\n        name = `[${key}]`;\n    }\n    if (!str) {\n        if (ctx.seen.indexOf(desc.value) < 0) {\n            if (isNull(recurseTimes)) {\n                str = formatValue(ctx, desc.value, null);\n            } else {\n                str = formatValue(ctx, desc.value, recurseTimes - 1);\n            }\n            if (str.indexOf('\\n') > -1) {\n                if (array) {\n                    str = str\n                        .split('\\n')\n                        .map(line 
=> `  ${line}`)\n                        .join('\\n')\n                        .slice(2);\n                } else {\n                    str = `\\n${str\n                        .split('\\n')\n                        .map(line => `   ${line}`)\n                        .join('\\n')}`;\n                }\n            }\n        } else {\n            str = ctx.stylize('[Circular]', 'special');\n        }\n    }\n    if (isUndefined(name)) {\n        if (array && key.match(/^\\d+$/)) {\n            return str;\n        }\n        name = JSON.stringify(`${key}`);\n        if (name.match(/^\"([a-zA-Z_]\\w*)\"$/)) {\n            name = name.slice(1, -1);\n            name = ctx.stylize(name, 'name');\n        } else {\n            name = name\n                .replace(/'/g, \"\\\\'\")\n                .replace(/\\\\\"/g, '\"')\n                .replace(/(^\"|\"$)/g, \"'\");\n            name = ctx.stylize(name, 'string');\n        }\n    }\n\n    return `${name}: ${str}`;\n}\n\nfunction reduceToSingleString(output, base, braces) {\n    const length = output.reduce(\n        (prev, cur) =>\n            // eslint-disable-next-line no-control-regex\n            prev + cur.replace(/\\u001b\\[\\d\\d?m/g, '').length + 1,\n        0\n    );\n\n    if (length > 60) {\n        return `${braces[0] + (base === '' ? '' : `${base}\\n `)} ${output.join(\n            ',\\n  '\n        )} ${braces[1]}`;\n    }\n\n    return `${braces[0] + base} ${output.join(', ')} ${braces[1]}`;\n}\n\nfunction objectToString(o) {\n    return Object.prototype.toString.call(o);\n}\n\n/* eslint-enable require-jsdoc */\n"
  },
  {
    "path": "src/core/knn.js",
    "content": "'use strict';\n\nconst { recursiveToJSON, checkType } = require('./util');\nconst Query = require('./query');\n\n/**\n * Class representing a k-Nearest Neighbors (k-NN) query.\n * This class extends the Query class to support the specifics of k-NN search, including setting up the field,\n * query vector, number of neighbors (k), and number of candidates.\n *\n * @example\n * const qry = esb.kNN('my_field', 100, 1000).vector([1,2,3]);\n * const qry = esb.kNN('my_field', 100, 1000).queryVectorBuilder('model_123', 'Sample model text');\n *\n * NOTE: kNN search was added to Elasticsearch in v8.0\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/knn-search.html)\n */\nclass KNN {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, k, numCandidates) {\n        if (k > numCandidates)\n            throw new Error('KNN numCandidates cannot be less than k');\n        this._body = {};\n        this._body.field = field;\n        this._body.k = k;\n        this._body.filter = [];\n        this._body.num_candidates = numCandidates;\n    }\n\n    /**\n     * Sets the query vector for the k-NN search.\n     * @param {Array<number>} vector - The query vector.\n     * @returns {KNN} Returns the instance of KNN for method chaining.\n     */\n    queryVector(vector) {\n        if (this._body.query_vector_builder)\n            throw new Error(\n                'cannot provide both query_vector_builder and query_vector'\n            );\n        this._body.query_vector = vector;\n        return this;\n    }\n\n    /**\n     * Sets the query vector builder for the k-NN search.\n     * This method configures a query vector builder using a specified model ID and model text.\n     * It's important to note that either a direct query vector or a query vector builder can be\n     * provided, but not both.\n     *\n     * @param {string} modelId - The ID of the model to be used for generating the query vector.\n   
  * @param {string} modelText - The text input based on which the query vector is generated.\n     * @returns {KNN} Returns the instance of KNN for method chaining.\n     * @throws {Error} Throws an error if both query_vector_builder and query_vector are provided.\n     *\n     * @example\n     * let knn = new esb.KNN().queryVectorBuilder('model_123', 'Sample model text');\n     */\n    queryVectorBuilder(modelId, modelText) {\n        if (this._body.query_vector)\n            throw new Error(\n                'cannot provide both query_vector_builder and query_vector'\n            );\n        this._body.query_vector_builder = {\n            text_embeddings: {\n                model_id: modelId,\n                model_text: modelText\n            }\n        };\n        return this;\n    }\n\n    /**\n     * Adds one or more filter queries to the k-NN search.\n     *\n     * This method is designed to apply filters to the k-NN search. It accepts either a single\n     * query or an array of queries. Each query acts as a filter, refining the search results\n     * according to the specified conditions. These queries must be instances of the `Query` class.\n     * If any provided query is not an instance of `Query`, a TypeError is thrown.\n     *\n     * @param {Query|Query[]} queries - A single `Query` instance or an array of `Query` instances for filtering.\n     * @returns {KNN} Returns `this` to allow method chaining.\n     * @throws {TypeError} If any of the provided queries is not an instance of `Query`.\n     *\n     * @example\n     * let knn = new esb.KNN().filter(new esb.TermQuery('field', 'value')); // Applying a single filter query\n     *\n     * @example\n     * let knn = new esb.KNN().filter([\n     *     new esb.TermQuery('field1', 'value1'),\n     *     new esb.TermQuery('field2', 'value2')\n     * ]); // Applying multiple filter queries\n     */\n    filter(queries) {\n        const queryArray = Array.isArray(queries) ? 
queries : [queries];\n        queryArray.forEach(query => {\n            checkType(query, Query);\n            this._body.filter.push(query);\n        });\n        return this;\n    }\n\n    /**\n     * Sets the field to perform the k-NN search on.\n     * @param {number} boost - The number of the boost\n     * @returns {KNN} Returns the instance of KNN for method chaining.\n     */\n    boost(boost) {\n        this._body.boost = boost;\n        return this;\n    }\n\n    /**\n     * Sets the field to perform the k-NN search on.\n     * @param {number} similarity - The number of the similarity\n     * @returns {KNN} Returns the instance of KNN for method chaining.\n     */\n    similarity(similarity) {\n        this._body.similarity = similarity;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `query`\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        if (!this._body.query_vector && !this._body.query_vector_builder)\n            throw new Error(\n                'either query_vector_builder or query_vector must be provided'\n            );\n        return recursiveToJSON(this._body);\n    }\n}\n\nmodule.exports = KNN;\n"
  },
  {
    "path": "src/core/query.js",
    "content": "'use strict';\n\nconst { recursiveToJSON } = require('./util');\n\n/**\n * Base class implementation for all query types.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class should be extended and used, as validation against the class\n * type is present in various places.\n *\n * @param {string} queryType\n */\nclass Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryType) {\n        this.queryType = queryType;\n\n        this._body = {};\n        this._queryOpts = this._body[queryType] = {};\n    }\n\n    /**\n     * Sets the boost value for documents matching the `Query`.\n     *\n     * @param {number} factor\n     * @returns {Query} returns `this` so that calls can be chained.\n     */\n    boost(factor) {\n        this._queryOpts.boost = factor;\n        return this;\n    }\n\n    /**\n     * Sets the query name.\n     *\n     * @example\n     * const boolQry = esb.boolQuery()\n     *     .should([\n     *         esb.matchQuery('name.first', 'shay').name('first'),\n     *         esb.matchQuery('name.last', 'banon').name('last')\n     *     ])\n     *     .filter(esb.termsQuery('name.last', ['banon', 'kimchy']).name('test'));\n     *\n     * @param {string} name\n     * @returns {Query} returns `this` so that calls can be chained.\n     */\n    name(name) {\n        this._queryOpts._name = name;\n        return this;\n    }\n\n    /**\n     * Build and returns DSL representation of the `Query` class instance.\n     *\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    getDSL() {\n        return this.toJSON();\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `query`\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return 
recursiveToJSON(this._body);\n    }\n}\n\nmodule.exports = Query;\n"
  },
  {
    "path": "src/core/request-body-search.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst Query = require('./query'),\n    Aggregation = require('./aggregation'),\n    Suggester = require('./suggester'),\n    Rescore = require('./rescore'),\n    Sort = require('./sort'),\n    Highlight = require('./highlight'),\n    InnerHits = require('./inner-hits'),\n    KNN = require('./knn');\n\nconst { checkType, setDefault, recursiveToJSON } = require('./util');\nconst RuntimeField = require('./runtime-field');\n\n/**\n * Helper function to call `recursiveToJSON` on elements of array and assign to object.\n *\n * @private\n *\n * @param {Array} arr\n * @returns {Object}\n */\nfunction recMerge(arr) {\n    return Object.assign({}, ...recursiveToJSON(arr));\n}\n\n/**\n * The `RequestBodySearch` object provides methods generating an elasticsearch\n * search request body. The search request can be executed with a search DSL,\n * which includes the Query DSL, within its body.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-body.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.termQuery('user', 'kimchy'))\n *     .from(0)\n *     .size(10);\n *\n * reqBody.toJSON();\n * {\n *   \"query\": { \"term\": { \"user\": \"kimchy\" } },\n *   \"from\": 0,\n *   \"size\": 10\n * }\n *\n * @example\n * // Query and aggregation\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchQuery('business_type', 'shop'))\n *     .agg(\n *         esb.geoBoundsAggregation('viewport', 'location').wrapLongitude(true)\n *     );\n *\n * @example\n * // Query, aggregation with nested\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchQuery('crime', 'burglary'))\n *     .agg(\n *         esb.termsAggregation('towns', 'town').agg(\n *             esb.geoCentroidAggregation('centroid', 'location')\n *         )\n *     );\n */\nclass RequestBodySearch {\n    // eslint-disable-next-line 
require-jsdoc\n    constructor() {\n        // Maybe accept some optional parameter?\n        this._body = {};\n        this._knn = [];\n        this._aggs = [];\n        this._suggests = [];\n        this._suggestText = null;\n    }\n\n    /**\n     * Define query on the search request body using the Query DSL.\n     *\n     * @param {Query} query\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    query(query) {\n        checkType(query, Query);\n\n        this._body.query = query;\n        return this;\n    }\n\n    /**\n     * Sets knn on the search request body.\n     *\n     * @param {Knn|Knn[]} knn\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    kNN(knn) {\n        const knns = Array.isArray(knn) ? knn : [knn];\n        knns.forEach(_knn => {\n            checkType(_knn, KNN);\n            this._knn.push(_knn);\n        });\n        return this;\n    }\n\n    /**\n     * Sets aggregation on the request body.\n     * Alias for method `aggregation`\n     *\n     * @param {Aggregation} agg Any valid `Aggregation`\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `agg` is not an instance of `Aggregation`\n     */\n    agg(agg) {\n        return this.aggregation(agg);\n    }\n\n    /**\n     * Sets aggregation on the request body.\n     *\n     * @param {Aggregation} agg Any valid `Aggregation`\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `agg` is not an instance of `Aggregation`\n     */\n    aggregation(agg) {\n        checkType(agg, Aggregation);\n\n        this._aggs.push(agg);\n        return this;\n    }\n\n    /**\n     * Sets multiple nested aggregation items.\n     * Alias for method `aggregations`\n     *\n     * @param {Array<Aggregation>} aggs Array of valid {@link Aggregation} items\n     * @returns {Aggregation} returns `this` so 
that calls can be chained.\n     * @throws {TypeError} If `aggs` is not an instance of `Array`\n     * @throws {TypeError} If `aggs` contains instances not of type `Aggregation`\n     */\n    aggs(aggs) {\n        return this.aggregations(aggs);\n    }\n\n    /**\n     * Sets multiple nested aggregation items.\n     * This method accepts an array to set multiple nested aggregations in one call.\n     *\n     * @param {Array<Aggregation>} aggs Array of valid {@link Aggregation} items\n     * @returns {Aggregation} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `aggs` is not an instance of `Array`\n     * @throws {TypeError} If `aggs` contains instances not of type `Aggregation`\n     */\n    aggregations(aggs) {\n        checkType(aggs, Array);\n\n        aggs.forEach(agg => this.aggregation(agg));\n\n        return this;\n    }\n\n    /**\n     * Sets suggester on the request body.\n     *\n     * @example\n     * const req = esb.requestBodySearch()\n     *     .query(esb.matchQuery('message', 'trying out elasticsearch'))\n     *     .suggest(\n     *         esb.termSuggester(\n     *             'my-suggestion',\n     *             'message',\n     *             'tring out Elasticsearch'\n     *         )\n     *     );\n     *\n     * @param {Suggester} suggest Any valid `Suggester`\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `suggest` is not an instance of `Suggester`\n     */\n    suggest(suggest) {\n        checkType(suggest, Suggester);\n\n        this._suggests.push(suggest);\n        return this;\n    }\n\n    /**\n     * Sets the global suggest text to avoid repetition for multiple suggestions.\n     *\n     * @example\n     * const req = esb.requestBodySearch()\n     *     .suggestText('tring out elasticsearch')\n     *     .suggest(esb.termSuggester('my-suggest-1', 'message'))\n     *     .suggest(esb.termSuggester('my-suggest-2', 'user'));\n     *\n     * 
@param {string} txt Global suggest text\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    suggestText(txt) {\n        this._suggestText = txt;\n        return this;\n    }\n\n    /**\n     * Sets a search timeout, bounding the search request to be executed within\n     * the specified time value and bail with the hits accumulated up to that\n     * point when expired.\n     *\n     * @param {string} timeout Duration can be specified using\n     * [time units](https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#time-units)\n     * Defaults to no timeout.\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    timeout(timeout) {\n        this._body.timeout = timeout;\n        return this;\n    }\n\n    /**\n     * To retrieve hits from a certain offset.\n     *\n     * @param {number} from Defaults to 0.\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    from(from) {\n        this._body.from = from;\n        return this;\n    }\n\n    /**\n     * The number of hits to return. If you do not care about getting some hits back\n     * but only about the number of matches and/or aggregations, setting the value\n     * to 0 will help performance.\n     *\n     * @param {number} size Defaults to 10.\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    size(size) {\n        this._body.size = size;\n        return this;\n    }\n\n    /**\n     * The maximum number of documents to collect for each shard, upon reaching which\n     * the query execution will terminate early. 
If set, the response will have a\n     * boolean field `terminated_early` to indicate whether the query execution has\n     * actually terminated early.\n     *\n     * @param {number} numberOfDocs Maximum number of documents to collect for each shard.\n     * Defaults to no limit.\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    terminateAfter(numberOfDocs) {\n        this._body.terminate_after = numberOfDocs;\n        return this;\n    }\n\n    /**\n     * Allows to add sort on specific field. The sort can be reversed as well.\n     * The sort is defined on a per field level, with special field name for `_score` to\n     * sort by score, and `_doc` to sort by index order.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .sort(esb.sort('post_date', 'asc'))\n     *     .sort(esb.sort('user'))\n     *     .sorts([\n     *         esb.sort('name', 'desc'),\n     *         esb.sort('age', 'desc'),\n     *         esb.sort('_score')\n     *     ]);\n     *\n     * @param {Sort} sort\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     * @throws {TypeError} If parameter `sort` is not an instance of `Sort`.\n     */\n    sort(sort) {\n        checkType(sort, Sort);\n        setDefault(this._body, 'sort', []);\n\n        this._body.sort.push(sort);\n        return this;\n    }\n\n    /**\n     * Allows to add multiple sort on specific fields. 
Each sort can be reversed as well.\n     * The sort is defined on a per field level, with special field name for _score to\n     * sort by score, and _doc to sort by index order.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .sort(esb.sort('post_date', 'asc'))\n     *     .sort(esb.sort('user'))\n     *     .sorts([\n     *         esb.sort('name', 'desc'),\n     *         esb.sort('age', 'desc'),\n     *         esb.sort('_score')\n     *     ]);\n     *\n     * @param {Array<Sort>} sorts Array of sorts\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     * @throws {TypeError} If any item in parameter `sorts` is not an instance of `Sort`.\n     */\n    sorts(sorts) {\n        sorts.forEach(sort => this.sort(sort));\n        return this;\n    }\n\n    /**\n     * When sorting on a field, scores are not computed. By setting `track_scores` to true,\n     * scores will still be computed and tracked.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .trackScores(true)\n     *     .sorts([\n     *         esb.sort('post_date', 'desc'),\n     *         esb.sort('name', 'desc'),\n     *         esb.sort('age', 'desc')\n     *     ])\n     *     .query(esb.termQuery('user', 'kimchy'));\n\n     *\n     * @param {boolean} enable\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    trackScores(enable) {\n        this._body.track_scores = enable;\n        return this;\n    }\n\n    /**\n     * The `track_total_hits` parameter allows you to control how the total number of hits\n     * should be tracked. 
Passing `false` can increase performance in some situations.\n     * (Added in elasticsearch@7)\n     *\n     * Pass `true`, `false`, or the upper limit (default: `10000`) of hits you want tracked.\n     *\n     * @param {boolean|number} enableOrLimit\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    trackTotalHits(enableOrLimit) {\n        this._body.track_total_hits = enableOrLimit;\n        return this;\n    }\n\n    /**\n     * Allows to control how the `_source` field is returned with every hit.\n     * You can turn off `_source` retrieval by passing `false`.\n     * It also accepts one(string) or more wildcard(array) patterns to control\n     * what parts of the `_source` should be returned\n     * An object can also be used to specify the wildcard patterns for `includes` and `excludes`.\n     *\n     * @example\n     * // To disable `_source` retrieval set to `false`:\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .source(false);\n     *\n     * @example\n     * // The `_source` also accepts one or more wildcard patterns to control what\n     * // parts of the `_source` should be returned:\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .source('obj.*');\n     *\n     * // OR\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .source([ 'obj1.*', 'obj2.*' ]);\n     *\n     * @example\n     * // For complete control, you can specify both `includes` and `excludes` patterns:\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .source({\n     *         'includes': [ 'obj1.*', 'obj2.*' ],\n     *         'excludes': [ '*.description' ]\n     *     });\n     *\n     * @param {boolean|string|Array|Object} source\n     * @returns {RequestBodySearch} returns `this` so that calls 
can be chained\n     */\n    source(source) {\n        this._body._source = source;\n        return this;\n    }\n\n    /**\n     * The `stored_fields` parameter is about fields that are explicitly marked as stored in the mapping.\n     * Selectively load specific stored fields for each document represented by a search hit\n     * using array of stored fields.\n     * An empty array will cause only the `_id` and `_type` for each hit to be returned.\n     * To disable the stored fields (and metadata fields) entirely use: `_none_`\n     *\n     * @example\n     * // Selectively load specific stored fields for each document\n     * // represented by a search hit\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .storedFields(['user', 'postDate']);\n     *\n     * @example\n     * // Return only the `_id` and `_type` to be returned:\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .storedFields([]);\n     *\n     * @example\n     * // Disable the stored fields (and metadata fields) entirely\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .storedFields('_none_');\n     *\n     * @param {Array|string} fields\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    storedFields(fields) {\n        this._body.stored_fields = fields;\n        return this;\n    }\n\n    /**\n     * Computes a document property dynamically based on the supplied `runtimeField`.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/runtime-search-request.html)\n     *\n     * Added in Elasticsearch v7.11.0\n     * [Release note](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/release-notes-7.11.0.html)\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     
.query(esb.matchAllQuery())\n     *     .runtimeMapping(\n     *       'sessionId-name',\n     *       esb.runtimeField(\n     *         'keyword',\n     *         `emit(doc['session_id'].value + '::' + doc['name'].value)`\n     *       )\n     *     )\n     *\n     * @example\n     * // runtime fields can also be used in query aggregation\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchAllQuery())\n     *     .runtimeMapping(\n     *       'sessionId-eventName',\n     *       esb.runtimeField(\n     *         'keyword',\n     *         `emit(doc['session_id'].value + '::' + doc['eventName'].value)`,\n     *       )\n     *     )\n     *     .agg(esb.cardinalityAggregation('uniqueCount', `sessionId-eventName`));\n     *\n     * @param {string} runtimeFieldName Name for the computed runtime mapping field.\n     * @param {RuntimeField} runtimeField Instance of RuntimeField\n     *\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     *\n     */\n    runtimeMapping(runtimeFieldName, runtimeField) {\n        checkType(runtimeField, RuntimeField);\n\n        setDefault(this._body, 'runtime_mappings', {});\n        this._body.runtime_mappings[runtimeFieldName] = runtimeField;\n        return this;\n    }\n\n    /**\n     * Computes one or more document properties dynamically based on supplied `RuntimeField`s.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/runtime-search-request.html)\n     *\n     * Added in Elasticsearch v7.11.0\n     * [Release note](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/release-notes-7.11.0.html)\n     *\n     * @example\n     * const fieldA = esb.runtimeField(\n     *       'keyword',\n     *       `emit(doc['session_id'].value + '::' + doc['name'].value)`\n     * );\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchAllQuery())\n     *     .runtimeMappings({\n     *       
'sessionId-name': fieldA,\n     *     })\n     *\n     * @param {Object} runtimeMappings Object with `runtimeFieldName` as key and instance of `RuntimeField` as the value.\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    runtimeMappings(runtimeMappings) {\n        checkType(runtimeMappings, Object);\n\n        Object.keys(runtimeMappings).forEach(runtimeFieldName =>\n            this.runtimeMapping(\n                runtimeFieldName,\n                runtimeMappings[runtimeFieldName]\n            )\n        );\n\n        return this;\n    }\n\n    /**\n     * Computes a document property dynamically based on the supplied `Script`.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchAllQuery())\n     *     .scriptField(\n     *         'test1',\n     *         esb.script('inline', \"doc['my_field_name'].value * 2\").lang('painless')\n     *     )\n     *     .scriptField(\n     *         'test2',\n     *         esb.script('inline', \"doc['my_field_name'].value * factor\")\n     *             .lang('painless')\n     *             .params({ factor: 2.0 })\n     *     );\n     *\n     * @example\n     * // Script fields can also access the actual `_source` document and extract\n     * // specific elements to be returned from it by using `params['_source']`.\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchAllQuery())\n     *     .scriptField('test1', \"params['_source']['message']\");\n     *\n     * @param {string} scriptFieldName\n     * @param {string|Script} script string or instance of `Script`\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    scriptField(scriptFieldName, script) {\n        setDefault(this._body, 'script_fields', {});\n\n        this._body.script_fields[scriptFieldName] = { script };\n        return this;\n    }\n\n    /**\n     * Sets given dynamic document properties to be 
computed using supplied `Script`s.\n     *\n     * Object should have `scriptFieldName` as key and `script` as the value.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchAllQuery())\n     *     .scriptFields({\n     *         test1: esb\n     *             .script('inline', \"doc['my_field_name'].value * 2\")\n     *             .lang('painless'),\n     *         test2: esb\n     *             .script('inline', \"doc['my_field_name'].value * factor\")\n     *             .lang('painless')\n     *             .params({ factor: 2.0 })\n     *     });\n     *\n     * @example\n     * // Script fields can also access the actual `_source` document and extract\n     * // specific elements to be returned from it by using `params['_source']`.\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchAllQuery())\n     *     .scriptFields({ test1: \"params['_source']['message']\" });\n     * @param {Object} scriptFields Object with `scriptFieldName` as key and `script` as the value.\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    scriptFields(scriptFields) {\n        checkType(scriptFields, Object);\n\n        Object.keys(scriptFields).forEach(scriptFieldName =>\n            this.scriptField(scriptFieldName, scriptFields[scriptFieldName])\n        );\n\n        return this;\n    }\n\n    /**\n     * Allows to return the doc value representation of a field for each hit.\n     * Doc value fields can work on fields that are not stored.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchAllQuery())\n     *     .docvalueFields(['test1', 'test2']);\n     *\n     * @param {Array<string>} fields\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    docvalueFields(fields) {\n        this._body.docvalue_fields = fields;\n        return this;\n    }\n\n    /**\n     * The 
`post_filter` is applied to the search hits at the very end of a search request,\n     * after aggregations have already been calculated.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.boolQuery().filter(esb.termQuery('brand', 'gucci')))\n     *     .agg(esb.termsAggregation('colors', 'color'))\n     *     .agg(\n     *         esb.filterAggregation(\n     *             'color_red',\n     *             esb.termQuery('color', 'red')\n     *         ).agg(esb.termsAggregation('models', 'model'))\n     *     )\n     *     .postFilter(esb.termQuery('color', 'red'));\n     *\n     * @param {Query} filterQuery The filter to be applied after aggregation.\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    postFilter(filterQuery) {\n        checkType(filterQuery, Query);\n\n        this._body.post_filter = filterQuery;\n        return this;\n    }\n\n    /**\n     * Allows to highlight search results on one or more fields. 
The implementation\n     * uses either the lucene `plain` highlighter, the fast vector highlighter (`fvh`)\n     * or `postings` highlighter.\n     *\n     * Note: The `postings` highlighter has been removed in elasticsearch 6.0.\n     * The `unified` highlighter outputs the same highlighting when\n     * `index_options` is set to `offsets`.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchAllQuery())\n     *     .highlight(esb.highlight('content'));\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(\n     *         esb.percolateQuery('query', 'doctype').document({\n     *             message: 'The quick brown fox jumps over the lazy dog'\n     *         })\n     *     )\n     *     .highlight(esb.highlight('message'));\n     *\n     * @param {Highlight} highlight\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    highlight(highlight) {\n        checkType(highlight, Highlight);\n\n        this._body.highlight = highlight;\n        return this;\n    }\n\n    /**\n     * Rescoring can help to improve precision by reordering just the top (eg 100 - 500)\n     * documents returned by the `query` and `post_filter` phases, using a secondary\n     * (usually more costly) algorithm, instead of applying the costly algorithm to\n     * all documents in the index.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchQuery('message', 'the quick brown').operator('or'))\n     *     .rescore(\n     *         esb.rescore(\n     *             50,\n     *             esb.matchPhraseQuery('message', 'the quick brown').slop(2)\n     *         )\n     *             .queryWeight(0.7)\n     *             .rescoreQueryWeight(1.2)\n     *     );\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchQuery('message', 'the quick brown').operator('or'))\n     *     
.rescore(\n     *         esb.rescore(\n     *             100,\n     *             esb.matchPhraseQuery('message', 'the quick brown').slop(2)\n     *         )\n     *             .queryWeight(0.7)\n     *             .rescoreQueryWeight(1.2)\n     *     )\n     *     .rescore(\n     *         esb.rescore(\n     *             10,\n     *             esb.functionScoreQuery().function(\n     *                 esb.scriptScoreFunction(\n     *                     esb.script('inline', 'Math.log10(doc.likes.value + 2)')\n     *                 )\n     *             )\n     *         ).scoreMode('multiply')\n     *     );\n     *\n     * @param {Rescore} rescore\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     * @throws {TypeError} If `query` is not an instance of `Rescore`\n     */\n    rescore(rescore) {\n        checkType(rescore, Rescore);\n\n        if (_.has(this._body, 'rescore')) {\n            if (!Array.isArray(this._body.rescore)) {\n                this._body.rescore = [this._body.rescore];\n            }\n\n            this._body.rescore.push(rescore);\n        } else this._body.rescore = rescore;\n\n        return this;\n    }\n\n    // TODO: Scroll related changes\n    // Maybe only slice needs to be supported.\n\n    /**\n     * Enables explanation for each hit on how its score was computed.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .explain(true);\n     *\n     * @param {boolean} enable\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained\n     */\n    explain(enable) {\n        this._body.explain = enable;\n        return this;\n    }\n\n    /**\n     * Returns a version for each search hit.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .version(true);\n     *\n     * @param {boolean} enable\n     * 
@returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    version(enable) {\n        this._body.version = enable;\n        return this;\n    }\n\n    /**\n     * Allows to configure different boost level per index when searching across\n     * more than one indices. This is very handy when hits coming from one index\n     * matter more than hits coming from another index.\n     *\n     * Alias for method `indicesBoost`.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .indexBoost('alias1', 1.4)\n     *     .indexBoost('index*', 1.3);\n     *\n     * @param {string} index Index wildcard expression or alias\n     * @param {number} boost\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    indexBoost(index, boost) {\n        return this.indicesBoost(index, boost);\n    }\n\n    /**\n     * Allows to configure different boost level per index when searching across\n     * more than one indices. 
This is very handy when hits coming from one index\n     * matter more than hits coming from another index.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .indicesBoost('alias1', 1.4)\n     *     .indicesBoost('index*', 1.3);\n     *\n     * @param {string} index Index wildcard expression or alias\n     * @param {number} boost\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    indicesBoost(index, boost) {\n        setDefault(this._body, 'indices_boost', []);\n\n        this._body.indices_boost.push({\n            [index]: boost\n        });\n        return this;\n    }\n\n    /**\n     * Exclude documents which have a `_score` less than the minimum specified in `min_score`.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.termQuery('user', 'kimchy'))\n     *     .minScore(0.5);\n     *\n     * @param {number} score\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    minScore(score) {\n        this._body.min_score = score;\n        return this;\n    }\n\n    /**\n     * Allows to collapse search results based on field values. 
The collapsing\n     * is done by selecting only the top sorted document per collapse key.\n     *\n     * The field used for collapsing must be a single valued `keyword` or `numeric`\n     * field with `doc_values` activated\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchQuery('message', 'elasticsearch'))\n     *     .collapse('user')\n     *     .sort(esb.sort('likes'))\n     *     .from(10);\n     *\n     * @example\n     * // Expand each collapsed top hits with the `inner_hits` option:\n     * const reqBody = esb.requestBodySearch()\n     *     .query(esb.matchQuery('message', 'elasticsearch'))\n     *     .collapse(\n     *         'user',\n     *         esb.innerHits('last_tweets')\n     *             .size(5)\n     *             .sort(esb.sort('date', 'asc')),\n     *         4\n     *     )\n     *     .sort(esb.sort('likes'))\n     *     .from(10);\n     *\n     * @param {string} field\n     * @param {InnerHits=} innerHits Allows to expand each collapsed top hits.\n     * @param {number=} maxConcurrentGroupRequests The number of concurrent\n     * requests allowed to retrieve the inner_hits' per group\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `innerHits` is not an instance of `InnerHits`\n     */\n    collapse(field, innerHits, maxConcurrentGroupRequests) {\n        const collapse = (this._body.collapse = { field });\n\n        if (!_.isNil(innerHits)) {\n            checkType(innerHits, InnerHits);\n\n            collapse.inner_hits = innerHits;\n            collapse.max_concurrent_group_searches = maxConcurrentGroupRequests;\n        }\n\n        return this;\n    }\n\n    /**\n     * Allows to use the results from the previous page to help the retrieval\n     * of the next page. 
The `search_after` parameter provides a live cursor.\n     *\n     * The parameter `from` must be set to `0` (or `-1`) when `search_after` is used.\n     *\n     * @example\n     * const reqBody = esb.requestBodySearch()\n     *     .size(10)\n     *     .query(esb.matchQuery('message', 'elasticsearch'))\n     *     .searchAfter(1463538857, 'tweet#654323')\n     *     .sorts([esb.sort('date', 'asc'), esb.sort('_uid', 'desc')]);\n     *\n     * @param {Array<*>} values The `sort values` of the last document to retrieve\n     * the next page of results\n     * @returns {RequestBodySearch} returns `this` so that calls can be chained.\n     */\n    searchAfter(values) {\n        this._body.search_after = values;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the request body search\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        const dsl = recursiveToJSON(this._body);\n\n        if (!_.isEmpty(this._knn))\n            dsl.knn =\n                this._knn.length == 1\n                    ? recMerge(this._knn)\n                    : this._knn.map(knn => recursiveToJSON(knn));\n\n        if (!_.isEmpty(this._aggs)) dsl.aggs = recMerge(this._aggs);\n\n        if (!_.isEmpty(this._suggests) || !_.isNil(this._suggestText)) {\n            dsl.suggest = recMerge(this._suggests);\n\n            if (!_.isNil(this._suggestText)) {\n                dsl.suggest.text = this._suggestText;\n            }\n        }\n\n        return dsl;\n    }\n}\n\nmodule.exports = RequestBodySearch;\n"
  },
  {
    "path": "src/core/rescore.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst Query = require('./query');\nconst { checkType, invalidParam, recursiveToJSON } = require('./util');\nconst { RESCORE_MODE_SET } = require('./consts');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-rescore.html';\n\nconst invalidScoreModeParam = invalidParam(\n    ES_REF_URL,\n    'score_mode',\n    RESCORE_MODE_SET\n);\n\n/**\n * A `rescore` request can help to improve precision by reordering just\n * the top (eg 100 - 500) documents returned by the `query` and `post_filter`\n * phases, using a secondary (usually more costly) algorithm, instead of\n * applying the costly algorithm to all documents in the index.\n *\n * The rescore phase is not executed when sort is used.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-rescore.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.matchQuery('message', 'the quick brown').operator('or'))\n *     .rescore(\n *         esb.rescore(\n *             50,\n *             esb.matchPhraseQuery('message', 'the quick brown').slop(2)\n *         )\n *             .queryWeight(0.7)\n *             .rescoreQueryWeight(1.2)\n *     );\n *\n * @example\n * const rescore = esb.rescore(\n *     10,\n *     esb.functionScoreQuery().function(\n *         esb.scriptScoreFunction(\n *             esb.script('inline', 'Math.log10(doc.likes.value + 2)')\n *         )\n *     )\n * ).scoreMode('multiply');\n *\n * @param {number=} windowSize\n * @param {Query=} rescoreQuery\n */\nclass Rescore {\n    // eslint-disable-next-line require-jsdoc\n    constructor(windowSize, rescoreQuery) {\n        this._body = {};\n        this._rescoreOpts = this._body.query = {};\n\n        if (!_.isNil(windowSize)) this._body.window_size = windowSize;\n        if (!_.isNil(rescoreQuery)) this.rescoreQuery(rescoreQuery);\n    }\n\n    
/**\n     * The number of docs which will be examined on each shard can be controlled\n     * by the window_size parameter, which defaults to `from` and `size`.\n     *\n     * @param {number} windowSize\n     * @returns {Rescore} returns `this` so that calls can be chained.\n     */\n    windowSize(windowSize) {\n        this._body.window_size = windowSize;\n        return this;\n    }\n\n    /**\n     * The query to execute on the Top-K results by the `query` and `post_filter` phases.\n     *\n     * @param {Query} rescoreQuery\n     * @returns {Rescore} returns `this` so that calls can be chained.\n     * @throws {TypeError} If `rescoreQuery` is not an instance of `Query`\n     */\n    rescoreQuery(rescoreQuery) {\n        checkType(rescoreQuery, Query);\n\n        this._rescoreOpts.rescore_query = rescoreQuery;\n        return this;\n    }\n\n    /**\n     * Control the relative importance of the original query.\n     *\n     * @param {number} weight Defaults to 1\n     * @returns {Rescore} returns `this` so that calls can be chained.\n     */\n    queryWeight(weight) {\n        this._rescoreOpts.query_weight = weight;\n        return this;\n    }\n\n    /**\n     * Control the relative importance of the rescore query.\n     *\n     * @param {number} weight Defaults to 1\n     * @returns {Rescore} returns `this` so that calls can be chained.\n     */\n    rescoreQueryWeight(weight) {\n        this._rescoreOpts.rescore_query_weight = weight;\n        return this;\n    }\n\n    /**\n     * Controls the way the scores are combined.\n     *\n     * @param {string} mode Can be one of `total`, `multiply`, `min`, `max`, `avg`.\n     * Defaults to `total`.\n     * @returns {Rescore} returns `this` so that calls can be chained.\n     */\n    scoreMode(mode) {\n        if (_.isNil(mode)) invalidScoreModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (!RESCORE_MODE_SET.has(modeLower)) {\n            invalidScoreModeParam(mode);\n        }\n\n    
    this._rescoreOpts.score_mode = modeLower;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for `rescore` request\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return recursiveToJSON(this._body);\n    }\n}\n\nmodule.exports = Rescore;\n"
  },
  {
    "path": "src/core/runtime-field.js",
    "content": "'use strict';\n\nconst _ = require('../_');\nconst validType = [\n    'boolean',\n    'composite',\n    'date',\n    'double',\n    'geo_point',\n    'ip',\n    'keyword',\n    'long',\n    'lookup'\n];\n\n/**\n * Class supporting the Elasticsearch runtime field.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/runtime.html)\n *\n * Added in Elasticsearch v7.11.0\n * [Release note](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/release-notes-7.11.0.html)\n *\n * @param {string=} type One of `boolean`, `composite`, `date`, `double`, `geo_point`, `ip`, `keyword`, `long`, `lookup`.\n * @param {string=} script Source of the script.\n *\n * @example\n * const field = esb.runtimeField('keyword', `emit(doc['sessionId'].value + '::' + doc['name'].value)`);\n */\nclass RuntimeField {\n    // eslint-disable-next-line require-jsdoc\n    constructor(type, script) {\n        this._body = {};\n        this._isTypeSet = false;\n        this._isScriptSet = false;\n\n        if (!_.isNil(type)) {\n            this.type(type);\n        }\n\n        if (!_.isNil(script)) {\n            this.script(script);\n        }\n    }\n\n    /**\n     * Sets the source of the script.\n     * @param {string} script\n     * @returns {RuntimeField} returns `this` so that calls can be chained.\n     */\n    script(script) {\n        this._body.script = {\n            source: script\n        };\n        this._isScriptSet = true;\n        return this;\n    }\n\n    /**\n     * Sets the type of the runtime field.\n     * @param {string} type One of `boolean`, `composite`, `date`, `double`, `geo_point`, `ip`, `keyword`, `long`, `lookup`.\n     * @returns {RuntimeField} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        const typeLower = type.toLowerCase();\n        if (!validType.includes(typeLower)) {\n            throw new Error(`\\`type\\` must be one of ${validType.join(', ')}`);\n        
}\n        this._body.type = typeLower;\n        this._isTypeSet = true;\n        return this;\n    }\n\n    /**\n     * Specifies the language the script is written in. Defaults to `painless` but\n     * may be set to any of languages listed in [Scripting](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html).\n     *\n     * @param {string} lang The language for the script.\n     * @returns {RuntimeField} returns `this` so that calls can be chained.\n     */\n    lang(lang) {\n        if (!_.isNil(this._body.script)) {\n            this._body.script.lang = lang;\n        }\n        return this;\n    }\n\n    /**\n     * Specifies any named parameters that are passed into the script as variables.\n     *\n     * @param {Object} params Named parameters to be passed to script.\n     * @returns {RuntimeField} returns `this` so that calls can be chained.\n     */\n    params(params) {\n        if (!_.isNil(this._body.script)) {\n            this._body.script.params = params;\n        }\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `script`.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        if (!this._isTypeSet) {\n            throw new Error('`type` should be set');\n        }\n\n        if (!this._isScriptSet) {\n            throw new Error('`script` should be set');\n        }\n\n        return this._body;\n    }\n}\n\nmodule.exports = RuntimeField;\n"
  },
  {
    "path": "src/core/script.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\n/**\n * Class supporting the Elasticsearch scripting API.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html)\n *\n * Note: `inline` script type was deprecated in [elasticsearch v5.0](https://www.elastic.co/guide/en/elasticsearch/reference/5.6/breaking_50_scripting.html).\n * `source` should be used instead. And similarly for `stored` scripts, type\n * `id` must be used instead. `file` scripts were removed as part of the\n * breaking changes in [elasticsearch v6.0](https://www.elastic.co/guide/en/elasticsearch/reference/6.0/breaking_60_scripting_changes.html#_file_scripts_removed)\n *\n * @param {string=} type One of `inline`, `stored`, `file`, `source`, `id`.\n * @param {string=} source Source of the script.\n * This needs to be specified if optional argument `type` is passed.\n *\n * @example\n * const script = esb.script('inline', \"doc['my_field'] * multiplier\")\n *     .lang('expression')\n *     .params({ multiplier: 2 });\n *\n * // cat \"log(_score * 2) + my_modifier\" > config/scripts/calculate-score.groovy\n * const script = esb.script()\n *     .lang('groovy')\n *     .file('calculate-score')\n *     .params({ my_modifier: 2 });\n */\nclass Script {\n    // eslint-disable-next-line require-jsdoc\n    constructor(type, source) {\n        this._isTypeSet = false;\n        this._body = {};\n\n        // NOTE: Script syntax changed in elasticsearch 5.6 to use `id`/`source`\n        // instead of `inline`/`source`/`file`. 
This needs to be handled\n        // somehow.\n        if (!_.isNil(type) && !_.isNil(source)) {\n            const typeLower = type.toLowerCase();\n\n            switch (typeLower) {\n                case 'inline':\n                    this.inline(source);\n                    break;\n\n                case 'source':\n                    this.source(source);\n                    break;\n\n                case 'stored':\n                    this.stored(source);\n                    break;\n\n                case 'id':\n                    this.id(source);\n                    break;\n\n                case 'file':\n                    this.file(source);\n                    break;\n\n                default:\n                    throw new Error(\n                        '`type` must be one of `inline`, `stored`, `file`'\n                    );\n            }\n        }\n    }\n\n    /**\n     * Print warning message to console namespaced by class name.\n     *\n     * @param {string} msg\n     * @private\n     */\n    _warn(msg) {\n        console.warn(`[Script] ${msg}`);\n    }\n\n    /**\n     * Print warning messages to not mix `Script` source\n     *\n     * @private\n     */\n    _checkMixedRepr() {\n        if (!this._isTypeSet) return;\n\n        this._warn(\n            'Script source(`inline`/`source`/`stored`/`id`/`file`) was already specified!'\n        );\n        this._warn('Overwriting.');\n\n        delete this._body.inline;\n        delete this._body.source;\n        delete this._body.stored;\n        delete this._body.id;\n        delete this._body.file;\n    }\n\n    /**\n     * Sets the type of script to be `inline` and specifies the source of the script.\n     *\n     * Note: This type was deprecated in elasticsearch v5.0. 
Use `source`\n     * instead if you are using elasticsearch `>= 5.0`.\n     *\n     * @param {string} scriptCode\n     * @returns {Script} returns `this` so that calls can be chained.\n     */\n    inline(scriptCode) {\n        this._checkMixedRepr();\n\n        this._body.inline = scriptCode;\n        this._isTypeSet = true;\n        return this;\n    }\n\n    /**\n     * Sets the type of script to be `source` and specifies the source of the script.\n     *\n     * Note: `source` is an alias for the `inline` type which was deprecated\n     * in elasticsearch v5.0. So this type is supported only in versions\n     * `>= 5.0`.\n     *\n     * @param {string} scriptCode\n     * @returns {Script} returns `this` so that calls can be chained.\n     */\n    source(scriptCode) {\n        this._checkMixedRepr();\n\n        this._body.source = scriptCode;\n        this._isTypeSet = true;\n        return this;\n    }\n\n    /**\n     * Specify the `stored` script by `id` which will be retrieved from cluster state.\n     *\n     * Note: This type was deprecated in elasticsearch v5.0. Use `id`\n     * instead if you are using elasticsearch `>= 5.0`.\n     *\n     * @param {string} scriptId The unique identifier for the stored script.\n     * @returns {Script} returns `this` so that calls can be chained.\n     */\n    stored(scriptId) {\n        this._checkMixedRepr();\n\n        this._body.stored = scriptId;\n        this._isTypeSet = true;\n        return this;\n    }\n\n    /**\n     * Specify the stored script to be used by it's `id` which will be retrieved\n     * from cluster state.\n     *\n     * Note: `id` is an alias for the `stored` type which was deprecated in\n     * elasticsearch v5.0. 
So this type is supported only in versions `>= 5.0`.\n     *\n     * @param {string} scriptId The unique identifier for the stored script.\n     * @returns {Script} returns `this` so that calls can be chained.\n     */\n    id(scriptId) {\n        this._checkMixedRepr();\n\n        this._body.id = scriptId;\n        this._isTypeSet = true;\n        return this;\n    }\n\n    /**\n     * Specify the `file` script by stored as a file in the scripts folder.\n     *\n     * Note: File scripts have been removed in elasticsearch 6.0. Instead, use\n     * stored scripts.\n     *\n     * @param {string} fileName The name of the script stored as a file in the scripts folder.\n     * For script file `config/scripts/calculate-score.groovy`,\n     * `fileName` should be `calculate-score`\n     * @returns {Script} returns `this` so that calls can be chained.\n     */\n    file(fileName) {\n        this._checkMixedRepr();\n\n        this._body.file = fileName;\n        this._isTypeSet = true;\n        return this;\n    }\n\n    /**\n     * Specifies the language the script is written in. Defaults to `painless` but\n     * may be set to any of languages listed in [Scripting](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html).\n     * The default language may be changed in the `elasticsearch.yml` config file by setting\n     * `script.default_lang` to the appropriate language.\n     *\n     * For a `file` script,  it should correspond with the script file suffix.\n     * `groovy` for `config/scripts/calculate-score.groovy`.\n     *\n     * Note: The Groovy, JavaScript, and Python scripting languages were\n     * deprecated in elasticsearch 5.0 and removed in 6.0. 
Use painless instead.\n     *\n     * @param {string} lang The language for the script.\n     * @returns {Script} returns `this` so that calls can be chained.\n     */\n    lang(lang) {\n        this._body.lang = lang;\n        return this;\n    }\n\n    /**\n     * Specifies any named parameters that are passed into the script as variables.\n     *\n     * @param {Object} params Named parameters to be passed to script.\n     * @returns {Script} returns `this` so that calls can be chained.\n     */\n    params(params) {\n        this._body.params = params;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `script`.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        // recursiveToJSON doesn't seem to be needed here\n        return this._body;\n    }\n}\n\nmodule.exports = Script;\n"
  },
  {
    "path": "src/core/search-template.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst { recursiveToJSON } = require('./util');\n\n/**\n * Class supporting the Elasticsearch search template API.\n *\n * The `/_search/template` endpoint allows to use the mustache language to\n * pre render search requests, before they are executed and fill existing\n * templates with template parameters.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html)\n *\n * @param {string=} type One of `inline`, `id`, `file`. `id` is also\n * aliased as `indexed`\n * @param {string|Object=} source Source of the search template.\n * This needs to be specified if optional argument `type` is passed.\n *\n * @example\n * const templ = esb.searchTemplate('inline', {\n *     query: esb.matchQuery('{{my_field}}', '{{my_value}}'),\n *     size: '{{my_size}}'\n * }).params({\n *     my_field: 'message',\n *     my_value: 'some message',\n *     my_size: 5\n * });\n *\n * @example\n * const templ = new esb.SearchTemplate(\n *     'inline',\n *     '{ \"query\": { \"terms\": {{#toJson}}statuses{{/toJson}} }}'\n * ).params({\n *     statuses: {\n *         status: ['pending', 'published']\n *     }\n * });\n *\n * @example\n * const templ = new esb.SearchTemplate(\n *     'inline',\n *     '{ \"query\": { \"bool\": { \"must\": {{#toJson}}clauses{{/toJson}} } } }'\n * ).params({\n *     clauses: [\n *         esb.termQuery('user', 'boo'),\n *         esb.termQuery('user', 'bar'),\n *         esb.termQuery('user', 'baz')\n *     ]\n * });\n */\nclass SearchTemplate {\n    // eslint-disable-next-line require-jsdoc\n    constructor(type, source) {\n        this._isTypeSet = false;\n        this._body = {};\n\n        if (!_.isNil(type) && !_.isNil(source)) {\n            const typeLower = type.toLowerCase();\n\n            if (\n                typeLower !== 'inline' &&\n                typeLower !== 'id' &&\n                typeLower !== 'indexed' && // alias 
for id\n                typeLower !== 'file'\n            ) {\n                throw new Error(\n                    '`type` must be one of `inline`, `id`, `indexed`, `file`'\n                );\n            }\n\n            this[typeLower](source);\n        }\n    }\n\n    /**\n     * Print warning message to console namespaced by class name.\n     *\n     * @param {string} msg\n     * @private\n     */\n    _warn(msg) {\n        console.warn(`[SearchTemplate] ${msg}`);\n    }\n\n    /**\n     * Print warning messages to not mix `SearchTemplate` source\n     *\n     * @private\n     */\n    _checkMixedRepr() {\n        if (this._isTypeSet) {\n            this._warn(\n                'Search template source(`inline`/`id`/`file`) was already specified!'\n            );\n            this._warn('Overwriting.');\n\n            delete this._body.inline;\n            delete this._body.id;\n            delete this._body.file;\n        }\n    }\n\n    /**\n     * Helper method to set the type and source\n     *\n     * @param {string} type\n     * @param {*} source\n     * @returns {SearchTemplate} returns `this` so that calls can be chained.\n     * @private\n     */\n    _setSource(type, source) {\n        this._checkMixedRepr();\n\n        this._body[type] = source;\n        this._isTypeSet = true;\n        return this;\n    }\n\n    /**\n     * Sets the type of search template to be `inline` and specifies the\n     * template with `query` and other optional fields such as `size`.\n     *\n     * @param {string|Object} templ Either an object or a string.\n     * @returns {SearchTemplate} returns `this` so that calls can be chained.\n     */\n    inline(templ) {\n        return this._setSource('inline', templ);\n    }\n\n    /**\n     * Specify the indexed search template by `templateName` which will be\n     * retrieved from cluster state.\n     *\n     * @param {string} templId The unique identifier for the indexed template.\n     * @returns {SearchTemplate} returns 
`this` so that calls can be chained.\n     */\n    id(templId) {\n        return this._setSource('id', templId);\n    }\n\n    /**\n     * Specify the indexed search template by `templateName` which will be\n     * retrieved from cluster state.\n     *\n     * Alias for `SearchTemplate.id`\n     *\n     * @param {string} templId The unique identifier for the indexed template.\n     * @returns {SearchTemplate} returns `this` so that calls can be chained.\n     */\n    indexed(templId) {\n        return this.id(templId);\n    }\n\n    /**\n     * Specify the search template by filename stored in the scripts folder,\n     * with `mustache` extension.\n     *\n     * @example\n     * // `templId` - Name of the query template in config/scripts/, i.e.,\n     * // storedTemplate.mustache.\n     * const templ = new esb.SearchTemplate('file', 'storedTemplate').params({\n     *     query_string: 'search for these words'\n     * });\n     *\n     * @param {string} fileName The name of the search template stored as a file\n     * in the scripts folder.\n     * For file `config/scripts/storedTemplate.mustache`,\n     * `fileName` should be `storedTemplate`\n     * @returns {SearchTemplate} returns `this` so that calls can be chained.\n     */\n    file(fileName) {\n        return this._setSource('file', fileName);\n    }\n\n    /**\n     * Specifies any named parameters that are used to render the search template.\n     *\n     * @param {Object} params Named parameters to be used for rendering.\n     * @returns {SearchTemplate} returns `this` so that calls can be chained.\n     */\n    params(params) {\n        this._body.params = params;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the Search Template.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return recursiveToJSON(this._body);\n    }\n}\n\nmodule.exports = 
SearchTemplate;\n"
  },
  {
    "path": "src/core/sort.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst Query = require('./query');\nconst Script = require('./script');\nconst { checkType, invalidParam, recursiveToJSON } = require('./util');\nconst { SORT_MODE_SET, UNIT_SET } = require('./consts');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-sort.html';\n\nconst invalidOrderParam = invalidParam(ES_REF_URL, 'order', \"'asc' or 'desc'\");\nconst invalidModeParam = invalidParam(ES_REF_URL, 'mode', SORT_MODE_SET);\nconst invalidDistanceTypeParam = invalidParam(\n    ES_REF_URL,\n    'distance_type',\n    \"'plane' or 'arc'\"\n);\nconst invalidUnitParam = invalidParam(ES_REF_URL, 'unit', UNIT_SET);\n\n/**\n * Allows creating and configuring sort on specified field.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-sort.html)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.termQuery('user', 'kimchy'))\n *     .sort(esb.sort('post_date', 'asc'))\n *\n * @param {string=} field The field to sort on.\n * If a script is used to specify the sort order, `field` should be omitted.\n * @param {string=} order The `order` option can have the following values.\n * `asc`, `desc` to sort in ascending, descending order respectively.\n */\nclass Sort {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, order) {\n        this._opts = {};\n        this._geoPoint = null;\n        this._script = null;\n\n        if (!_.isNil(field)) this._field = field;\n        if (!_.isNil(order)) this.order(order);\n    }\n\n    /**\n     * Set order for sorting. 
The order defaults to `desc` when sorting on the `_score`,\n     * and defaults to `asc` when sorting on anything else.\n     *\n     * @param {string} order The `order` option can have the following values.\n     * `asc`, `desc` to sort in ascending, descending order respectively.\n     * @returns {Sort} returns `this` so that calls can be chained.\n     */\n    order(order) {\n        if (_.isNil(order)) invalidOrderParam(order);\n\n        const orderLower = order.toLowerCase();\n        if (orderLower !== 'asc' && orderLower !== 'desc') {\n            invalidOrderParam(order);\n        }\n\n        this._opts.order = orderLower;\n        return this;\n    }\n\n    /**\n     * Elasticsearch supports sorting by array or multi-valued fields.\n     * The `mode` option controls what array value is picked for sorting the\n     * document it belongs to.\n     *\n     * The `mode` option can have the following values:\n     *\n     * - `min` - Pick the lowest value.\n     * - `max` - Pick the highest value.\n     * - `sum` - Use the sum of all values as sort value.\n     *   Only applicable for number based array fields.\n     * - `avg` - Use the average of all values as sort value.\n     *   Only applicable for number based array fields.\n     * - `median` - Use the median of all values as sort value.\n     *   Only applicable for number based array fields.\n     *\n     * @example\n     * const sort = esb.sort('price', 'asc').mode('avg');\n     *\n     * @param {string} mode One of `avg`, `min`, `max`, `sum` and `median`.\n     * @returns {Sort} returns `this` so that calls can be chained.\n     */\n    mode(mode) {\n        if (_.isNil(mode)) invalidModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (!SORT_MODE_SET.has(modeLower)) {\n            invalidModeParam(mode);\n        }\n\n        this._opts.mode = modeLower;\n        return this;\n    }\n\n    /**\n     * Defines on which nested object to sort. 
The actual sort field must be a direct\n     * field inside this nested object. When sorting by nested field, this field\n     * is mandatory.\n     *\n     * Note: This method has been deprecated in elasticsearch 6.1. From 6.1 and\n     * later, use `nested` method instead.\n     *\n     * @example\n     * const sort = esb.sort('offer.price', 'asc')\n     *     .nestedPath('offer')\n     *     .nestedFilter(esb.termQuery('offer.color', 'blue'));\n     *\n     * @param {string} path Nested object to sort on\n     * @returns {Sort} returns `this` so that calls can be chained.\n     */\n    nestedPath(path) {\n        this._opts.nested_path = path;\n        return this;\n    }\n\n    /**\n     * A filter that the inner objects inside the nested path should match with in order\n     * for its field values to be taken into account by sorting. By default no\n     * `nested_filter` is active.\n     *\n     * Note: This method has been deprecated in elasticsearch 6.1. From 6.1 and\n     * later, use `nested` method instead.\n     *\n     * @example\n     * const sort = esb.sort('offer.price', 'asc')\n     *     .nestedPath('offer')\n     *     .nestedFilter(esb.termQuery('offer.color', 'blue'));\n     *\n     * @param {Query} filterQuery Filter query\n     * @returns {Sort} returns `this` so that calls can be chained.\n     * @throws {TypeError} If filter query is not an instance of `Query`\n     */\n    nestedFilter(filterQuery) {\n        checkType(filterQuery, Query);\n\n        this._opts.nested_filter = filterQuery;\n        return this;\n    }\n\n    /**\n     * Defines on which nested object to sort and the filter that the inner objects inside\n     * the nested path should match with in order for its field values to be taken into\n     * account by sorting\n     *\n     * Note: This method is incompatible with elasticsearch 6.0 and older.\n     * Use it only with elasticsearch 6.1 and later.\n     *\n     * @example\n     * const sort = esb.sort('offer.price', 
'asc')\n     *     .nested({\n     *          path: 'offer',\n     *          filter: esb.termQuery('offer.color', 'blue')\n     *      });\n     *\n     * @param {Object} nested Nested config that contains path and filter\n     * @param {string} nested.path Nested object to sort on\n     * @param {Query} nested.filter Filter query\n     * @returns {Sort} returns `this` so that calls can be chained.\n     * @throws {TypeError} If filter query is not an instance of `Query`\n     */\n    nested(nested) {\n        const { filter } = nested;\n        if (!_.isNil(filter)) checkType(filter, Query);\n\n        this._opts.nested = nested;\n        return this;\n    }\n\n    /**\n     * The missing parameter specifies how docs which are missing the field should\n     * be treated: The missing value can be set to `_last`, `_first`, or a custom value\n     * (that will be used for missing docs as the sort value). The default is `_last`.\n     *\n     * @example\n     * const sort = esb.sort('price').missing('_last');\n     *\n     * @param {string|number} value\n     * @returns {Sort} returns `this` so that calls can be chained.\n     */\n    missing(value) {\n        this._opts.missing = value;\n        return this;\n    }\n\n    /**\n     * By default, the search request will fail if there is no mapping associated with\n     * a field. The `unmapped_type` option allows to ignore fields that have no mapping\n     * and not sort by them. 
The value of this parameter is used to determine what sort\n     * values to emit.\n     *\n     * @example\n     * const sort = esb.sort('price').unmappedType('long');\n     *\n     * @param {string} type\n     * @returns {Sort} returns `this` so that calls can be chained.\n     */\n    unmappedType(type) {\n        this._opts.unmapped_type = type;\n        return this;\n    }\n\n    /**\n     * Sorts documents by distance of the geo point field from reference point.\n     * If multiple reference points are specified, the final distance for a\n     * document will then be `min`/`max`/`avg` (defined via `mode`) distance of all\n     * points contained in the document to all points given in the sort request.\n     *\n     * @example\n     * const sort = esb.sort('pin.location', 'asc')\n     *     .geoDistance([-70, 40])\n     *     .unit('km')\n     *     .mode('min')\n     *     .distanceType('arc');\n     *\n     * @param {GeoPoint|Object|Array|string} geoPoint Reference point or array of\n     * points to calculate distance from. 
Can be expressed using the `GeoPoint` class,\n     * `Object` with `lat`, `lon` keys, as a string either `lat,lon` or geohash\n     * or as Array with GeoJSON format `[lon, lat]`\n     * @returns {Sort} returns `this` so that calls can be chained.\n     */\n    geoDistance(geoPoint) {\n        this._geoPoint = geoPoint;\n        return this;\n    }\n\n    /**\n     * Sets the distance calculation mode, `arc` or `plane`.\n     * The `arc` calculation is the more accurate.\n     * The `plane` is the faster but least accurate.\n     *\n     * @param {string} type\n     * @returns {Sort} returns `this` so that calls can be chained\n     * @throws {Error} If `type` is neither `plane` nor `arc`.\n     */\n    distanceType(type) {\n        if (_.isNil(type)) invalidDistanceTypeParam(type);\n\n        const typeLower = type.toLowerCase();\n        if (typeLower !== 'plane' && typeLower !== 'arc') {\n            invalidDistanceTypeParam(type);\n        }\n\n        this._opts.distance_type = typeLower;\n        return this;\n    }\n\n    /**\n     * Sets the distance unit.  Valid values are:\n     * mi (miles), in (inches), yd (yards),\n     * km (kilometers), cm (centimeters), mm (millimeters),\n     * ft(feet), NM(nauticalmiles)\n     *\n     * @param {string} unit Distance unit, default is `m`(meters).\n     * @returns {Sort} returns `this` so that calls can be chained\n     * @throws {Error} If Unit is outside the accepted set.\n     */\n    unit(unit) {\n        if (!UNIT_SET.has(unit)) {\n            invalidUnitParam(unit);\n        }\n\n        this._opts.unit = unit;\n        return this;\n    }\n\n    /**\n     * Sorts based on custom script. 
When sorting on a field, scores are not computed.\n     *\n     * @example\n     * const sort = esb.sort()\n     *    .type('number')\n     *    .script(\n     *        esb.script('inline', \"doc['field_name'].value * params.factor\")\n     *            .lang('painless')\n     *            .params({ factor: 1.1 })\n     *    )\n     *    .order('asc');\n     *\n     * @param {Script} script\n     * @returns {Sort} returns `this` so that calls can be chained\n     * @throws {TypeError} If `script` is not an instance of `Script`\n     */\n    script(script) {\n        checkType(script, Script);\n\n        this._script = script;\n        return this;\n    }\n\n    /**\n     * Sets the format of the date when sorting a date field.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/7.17/mapping-date-format.html#built-in-date-formats)\n     *\n     * Note: The format argument is [supported since version 7.13](https://www.elastic.co/guide/en/elasticsearch/reference/7.13/release-notes-7.13.0.html) of ElasticSearch.\n     *\n     * @param {string} type\n     * @returns {Sort} returns `this` so that calls can be chained\n     */\n    type(type) {\n        this._opts.type = type;\n        return this;\n    }\n\n    /**\n     * Sets the format of the date when sorting a date field.\n     *\n     *  [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/7.17/mapping-date-format.html#built-in-date-formats)\n     *\n     * @param {string} fmt\n     * @returns {Sort} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._opts.format = fmt;\n        return this;\n    }\n\n    /**\n     * Reverse the sort order. 
Valid during sort types: field, geo distance, and script.\n     *\n     * @param {boolean} reverse If sort should be in reverse order.\n     * @returns {Sort} returns `this` so that calls can be chained\n     */\n    reverse(reverse) {\n        this._opts.reverse = reverse;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for `sort` parameter.\n     *\n     * @override\n     * @returns {Object|string} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        const geoPointIsNil = _.isNil(this._geoPoint);\n        const scriptIsNil = _.isNil(this._script);\n\n        if (geoPointIsNil && scriptIsNil) {\n            if (_.isEmpty(this._opts)) return this._field;\n\n            if (\n                Object.keys(this._opts).length === 1 &&\n                _.has(this._opts, 'order')\n            ) {\n                return { [this._field]: this._opts.order };\n            }\n        }\n\n        let repr;\n\n        // Should I pick only the accepted properties here?\n        if (!geoPointIsNil) {\n            repr = {\n                _geo_distance: Object.assign(\n                    { [this._field]: this._geoPoint },\n                    this._opts\n                )\n            };\n        } else if (!scriptIsNil) {\n            repr = {\n                _script: Object.assign({ script: this._script }, this._opts)\n            };\n        } else {\n            repr = { [this._field]: this._opts };\n        }\n\n        return recursiveToJSON(repr);\n    }\n}\n\nmodule.exports = Sort;\n"
  },
  {
    "path": "src/core/suggester.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\n/**\n * Base class implementation for all suggester types.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class should be extended and used, as validation against the class\n * type is present in various places.\n *\n * @param {string} suggesterType The type of suggester.\n * Can be one of `term`, `phrase`, `completion`\n * @param {string} name The name of the Suggester, an arbitrary identifier\n * @param {string=} field The field to fetch the candidate suggestions from.\n *\n * @throws {Error} if `name` is empty\n * @throws {Error} if `suggesterType` is empty\n */\nclass Suggester {\n    // eslint-disable-next-line require-jsdoc\n    constructor(suggesterType, name, field) {\n        if (_.isEmpty(suggesterType))\n            throw new Error('Suggester `suggesterType` cannot be empty');\n        if (_.isEmpty(name))\n            throw new Error('Suggester `name` cannot be empty');\n\n        this.name = name;\n        this.suggesterType = suggesterType;\n\n        this._body = {};\n        this._opts = this._body[name] = {};\n        this._suggestOpts = this._opts[suggesterType] = {};\n\n        if (!_.isNil(field)) this._suggestOpts.field = field;\n    }\n\n    /**\n     * Sets field to fetch the candidate suggestions from. 
This is a required option\n     * that either needs to be set globally or per suggestion.\n     *\n     * @param {string} field a valid field name\n     * @returns {Suggester} returns `this` so that calls can be chained\n     */\n    field(field) {\n        this._suggestOpts.field = field;\n        return this;\n    }\n\n    /**\n     * Sets the number of suggestions to return (defaults to `5`).\n     *\n     * @param {number} size\n     * @returns {Suggester} returns `this` so that calls can be chained.\n     */\n    size(size) {\n        this._suggestOpts.size = size;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `suggester`\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch DSL\n     */\n    toJSON() {\n        return this._body;\n    }\n}\n\nmodule.exports = Suggester;\n"
  },
  {
    "path": "src/core/util.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst inspect = require('./inspect');\n\n/**\n * Check if the object is instance of class type\n *\n * @private\n * @param {Object} instance\n * @param {Class} type\n * @throws {TypeError} Object must be an instance of class type\n */\nexports.checkType = function checkType(instance, type) {\n    if (!(instance instanceof type)) {\n        if (_.isNil(instance)) {\n            console.warn(\n                `Was expecting instance of ${type.name} but got ${instance}!`\n            );\n        } else\n            console.warn(\n                `${inspect(instance)} is of the type ${typeof instance}`\n            );\n\n        throw new TypeError(`Argument must be an instance of ${type.name}`);\n    }\n};\n\n/**\n * Wrapper for calling constructor with given parameters\n *\n * @private\n * @param {function(new:T, ...*)} Cls The class constructor.\n * @returns {function(...*): T} Wrapper of the class constructor which creates an instance of given Class\n * @template T\n */\nexports.constructorWrapper = function constructorWrapper(Cls) {\n    return (...args) => new Cls(...args);\n};\n\n/**\n * Check if the number is in the given range.\n * Returns `true` is number is less than or equal to min, max.\n *\n * @private\n * @param {number} num\n * @param {number} min\n * @param {number} max\n * @returns {boolean} `true` if in range, `false` otherwise\n */\nfunction between(num, min, max) {\n    return num >= min && num <= max;\n}\n\n/**\n * Finds and returns the first position of first digit in string\n *\n * @private\n * @param {string} str\n * @returns {number} Index of first digit in string.\n * `-1` if digit is not found in string\n */\nexports.firstDigitPos = function firstDigitPos(str) {\n    if (_.isEmpty(str)) return -1;\n\n    const len = str.length;\n    for (let idx = 0; idx < len; idx++) {\n        // '0'.charCodeAt(0) => 48\n        // '9'.charCodeAt(0) => 57\n        if 
(between(str.charCodeAt(idx), 48, 57)) return idx;\n    }\n\n    return -1;\n};\n\n/**\n * Convert class object to JSON by recursively calling `toJSON` on the class members.\n *\n * @private\n * @param {*} obj\n * @returns {Object} JSON representation of class.\n */\nexports.recursiveToJSON = function recursiveToJSON(obj) {\n    // Each element in array needs to be recursively JSONified\n    if (Array.isArray(obj)) return obj.map(x => recursiveToJSON(x));\n\n    // Strings, numbers, booleans\n    if (!_.isObject(obj)) return obj;\n\n    // If it is a native object, we'll not get anything different by calling toJSON\n    // If it is a custom object, toJSON needs to be called\n    // Custom object toJSON might return any datatype\n    // So let us handle it recursively\n    if (_.hasIn(obj, 'toJSON') && obj.constructor !== Object) {\n        return recursiveToJSON(obj.toJSON());\n    }\n\n    // Custom object toJSON or native object might have values which need to be JSONified\n    const json = {};\n    for (const key of Object.keys(obj)) {\n        json[key] = recursiveToJSON(obj[key]);\n    }\n\n    return json;\n};\n\n/**\n * Helper function for creating function which will log warning and throw error\n * on receiving invalid parameter\n *\n * @private\n * @param {string} refUrl\n * @param {string} paramName\n * @param {*} validValues\n * @returns {function}\n */\nexports.invalidParam = function invalidParam(refUrl, paramName, validValues) {\n    return (paramVal, referenceUrl = refUrl) => {\n        referenceUrl && console.log(`See ${referenceUrl}`);\n        console.warn(`Got '${paramName}' - '${paramVal}'`);\n\n        const validValuesStr = _.isString(validValues)\n            ? 
validValues\n            : inspect(validValues);\n        throw new Error(\n            `The '${paramName}' parameter should be one of ${validValuesStr}`\n        );\n    };\n};\n\n/**\n * Set given default value on object if key is not present.\n *\n * @private\n * @param {Object} obj\n * @param {string} key\n * @param {*} value\n * @returns {boolean} `true` if the given object did not have `key` and `false` otherwise.\n */\nexports.setDefault = function setDefault(obj, key, value) {\n    const itHasNot = !_.has(obj, key);\n    if (itHasNot) obj[key] = value;\n    return itHasNot;\n};\n"
  },
  {
    "path": "src/index.d.ts",
    "content": "// Type definitions for elastic-builder\n// Project: https://elastic-builder.js.org\n// Definitions by: Suhas Karanth <sudo.suhas@gmail.com>\n\nexport = esb;\n\ndeclare namespace esb {\n    /**\n     * The `RequestBodySearch` object provides methods generating an elasticsearch\n     * search request body. The search request can be executed with a search DSL,\n     * which includes the Query DSL, within its body.\n     */\n    export class RequestBodySearch {\n        /**\n         * Define query on the search request body using the Query DSL.\n         *\n         * @param {Query} query\n         */\n        query(query: Query): this;\n\n        /**\n         * Sets knn on the request body.\n         *\n         * @param {KNN|KNN[]} knn\n         */\n        kNN(knn: KNN | KNN[]): this;\n\n        /**\n         * Sets aggregation on the request body.\n         * Alias for method `aggregation`\n         *\n         * @param {Aggregation} agg Any valid `Aggregation`\n         * @throws {TypeError} If `agg` is not an instance of `Aggregation`\n         */\n        agg(agg: Aggregation): this;\n\n        /**\n         * Sets aggregation on the request body.\n         *\n         * @param {Aggregation} agg Any valid `Aggregation`\n         * @throws {TypeError} If `agg` is not an instance of `Aggregation`\n         */\n        aggregation(agg: Aggregation): this;\n\n        /**\n         * Sets multiple aggregation items on the request body.\n         * Alias for method `aggregations`\n         *\n         * @param {Array<Aggregation>} aggs Array of valid `Aggregation` items\n         * @throws {TypeError} If `aggs` is not an instance of `Array`\n         * @throws {TypeError} If `aggs` contains instances not of type `Aggregation`\n         */\n        aggs(aggs: Aggregation[]): this;\n\n        /**\n         * Sets multiple aggregation items on the request body.\n         *\n         * @param {Array<Aggregation>} aggs Array of valid `Aggregation` 
items\n         * @throws {TypeError} If `aggs` is not an instance of `Array`\n         * @throws {TypeError} If `aggs` contains instances not of type `Aggregation`\n         */\n        aggregations(aggs: Aggregation[]): this;\n\n        /**\n         * Sets suggester on the request body.\n         *\n         * @param {Suggester} suggest Any valid `Suggester`\n         * @throws {TypeError} If `suggest` is not an instance of `Suggester`\n         */\n        suggest(suggest: Suggester): this;\n\n        /**\n         * Sets the global suggest text to avoid repetition for multiple suggestions.\n         *\n         * @param {string} txt Global suggest text\n         */\n        suggestText(txt: string): this;\n\n        /**\n         * Sets a search timeout, bounding the search request to be executed within\n         * the specified time value and bail with the hits accumulated up to that\n         * point when expired.\n         *\n         * @param {string} timeout Duration can be specified using time units.\n         * Defaults to no timeout.\n         */\n        timeout(timeout: string): this;\n\n        /**\n         * To retrieve hits from a certain offset.\n         *\n         * @param {number} from Defaults to 0.\n         */\n        from(from: number): this;\n\n        /**\n         * The number of hits to return. If you do not care about getting some hits back\n         * but only about the number of matches and/or aggregations, setting the value\n         * to 0 will help performance.\n         *\n         * @param {number} size Defaults to 10.\n         */\n        size(size: number): this;\n\n        /**\n         * The maximum number of documents to collect for each shard, upon reaching which\n         * the query execution will terminate early. 
If set, the response will have a\n         * boolean field `terminated_early` to indicate whether the query execution has\n         * actually terminated early.\n         *\n         * @param {number} numberOfDocs Maximum number of documents to collect for each shard.\n         * Defaults to no limit.\n         */\n        terminateAfter(numberOfDocs: number): this;\n\n        /**\n         * Allows to add sort on specific field. The sort can be reversed as well.\n         * The sort is defined on a per field level, with special field name for `_score` to\n         * sort by score, and `_doc` to sort by index order.\n         *\n         * @param {Sort} sort\n         * @throws {TypeError} If parameter `sort` is not an instance of `Sort`.\n         */\n        sort(sort: Sort): this;\n\n        /**\n         * Allows to add multiple sort on specific fields. Each sort can be reversed as well.\n         * The sort is defined on a per field level, with special field name for _score to\n         * sort by score, and _doc to sort by index order.\n         *\n         * @param {Array<Sort>} sorts Array of sorts\n         * @throws {TypeError} If any item in parameter `sorts` is not an instance of `Sort`.\n         */\n        sorts(sorts: Sort[]): this;\n\n        /**\n         * When sorting on a field, scores are not computed. By setting `track_scores` to true,\n         * scores will still be computed and tracked.\n         *\n         * @param {boolean} enable\n         */\n        trackScores(enable: boolean): this;\n\n        /**\n         * The `track_total_hits` parameter allows you to control how the total number of hits\n         * should be tracked. 
Passing `false` can increase performance in some situations.\n         * (Added in elasticsearch@7)\n         *\n         * Pass true, false, or the upper limit of hits you want tracked.\n         *\n         * @param {boolean|number} enable\n         */\n        trackTotalHits(enable: boolean | number): this;\n\n        /**\n         * Allows to control how the `_source` field is returned with every hit.\n         * You can turn off `_source` retrieval by passing `false`.\n         * It also accepts one(string) or more wildcard(array) patterns to control\n         * what parts of the `_source` should be returned\n         * An object can also be used to specify the wildcard patterns for `includes` and `excludes`.\n         *\n         * @param {boolean|string|Array|Object} source\n         */\n        source(source: boolean | string | string[] | object): this;\n\n        /**\n         * The `stored_fields` parameter is about fields that are explicitly marked as stored in the mapping.\n         * Selectively load specific stored fields for each document represented by a search hit\n         * using array of stored fields.\n         * An empty array will cause only the `_id` and `_type` for each hit to be returned.\n         * To disable the stored fields (and metadata fields) entirely use: `_none_`\n         *\n         * @param {Array|string} fields\n         */\n        storedFields(fields: object | string): this;\n\n\n\n        /**\n        * Computes a document property dynamically based on the supplied `runtimeField`.\n        *\n        * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/runtime-search-request.html)\n        *\n        * Added in Elasticsearch v7.11.0\n        * [Release note](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/release-notes-7.11.0.html)\n        *\n        * @example\n        * const reqBody = esb.requestBodySearch()\n        *     .query(esb.matchAllQuery())\n        *     
.runtimeMapping(\n        *       'sessionId-name',\n        *       esb.runtimeField(\n        *         'keyword',\n        *         `emit(doc['session_id'].value + '::' + doc['name'].value)`\n        *       )\n        *     )\n        *\n        * @example\n        * // runtime fields can also be used in query aggregation\n        * const reqBody = esb.requestBodySearch()\n        *     .query(esb.matchAllQuery())\n        *     .runtimeMapping(\n        *       'sessionId-eventName',\n        *       esb.runtimeField(\n        *         'keyword',\n        *         `emit(doc['session_id'].value + '::' + doc['eventName'].value)`,\n        *       )\n        *     )\n        *     .agg(esb.cardinalityAggregation('uniqueCount', `sessionId-eventName`)),;\n        *\n        * @param {string} runtimeFieldName Name for the computed runtime mapping field.\n        * @param {RuntimeField} runtimeField Instance of RuntimeField\n        *\n        * @returns {RequestBodySearch} returns `this` so that calls can be chained\n        *\n        */\n        runtimeMapping(runtimeFieldName: string, runtimeField: RuntimeField): this;\n\n\n        /**\n        * Computes one or more document properties dynamically based on supplied `RuntimeField`s.\n        *\n        * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/runtime-search-request.html)\n        *\n        * Added in Elasticsearch v7.11.0\n        * [Release note](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/release-notes-7.11.0.html)\n        *\n        * @example\n        * const fieldA = esb.runtimeField(\n        *       'keyword',\n        *       `emit(doc['session_id'].value + '::' + doc['name'].value)`\n        * );\n        * const reqBody = esb.requestBodySearch()\n        *     .query(esb.matchAllQuery())\n        *     .runtimeMappings({\n        *       'sessionId-name': fieldA,\n        *     })\n        *\n        * @param {Object} 
runtimeMappings Object with `runtimeFieldName` as key and instance of `RuntimeField` as the value.\n        * @returns {RequestBodySearch} returns `this` so that calls can be chained\n        */\n        runtimeMappings(runtimeMappings: object): this;\n\n        /**\n         * Computes a document property dynamically based on the supplied `Script`.\n         *\n         * @param {string} scriptFieldName\n         * @param {string|Script} script string or instance of `Script`\n         */\n        scriptField(scriptFieldName: string, script: string | Script): this;\n\n        /**\n         * Sets given dynamic document properties to be computed using supplied `Script`s.\n         * Object should have `scriptFieldName` as key and `script` as the value.\n         *\n         * @param {object} scriptFields Object with `scriptFieldName` as key and `script` as the value.\n         */\n        scriptFields(scriptFields: object): this;\n\n        /**\n         * Allows to return the doc value representation of a field for each hit.\n         * Doc value fields can work on fields that are not stored.\n         *\n         * @param {Array<string>} fields\n         */\n        docvalueFields(fields: string[]): this;\n\n        /**\n         * The `post_filter` is applied to the search hits at the very end of a search request,\n         * after aggregations have already been calculated.\n         *\n         * @param {Query} filterQuery The filter to be applied after aggregation.\n         */\n        postFilter(filterQuery: Query): this;\n\n        /**\n         * Allows to highlight search results on one or more fields. The implementation\n         * uses either the lucene `plain` highlighter, the fast vector highlighter (`fvh`)\n         * or `postings` highlighter.\n         *\n         * Note: The `postings` highlighter has been removed in elasticsearch 6.0. 
The `unified`\n         * highlighter outputs the same highlighting when `index_options` is set to `offsets`.\n         *\n         * @param {Highlight} highlight\n         */\n        highlight(highlight: Highlight): this;\n\n        /**\n         * Rescoring can help to improve precision by reordering just the top (eg 100 - 500)\n         * documents returned by the `query` and `post_filter` phases, using a secondary\n         * (usually more costly) algorithm, instead of applying the costly algorithm to\n         * all documents in the index.\n         *\n         * @param {Rescore} rescore\n         * @throws {TypeError} If `query` is not an instance of `Rescore`\n         */\n        rescore(rescore: Rescore): this;\n\n        /**\n         * Enables explanation for each hit on how its score was computed.\n         *\n         * @param {boolean} enable\n         */\n        explain(enable: boolean): this;\n\n        /**\n         * Returns a version for each search hit.\n         *\n         * @param {boolean} enable\n         */\n        version(enable: boolean): this;\n\n        /**\n         * Allows to configure different boost level per index when searching across\n         * more than one indices. This is very handy when hits coming from one index\n         * matter more than hits coming from another index.\n         * Alias for method `indicesBoost`.\n         *\n         * @param {string} index Index wildcard expression or alias\n         * @param {number} boost\n         */\n        indexBoost(index: string, boost: number): this;\n\n        /**\n         * Allows to configure different boost level per index when searching across\n         * more than one indices. 
This is very handy when hits coming from one index\n         * matter more than hits coming from another index.\n         *\n         * @param {string} index Index wildcard expression or alias\n         * @param {number} boost\n         */\n        indicesBoost(index: string, boost: number): this;\n\n        /**\n         * Exclude documents which have a `_score` less than the minimum specified in `min_score`.\n         *\n         * @param {number} score\n         */\n        minScore(score: number): this;\n\n        /**\n         * Allows to collapse search results based on field values. The collapsing\n         * is done by selecting only the top sorted document per collapse key.\n         * The field used for collapsing must be a single valued `keyword` or `numeric`\n         * field with `doc_values` activated\n         *\n         * @param {string} field\n         * @param {InnerHits=} innerHits Allows to expand each collapsed top hits.\n         * @param {number=} maxConcurrentGroupRequests The number of concurrent\n         * requests allowed to retrieve the inner_hits' per group\n         * @throws {TypeError} If `innerHits` is not an instance of `InnerHits`\n         */\n        collapse(\n            field: string,\n            innerHits?: InnerHits,\n            maxConcurrentGroupRequests?: number\n        ): this;\n\n        /**\n         * Allows to use the results from the previous page to help the retrieval\n         * of the next page. 
The `search_after` parameter provides a live cursor.\n         * The parameter `from` must be set to `0` (or `-1`) when `search_after` is used.\n         *\n         * @param {Array<*>} values The `sort values` of the last document to retrieve\n         * the next page of results\n         */\n        searchAfter(values: any[]): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the request body search\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * The `RequestBodySearch` object provides methods generating an elasticsearch\n     * search request body. The search request can be executed with a search DSL,\n     * which includes the Query DSL, within its body.\n     */\n    export function requestBodySearch(): RequestBodySearch;\n\n    /**\n     * Base class implementation for all query types.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class should be extended and used, as validation against the class\n     * type is present in various places.\n     *\n     * @param {string} queryType\n     */\n    export class Query {\n        constructor(queryType: string);\n\n        /**\n         * Sets the boost value for documents matching the `Query`.\n         *\n         * @param {number} factor\n         */\n        boost(factor: number): this;\n\n        /**\n         * Sets the query name.\n         *\n         * @param {string} name\n         */\n        name(name: string): this;\n\n        /**\n         * Build and returns DSL representation of the `Query` class instance.\n         *\n         */\n        getDSL(): object;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `query`\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * The most simple query, which 
matches all documents, giving them all a `_score` of `1.0`.\n     *\n     * @extends Query\n     */\n    export class MatchAllQuery extends Query {\n        constructor();\n    }\n\n    /**\n     * The most simple query, which matches all documents, giving them all a `_score` of `1.0`.\n     */\n    export function matchAllQuery(): MatchAllQuery;\n\n    /**\n     * The inverse of the `match_all` query, which matches no documents.\n     *\n     * @extends Query\n     */\n    export class MatchNoneQuery extends Query {\n        constructor();\n    }\n\n    /**\n     * The inverse of the `match_all` query, which matches no documents.\n     */\n    export function matchNoneQuery(): MatchNoneQuery;\n\n    /**\n     * The `FullTextQueryBase` provides support for common options used across\n     * various full text query implementations.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} queryType\n     * @param {string=} queryString The query string\n     * @extends Query\n     */\n    class FullTextQueryBase extends Query {\n        constructor(queryType: string, queryString?: string);\n\n        /**\n         * Set the analyzer to control which analyzer will perform the analysis process on the text\n         *\n         * @param {string} analyzer\n         */\n        analyzer(analyzer: string): this;\n\n        /**\n         * Sets the value controlling how many \"should\" clauses in the resulting boolean\n         * query should match. It can be an absolute value (2), a percentage (30%)\n         * or a combination of both. 
For Common Terms Query when specifying different\n         * `minimum_should_match` for low and high frequency terms, an object with the\n         * keys `low_freq` and `high_freq` can be used.\n         *\n         * @param {string|number|Object} minimumShouldMatch\n         * Note: Object notation can only be used with Common Terms Query.\n         */\n        minimumShouldMatch(minimumShouldMatch: string | number | object): this;\n\n        /**\n         * Sets the query string.\n         *\n         * @param {string} queryString\n         */\n        query(queryString: string): this;\n    }\n\n    /**\n     * The `MonoFieldQueryBase` provides support for common options used across\n     * various full text query implementations with single search field.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} queryType\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     * @extends FullTextQueryBase\n     */\n    class MonoFieldQueryBase extends FullTextQueryBase {\n        constructor(queryType: string, field?: string, queryString?: string);\n\n        /**\n         * Sets the field to search on.\n         *\n         * @param {string} field\n         */\n        field(field: string): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation of the Full text query\n         * class instance.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * `match` query accepts text/numerics/dates, analyzes them, and constructs a query.\n     *\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     * @extends MonoFieldQueryBase\n     */\n    export class MatchQuery extends 
MonoFieldQueryBase {\n        constructor(field?: string, queryString?: string);\n\n        /**\n         * The operator to be used in the boolean query which is constructed\n         * by analyzing the text provided. The `operator` flag can be set to `or` or\n         * `and` to control the boolean clauses (defaults to `or`).\n         *\n         * @param {string} operator Can be `and`/`or`. Default is `or`.\n         */\n        operator(operator: 'and' | 'or'): this;\n\n        /**\n         * Sets the `lenient` parameter which allows to ignore exceptions caused\n         * by data-type mismatches such as trying to query a numeric field with a\n         * text query string when set to `true`.\n         *\n         * @param {boolean} enable Defaults to `false`\n         */\n        lenient(enable: boolean): this;\n\n        /**\n         * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n         * the number of one character changes that need to be made to one string to make it\n         * the same as another string.\n         *\n         * @param {number|string} factor Can be specified either as a number, or the maximum\n         * number of edits, or as `AUTO` which generates an edit distance based on the length\n         * of the term.\n         */\n        fuzziness(factor: number | string): this;\n\n        /**\n         * Sets the prefix length for a fuzzy prefix `MatchQuery`\n         *\n         * @param {number} len\n         */\n        prefixLength(len: number): this;\n\n        /**\n         * Sets the max expansions for a fuzzy prefix `MatchQuery`\n         *\n         * @param {number} limit\n         */\n        maxExpansions(limit: number): this;\n\n        /**\n         * Sets the rewrite method. 
Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. Replace N with an integer value.\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `rewrite` method is not valid.\n         */\n        rewrite(method: string): this;\n\n        /**\n         * Sets the fuzzy rewrite method. 
Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. 
Replace N with an integer value.\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `fuzzy_rewrite` method is not valid.\n         */\n        fuzzyRewrite(method: string): this;\n\n        /**\n         * Fuzzy transpositions (`ab` → `ba`) are allowed by default but can be disabled\n         * by setting `fuzzy_transpositions` to false.\n         *\n         * @param {boolean} enable\n         */\n        fuzzyTranspositions(enable: boolean): this;\n\n        /**\n         * If the analyzer used removes all tokens in a query like a `stop` filter does,\n         * the default behavior is to match no documents at all. In order to change that\n         * the `zero_terms_query` option can be used, which accepts `none` (default) and `all`\n         * which corresponds to a `match_all` query.\n         *\n         * @param {string} behavior A no match action, `all` or `none`. 
Default is `none`.\n         */\n        zeroTermsQuery(behavior: 'all' | 'none'): this;\n\n        /**\n         * Allows specifying an absolute or relative document frequency where high frequency\n         * terms are moved into an optional subquery and are only scored if one of the\n         * low frequency (below the cutoff) terms in the case of an `or` operator or\n         * all of the low frequency terms in the case of an `and` operator match.\n         *\n         * @param {number} frequency It can either be relative to the total number of documents\n         * if in the range `[0..1)` or absolute if greater or equal to `1.0`.\n         */\n        cutoffFrequency(frequency: number): this;\n    }\n\n    /**\n     * `match` query accepts text/numerics/dates, analyzes them, and constructs a query.\n     *\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     */\n    export function matchQuery(\n        field?: string,\n        queryString?: string\n    ): MatchQuery;\n\n    /**\n     * The `MatchPhraseQueryBase` provides support for common options used across\n     * various bucket match phrase query implementations.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} queryType\n     * @param {string} refUrl\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     * @extends MonoFieldQueryBase\n     */\n    class MatchPhraseQueryBase extends MonoFieldQueryBase {\n        constructor(\n            queryType: string,\n            refUrl: string,\n            field?: string,\n            queryString?: string\n        );\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on `MatchPhraseQueryBase`\n         
*/\n        minimumShouldMatch(): never;\n\n        /**\n         * Configures the `slop`(default is 0) for matching terms in any order.\n         * Transposed terms have a slop of 2.\n         * @param {number} slop A positive integer value, defaults is 0.\n         */\n        slop(slop: number): this;\n    }\n\n    /**\n     * The `match_phrase` query analyzes the text and creates a `phrase` query out of\n     * the analyzed text.\n     *\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     * @extends MatchPhraseQueryBase\n     */\n    export class MatchPhraseQuery extends MatchPhraseQueryBase {\n        constructor(field?: string, queryString?: string);\n    }\n\n    /**\n     * The `match_phrase` query analyzes the text and creates a `phrase` query out of\n     * the analyzed text.\n     *\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     */\n    export function matchPhraseQuery(\n        field?: string,\n        queryString?: string\n    ): MatchPhraseQuery;\n\n    /**\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     * @extends MatchPhraseQueryBase\n     */\n    export class MatchPhrasePrefixQuery extends MatchPhraseQueryBase {\n        constructor(field?: string, queryString?: string);\n\n        /**\n         * Control to how many prefixes the last term will be expanded.\n         *\n         * @param {number} limit Defaults to 50.\n         */\n        maxExpansions(limit: number): this;\n    }\n\n    /**\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     */\n    export function matchPhrasePrefixQuery(\n        field?: string,\n        queryString?: string\n    ): MatchPhrasePrefixQuery;\n\n    /**\n     * A `MultiMatchQuery` query builds further on top of 
the\n     * `MatchQuery` by allowing multiple fields to be specified.\n     * The idea here is to allow to more easily build a concise match type query\n     * over multiple fields instead of using a relatively more expressive query\n     * by using multiple match queries within a bool query.\n     *\n     * @param {Array<string>|string=} fields The fields to be queried\n     * @param {string=} queryString The query string\n     * @extends FullTextQueryBase\n     */\n    export class MultiMatchQuery extends FullTextQueryBase {\n        constructor(fields?: string[] | string, queryString?: string);\n\n        /**\n         * Appends given field to the list of fields to search against.\n         * Fields can be specified with wildcards.\n         * Individual fields can be boosted with the caret (^) notation.\n         * Example - `\"subject^3\"`\n         *\n         * @param {string} field One of the fields to be queried\n         */\n        field(field: string): this;\n\n        /**\n         * Appends given fields to the list of fields to search against.\n         * Fields can be specified with wildcards.\n         * Individual fields can be boosted with the caret (^) notation.\n         *\n         * @param {Array<string>} fields The fields to be queried\n         */\n        fields(fields: string[]): this;\n\n        /**\n         * Sets the type of multi match query. Valid values are:\n         * - `best_fields` - (default) Finds documents which match any field,\n         * but uses the `_score` from the best field.\n         * - `most_fields` - Finds documents which match any field and combines\n         * the `_score` from each field.\n         * - `cross_fields` - Treats fields with the same `analyzer` as though\n         * they were one big field. 
Looks for each word in *any* field\n         * - `phrase` - Runs a `match_phrase` query on each field and combines\n         * the `_score` from each field.\n         * - `phrase_prefix` - Runs a `match_phrase_prefix` query on each field\n         * and combines the `_score` from each field.\n         * - `bool_prefix` - (added in v7.2) Creates a match_bool_prefix query on each field and\n         * combines the _score from each field.\n         *\n         * @param {string} type Can be one of `best_fields`, `most_fields`,\n         * `cross_fields`, `phrase`, `phrase_prefix` and `bool_prefix`. Default is `best_fields`.\n         */\n        type(\n            type:\n                | 'best_fields'\n                | 'most_fields'\n                | 'cross_fields'\n                | 'phrase'\n                | 'phrase_prefix'\n                | 'bool_prefix'\n        ): this;\n\n        /**\n         * The tie breaker value. The tie breaker capability allows results\n         * that include the same term in multiple fields to be judged better than\n         * results that include this term in only the best of those multiple\n         * fields, without confusing this with the better case of two different\n         * terms in the multiple fields. Default: `0.0`.\n         *\n         * @param {number} factor\n         */\n        tieBreaker(factor: number): this;\n\n        /**\n         * The operator to be used in the boolean query which is constructed\n         * by analyzing the text provided. The `operator` flag can be set to `or` or\n         * `and` to control the boolean clauses (defaults to `or`).\n         *\n         * @param {string} operator Can be `and`/`or`. 
Default is `or`.\n         */\n        operator(operator: 'and' | 'or'): this;\n\n        /**\n         * Sets the `lenient` parameter which allows to ignore exceptions caused\n         * by data-type mismatches such as trying to query a numeric field with a\n         * text query string when set to `true`.\n         *\n         * @param {boolean} enable Defaults to `false`\n         */\n        lenient(enable: boolean): this;\n\n        /**\n         * Configures the `slop`(default is 0) for matching terms in any order.\n         * Transposed terms have a slop of 2.\n         *\n         * @param {number} slop A positive integer value, default is 0.\n         */\n        slop(slop: number): this;\n\n        /**\n         * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n         * the number of one character changes that need to be made to one string to make it\n         * the same as another string.\n         * The `fuzziness` parameter cannot be used with the `phrase`, `phrase_prefix`\n         * or `cross_fields` type.\n         *\n         * @param {number|string} factor Can be specified either as a number, or the maximum\n         * number of edits, or as `AUTO` which generates an edit distance based on the length\n         * of the term.\n         */\n        fuzziness(factor: number | string): this;\n\n        /**\n         * Sets the prefix length for a fuzzy prefix `MultiMatchQuery`\n         *\n         * @param {number} len\n         */\n        prefixLength(len: number): this;\n\n        /**\n         * Sets the max expansions for a fuzzy prefix `MultiMatchQuery`\n         *\n         * @param {number} limit\n         */\n        maxExpansions(limit: number): this;\n\n        /**\n         * Sets the rewrite method. 
Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. Replace N with an integer value.\n         *\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `rewrite` method is not valid.\n         */\n        rewrite(method: string): this;\n\n        /**\n         * Sets the fuzzy rewrite method. 
Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. Replace N with an integer value.\n         *\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `fuzzy_rewrite` method is not valid.\n         */\n        fuzzyRewrite(method: string): this;\n\n        /**\n         * If the analyzer used removes all tokens in a query like a `stop` filter does,\n         * the default behavior is to match no documents at all. 
In order to change that\n         * the `zero_terms_query` option can be used, which accepts `none` (default) and `all`\n         * which corresponds to a `match_all` query.\n         *\n         * @param {string} behavior A no match action, `all` or `none`. Default is `none`.\n         */\n        zeroTermsQuery(behavior: 'all' | 'none'): this;\n\n        /**\n         * Allows specifying an absolute or relative document frequency where high frequency\n         * terms are moved into an optional subquery and are only scored if one of the\n         * low frequency (below the cutoff) terms in the case of an `or` operator or\n         * all of the low frequency terms in the case of an `and` operator match.\n         *\n         * @param {number} frequency It can either be relative to the total number of documents\n         * if in the range `[0..1)` or absolute if greater or equal to `1.0`.\n         */\n        cutoffFrequency(frequency: number): this;\n    }\n\n    /**\n     * A `MultiMatchQuery` query builds further on top of the\n     * `MatchQuery` by allowing multiple fields to be specified.\n     * The idea here is to allow to more easily build a concise match type query\n     * over multiple fields instead of using a relatively more expressive query\n     * by using multiple match queries within a bool query.\n     *\n     * @param {Array<string>|string=} fields The fields to be queried\n     * @param {string=} queryString The query string\n     */\n    export function multiMatchQuery(\n        fields?: string[] | string,\n        queryString?: string\n    ): MultiMatchQuery;\n\n    /**\n     * The `common` terms query is a modern alternative to stopwords which\n     * improves the precision and recall of search results (by taking\n     * stopwords into account), without sacrificing performance.\n     *\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     * @extends 
MonoFieldQueryBase\n     */\n    export class CommonTermsQuery extends MonoFieldQueryBase {\n        constructor(field?: string, queryString?: string);\n\n        /**\n         * Allows specifying an absolute or relative document frequency where high frequency\n         * terms are moved into an optional subquery and are only scored if one of the\n         * low frequency (below the cutoff) terms in the case of an `or` operator or\n         * all of the low frequency terms in the case of an `and` operator match.\n         *\n         * @param {number} frequency It can either be relative to the total number of documents\n         * if in the range `[0..1)` or absolute if greater or equal to `1.0`.\n         */\n        cutoffFrequency(frequency: number): this;\n\n        /**\n         * The operator to be used on low frequency terms in the boolean query\n         * which is constructed by analyzing the text provided. The `operator` flag\n         * can be set to `or` or `and` to control the boolean clauses (defaults to `or`).\n         *\n         * @param {string} operator Can be `and`/`or`. Default is `or`.\n         */\n        lowFreqOperator(operator: 'and' | 'or'): this;\n\n        /**\n         * The operator to be used on high frequency terms in the boolean query\n         * which is constructed by analyzing the text provided. The `operator` flag\n         * can be set to `or` or `and` to control the boolean clauses (defaults to `or`).\n         *\n         * @param {string} operator Can be `and`/`or`. Default is `or`.\n         */\n        highFreqOperator(operator: 'and' | 'or'): this;\n\n        /**\n         * Sets the value controlling how many \"should\" clauses in the resulting boolean\n         * query should match for low frequency terms. 
It can be an absolute value (2),\n         * a percentage (30%) or a combination of both.\n         *\n         * @param {string|number} lowFreqMinMatch\n         */\n        lowFreq(lowFreqMinMatch: string | number): this;\n\n        /**\n         * Sets the value controlling how many \"should\" clauses in the resulting boolean\n         * query should match for high frequency terms. It can be an absolute value (2),\n         * a percentage (30%) or a combination of both.\n         *\n         * @param {string|number} highFreqMinMatch\n         */\n        highFreq(highFreqMinMatch: string | number): this;\n\n        /**\n         * Enables or disables similarity coordinate scoring of documents\n         * commoning the `CommonTermsQuery`. Default: `false`.\n         *\n         * NOTE: This has been removed in elasticsearch 6.0. If provided,\n         * it will be ignored and a deprecation warning will be issued.\n         *\n         * @param {boolean} enable\n         */\n        disableCoord(enable: boolean): this;\n    }\n\n    /**\n     * The `common` terms query is a modern alternative to stopwords which\n     * improves the precision and recall of search results (by taking\n     * stopwords into account), without sacrificing performance.\n     *\n     * @param {string=} field The document field to query against\n     * @param {string=} queryString The query string\n     */\n    export function commonTermsQuery(\n        field?: string,\n        queryString?: string\n    ): CommonTermsQuery;\n\n    /**\n     * The `QueryStringQueryBase` provides support for common options used across\n     * full text query implementations `QueryStringQuery` and `SimpleQueryStringQuery`.\n     * A query that uses a query parser in order to parse its content.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param 
{string} queryType\n     * @param {string} refUrl\n     * @param {string=} queryString The actual query to be parsed.\n     * @extends FullTextQueryBase\n     */\n    class QueryStringQueryBase extends FullTextQueryBase {\n        constructor(queryType: string, refUrl: string, queryString?: string);\n\n        /**\n         * Appends given field to the list of fields to search against.\n         * Fields can be specified with wildcards.\n         * Individual fields can be boosted with the caret (^) notation.\n         * Example - `\"subject^3\"`\n         *\n         * @param {string} field One of the fields to be queried\n         */\n        field(field: string): this;\n\n        /**\n         * Appends given fields to the list of fields to search against.\n         * Fields can be specified with wildcards.\n         * Individual fields can be boosted with the caret (^) notation.\n         * Example - `[ \"subject^3\", \"message\" ]`\n         *\n         * @param {Array<string>} fields The fields to be queried\n         */\n        fields(fields: string[]): this;\n\n        /**\n         * The default operator used if no explicit operator is specified.\n         * For example, with a default operator of `OR`, the query `capital of Hungary`\n         * is translated to `capital OR of OR Hungary`, and with default operator of AND,\n         * the same query is translated to `capital AND of AND Hungary`.\n         * The default value is OR.\n         *\n         * @param {string} operator Can be `AND`/`OR`. 
Default is `OR`.\n         */\n        defaultOperator(operator: 'AND' | 'OR'): this;\n\n        /**\n         * By default, wildcards terms in a query string are not analyzed.\n         * By setting this value to `true`, a best effort will be made to analyze those as well.\n         *\n         * @param {boolean} enable\n         */\n        analyzeWildcard(enable: boolean): this;\n\n        /**\n         * Sets the `lenient` parameter which allows to ignore exceptions caused\n         * by data-type mismatches such as trying to query a numeric field with a\n         * text query string when set to `true`.\n         *\n         * @param {boolean} enable Defaults to `false`\n         */\n        lenient(enable: boolean): this;\n\n        /**\n         * A suffix to append to fields for quoted parts of the query string.\n         * This allows to use a field that has a different analysis chain for exact matching.\n         *\n         * @param {string} suffix\n         */\n        quoteFieldSuffix(suffix: string): this;\n\n        /**\n         * Perform the query on all fields detected in the mapping that can be queried.\n         * Will be used by default when the `_all` field is disabled and\n         * no `default_field` is specified (either in the index settings or\n         * in the request body) and no `fields` are specified.\n         *\n         * @param {boolean} enable\n         */\n        allFields(enable: boolean): this;\n    }\n\n    /**\n     * A query that uses a query parser in order to parse its content.\n     *\n     * @param {string=} queryString The actual query to be parsed.\n     * @extends QueryStringQueryBase\n     */\n    export class QueryStringQuery extends QueryStringQueryBase {\n        constructor(queryString?: string);\n\n        /**\n         * The default field for query terms if no prefix field is specified.\n         * Defaults to the `index.query.default_field` index settings, which\n         * in turn defaults to `_all`.\n      
   *\n         * @param {string} field\n         */\n        defaultField(field: string): this;\n\n        /**\n         * When set, `*` or `?` are allowed as the first character. Defaults to `true`.\n         *\n         * @param {boolean} enable\n         */\n        allowLeadingWildcard(enable: boolean): this;\n\n        /**\n         * Set to true to enable position increments in result queries. Defaults to true.\n         *\n         * @param {boolean} enable\n         */\n        enablePositionIncrements(enable: boolean): this;\n\n        /**\n         * Controls the number of terms fuzzy queries will expand to. Defaults to `50`.\n         *\n         * @param {number} limit\n         */\n        fuzzyMaxExpansions(limit: number): this;\n\n        /**\n         * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n         * the number of one character changes that need to be made to one string to make it\n         * the same as another string. Defaults to `AUTO`.\n         *\n         * @param {number|string} factor Can be specified either as a number, or the maximum\n         * number of edits, or as `AUTO` which generates an edit distance based on the length\n         * of the term. Defaults to `AUTO`.\n         */\n        fuzziness(factor: number | string): this;\n\n        /**\n         * Set the prefix length for fuzzy queries. Default is `0`.\n         *\n         * @param {number} len\n         */\n        fuzzyPrefixLength(len: number): this;\n\n        /**\n         * Sets the rewrite method. 
Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. Replace N with an integer value.\n         *\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `rewrite` method is not valid.\n         */\n        rewrite(method: string): this;\n\n        /**\n         * Sets the fuzzy rewrite method. 
Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. Replace N with an integer value.\n         *\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `fuzzy_rewrite` method is not valid.\n         */\n        fuzzyRewrite(method: string): this;\n\n        /**\n         * Sets the default slop for phrases. If zero, then exact phrase matches are required.\n         * Default value is 0.\n         *\n         * @param {number} slop A positive integer value, defaults is 0.\n         */\n        phraseSlop(slop: number): this;\n\n        /**\n         * Auto generate phrase queries. 
Defaults to `false`.\n         *\n         * Note: This parameter has been removed in elasticsearch 6.0. If provided, it will be\n         * ignored and issue a deprecation warning.\n         *\n         * @param {boolean} enable\n         */\n        autoGeneratePhraseQueries(enable: boolean): this;\n\n        /**\n         * Limit on how many automaton states regexp queries are allowed to create.\n         * This protects against too-difficult (e.g. exponentially hard) regexps.\n         * Defaults to 10000.\n         *\n         * @param {number} limit\n         */\n        maxDeterminizedStates(limit: number): this;\n\n        /**\n         * Time Zone to be applied to any range query related to dates.\n         *\n         * @param {string} zone\n         */\n        timeZone(zone: string): this;\n\n        /**\n         * Whether query text should be split on whitespace prior to analysis.\n         * Instead the queryparser would parse around only real operators.\n         * Default is `false`. It is not allowed to set this option to `false`\n         * if `auto_generate_phrase_queries` is already set to `true`.\n         *\n         * Note: This parameter has been removed in elasticsearch 6.0. If provided, it will be\n         * ignored and issue a deprecation warning. The `query_string` query now splits on operator\n         * only.\n         *\n         * @param {string} enable\n         */\n        splitOnWhitespace(enable: string): this;\n\n        /**\n         * Should the queries be combined using `dis_max` (set it to `true`),\n         * or a bool query (set it to `false`). Defaults to `true`.\n         *\n         * Note: This parameter has been removed in elasticsearch 6.0. If provided, it will be\n         * ignored and issue a deprecation warning. 
The `tie_breaker` parameter must be used\n         * instead.\n         *\n         * @param {boolean} enable\n         */\n        useDisMax(enable: boolean): this;\n\n        /**\n         * When using `dis_max`, the disjunction max tie breaker. Defaults to `0`.\n         *\n         * @param {number} factor\n         */\n        tieBreaker(factor: number): this;\n\n        /**\n         * Sets the quote analyzer name used to analyze the `query`\n         * when in quoted text.\n         *\n         * @param {string} analyzer A valid analyzer name.\n         */\n        quoteAnalyzer(analyzer: string): this;\n\n        /**\n         * If the query string should be escaped or not.\n         *\n         * @param {boolean} enable\n         */\n        escape(enable: boolean): this;\n    }\n\n    /**\n     * A query that uses a query parser in order to parse its content.\n     *\n     * @param {string=} queryString The actual query to be parsed.\n     */\n    export function queryStringQuery(queryString?: string): QueryStringQuery;\n\n    /**\n     * A query that uses the `SimpleQueryParser` to parse its context.\n     * Unlike the regular `query_string` query, the `simple_query_string` query\n     * will never throw an exception, and discards invalid parts of the query.\n     *\n     * @param {string=} queryString The query string\n     * @extends QueryStringQueryBase\n     */\n    export class SimpleQueryStringQuery extends QueryStringQueryBase {\n        constructor(queryString?: string);\n\n        /**\n         * `simple_query_string` support multiple flags to specify which parsing features\n         * should be enabled. It is specified as a `|`-delimited string.\n         *\n         * @param {string} flags `|` delimited string. 
The available flags are: `ALL`, `NONE`,\n         * `AND`, `OR`, `NOT`, `PREFIX`, `PHRASE`, `PRECEDENCE`, `ESCAPE`, `WHITESPACE`,\n         * `FUZZY`, `NEAR`, and `SLOP`.\n         */\n        flags(flags: string): this;\n    }\n\n    /**\n     * A query that uses the `SimpleQueryParser` to parse its context.\n     * Unlike the regular `query_string` query, the `simple_query_string` query\n     * will never throw an exception, and discards invalid parts of the query.\n     *\n     * @param {string=} queryString The query string\n     */\n    export function simpleQueryStringQuery(\n        queryString?: string\n    ): SimpleQueryStringQuery;\n\n    /**\n     * The `combined_fields` query supports searching multiple text fields as if\n     * their contents had been indexed into one combined field. It takes a term-centric\n     * view of the query: first it analyzes the query string to produce individual terms,\n     * then looks for each term in any of the fields.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-combined-fields-query.html)\n     *\n     * NOTE: This query was added in elasticsearch v7.13.\n     *\n     * @param {Array<string>|string=} fields The fields to be queried\n     * @param {string=} queryString The query string\n     * @extends FullTextQueryBase\n     */\n    export class CombinedFieldsQuery extends FullTextQueryBase {\n        constructor(fields?: string[] | string, queryString?: string);\n\n        /**\n         * Appends given field to the list of fields to search against.\n         * Fields can be specified with wildcards.\n         * Individual fields can be boosted with the caret (^) notation.\n         * Example - `\"subject^3\"`\n         *\n         * @param {string} field One of the fields to be queried\n         * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n         */\n        field(field: string): this;\n\n        /**\n         * 
Appends given fields to the list of fields to search against.\n         * Fields can be specified with wildcards.\n         * Individual fields can be boosted with the caret (^) notation.\n         *\n         * @example\n         * // Boost individual fields with caret `^` notation\n         * const qry = esb.combinedFieldsQuery(['subject^3', 'message'], 'this is a test');\n         *\n         * @example\n         * // Specify fields with wildcards\n         * const qry = esb.combinedFieldsQuery(['title', '*_name'], 'Will Smith');\n         *\n         * @param {Array<string>} fields The fields to be queried\n         * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n         */\n        fields(fields: string[]): this;\n\n        /**\n         * If true, match phrase queries are automatically created for multi-term synonyms.\n         *\n         * @param {boolean} enable Defaults to `true`\n         * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n         */\n        autoGenerateSynonymsPhraseQuery(enable: boolean): this;\n\n        /**\n         * The operator to be used in the boolean query which is constructed\n         * by analyzing the text provided. The `operator` flag can be set to `or` or\n         * `and` to control the boolean clauses (defaults to `or`).\n         *\n         * @param {string} operator Can be `and`/`or`. Default is `or`.\n         * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n         */\n        operator(operator: 'and' | 'or'): this;\n\n        /**\n         * If the analyzer used removes all tokens in a query like a `stop` filter does,\n         * the default behavior is to match no documents at all. 
In order to change that\n         * the `zero_terms_query` option can be used, which accepts `none` (default) and `all`\n         * which corresponds to a `match_all` query.\n         *\n         * @example\n         * const qry = esb.combinedFieldsQuery('message', 'to be or not to be')\n         *     .operator('and')\n         *     .zeroTermsQuery('all');\n         *\n         * @param {string} behavior A no match action, `all` or `none`. Default is `none`.\n         * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n         */\n        zeroTermsQuery(behavior: 'all' | 'none'): this;\n    }\n\n    /**\n     * The `combined_fields` query supports searching multiple text fields as if\n     * their contents had been indexed into one combined field. It takes a term-centric\n     * view of the query: first it analyzes the query string to produce individual terms,\n     * then looks for each term in any of the fields.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-combined-fields-query.html)\n     *\n     * NOTE: This query was added in elasticsearch v7.13.\n     *\n     * @example\n     * const qry = esb.combinedFieldsQuery(['subject', 'message'], 'this is a test');\n     *\n     * @param {Array<string>|string=} fields The fields to be queried\n     * @param {string=} queryString The query string\n     */\n    export function combinedFieldsQuery(\n        fields?: string[] | string,\n        queryString?: string\n    ): CombinedFieldsQuery;\n\n    /**\n     * The `ValueTermQueryBase` provides support for common options used across\n     * various term level query implementations.\n     *\n     * @param {string} queryType\n     * @param {string=} field The document field to query against\n     * @param {string=} value The query string\n     * @extends Query\n     */\n    class ValueTermQueryBase extends Query {\n        constructor(queryType: string, field?: string, 
value?: string);\n\n        /**\n         * Sets the field to search on.\n         *\n         * @param {string} field\n         */\n        field(field: string): this;\n\n        /**\n         * Sets the query string.\n         *\n         * @param {string|number|boolean} queryVal\n         */\n        value(queryVal: string | number | boolean): this;\n\n        /**\n         * Allows ASCII case insensitive matching of the value with the indexed\n         * field values when set to true.\n         *\n         * NOTE: Only available in Elasticsearch v7.10.0+\n         *\n         * @param enable\n         */\n        caseInsensitive(enable: boolean): this;\n    }\n\n    /**\n     * The `term` query finds documents that contain the *exact* term specified\n     * in the inverted index.\n     *\n     * @param {string=} field\n     * @param {string|number|boolean=} queryVal\n     * @extends ValueTermQueryBase\n     */\n    export class TermQuery extends ValueTermQueryBase {\n        constructor(field?: string, queryVal?: string | number | boolean);\n    }\n\n    /**\n     * The `term` query finds documents that contain the *exact* term specified\n     * in the inverted index.\n     *\n     * @param {string=} field\n     * @param {string|number|boolean=} queryVal\n     */\n    export function termQuery(\n        field?: string,\n        queryVal?: string | number | boolean\n    ): TermQuery;\n\n    /**\n     * Filters documents that have fields that match any of the provided terms (**not analyzed**).\n     *\n     * @param {string=} field\n     * @param {Array|string|number|boolean=} values\n     * @extends Query\n     */\n    export class TermsQuery extends Query {\n        constructor(\n            field?: string,\n            values?: string[] | string | number | boolean\n        );\n\n        /**\n         * Sets the field to search on.\n         *\n         * @param {string} field\n         */\n        field(field: string): this;\n\n        /**\n         * Append 
given value to list of values to run Terms Query with.\n         *\n         * @param {string|number|boolean} value\n         */\n        value(value: string | number | boolean): this;\n\n        /**\n         * Specifies the values to run query for.\n         *\n         * @param {Array<string|number|boolean>} values Values to run query for.\n         * @throws {TypeError} If `values` is not an instance of Array\n         */\n        values(values: string[] | number[] | boolean[]): this;\n\n        /**\n         * Convenience method for setting term lookup options.\n         * Valid options are `index`, `type`, `id`, `path` and `routing`\n         *\n         * @param {object} lookupOpts An object with any of the keys `index`,\n         * `type`, `id`, `path` and `routing`.\n         */\n        termsLookup(lookupOpts: object): this;\n\n        /**\n         * The index to fetch the term values from. Defaults to the current index.\n         *\n         * Note: The `index` parameter in the terms filter, used to look up terms in a dedicated\n         * index is mandatory in elasticsearch 6.0. Previously, the index defaulted to the index\n         * the query was executed on. 
In 6.0, this index must be explicitly set in the request.\n         *\n         * @param {string} idx The index to fetch the term values from.\n         * Defaults to the current index.\n         */\n        index(idx: string): this;\n\n        /**\n         * The type to fetch the term values from.\n         *\n         * @param {string} type\n         */\n        type(type: string): this;\n\n        /**\n         * The id of the document to fetch the term values from.\n         *\n         * @param {string} id\n         */\n        id(id: string): this;\n\n        /**\n         * The field specified as path to fetch the actual values for the `terms` filter.\n         *\n         * @param {string} path\n         */\n        path(path: string): this;\n\n        /**\n         * A custom routing value to be used when retrieving the external terms doc.\n         *\n         * @param {string} routing\n         */\n        routing(routing: string): this;\n    }\n\n    /**\n     * Filters documents that have fields that match any of the provided terms (**not analyzed**).\n     *\n     * @param {string=} field\n     * @param {Array<string|number|boolean>|string|number|boolean=} values\n     */\n    export function termsQuery(\n        field?: string,\n        values?: string[] | number[] | boolean[] | string | number | boolean\n    ): TermsQuery;\n\n    /**\n     * Returns any documents that match with at least one or more of the provided\n     * terms. The terms are not analyzed and thus must match exactly. 
The number of\n     * terms that must match varies per document and is either controlled by a\n     * minimum should match field or computed per document in a minimum should match\n     * script.\n     *\n     * NOTE: This query was added in elasticsearch v6.1.\n     *\n     * @param {string=} field\n     * @param {Array<string|number|boolean>|string|number=} terms\n     *\n     * @extends Query\n     */\n    export class TermsSetQuery extends Query {\n        constructor(\n            field?: string,\n            terms?: string[] | number[] | boolean[] | string | number\n        );\n\n        /**\n         * Sets the field to search on.\n         *\n         * @param {string} field\n         */\n        field(field: string): this;\n\n        /**\n         * Append given term to set of terms to run Terms Set Query with.\n         *\n         * @param {string|number|boolean} term\n         */\n        term(term: string | number | boolean): this;\n\n        /**\n         * Specifies the terms to run query for.\n         *\n         * @param {Array<string|number|boolean>} terms Terms set to run query for.\n         * @throws {TypeError} If `terms` is not an instance of Array\n         */\n        terms(terms: string[] | number[] | boolean[]): this;\n\n        /**\n         * Controls the number of terms that must match per document.\n         *\n         * @param {string} fieldName\n         */\n        minimumShouldMatchField(fieldName: string): this;\n\n        /**\n         * Sets the `script` for query. 
It controls how many terms are required to\n         * match in a more dynamic way.\n         *\n         * The `params.num_terms` parameter is available in the script to indicate\n         * the number of terms that have been specified.\n         *\n         * @param {Script|string|Object} script\n         * @returns {TermsSetQuery} returns `this` so that calls can be chained.\n         */\n        minimumShouldMatchScript(script: Script | string | object): this;\n    }\n\n    /**\n     * Returns any documents that match with at least one or more of the provided\n     * terms. The terms are not analyzed and thus must match exactly. The number of\n     * terms that must match varies per document and is either controlled by a\n     * minimum should match field or computed per document in a minimum should match\n     * script.\n     *\n     * NOTE: This query was added in elasticsearch v6.1.\n     *\n     * @param {string=} field\n     * @param {Array|string|number=} terms\n     */\n    export function termsSetQuery(\n        field?: string,\n        terms?: string[] | number[] | boolean[] | string | number\n    ): TermsSetQuery;\n\n    /**\n     * Interface-like class used to group and identify various implementations of\n     * multi term queries:\n     * - Wildcard Query\n     * - Fuzzy Query\n     * - Prefix Query\n     * - Range Query\n     * - Regexp Query\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     * @extends ValueTermQueryBase\n     */\n    class MultiTermQueryBase extends ValueTermQueryBase {}\n\n    /**\n     * Matches documents with fields that have terms within a certain range.\n     *\n     * @extends MultiTermQueryBase\n     */\n    export class RangeQuery extends MultiTermQueryBase {\n        constructor(field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method 
cannot be called on RangeQuery\n         */\n        value(): never;\n\n        /**\n         * Greater-than or equal to\n         *\n         * @param {string|number} val\n         */\n        gte(val: string | number): this;\n\n        /**\n         * Less-than or equal to\n         *\n         * @param {string|number} val\n         */\n        lte(val: string | number): this;\n\n        /**\n         * Greater-than\n         *\n         * @param {string|number} val\n         */\n        gt(val: string | number): this;\n\n        /**\n         * Less-than\n         *\n         * @param {string|number} val\n         */\n        lt(val: string | number): this;\n\n        /**\n         * The lower bound. Defaults to start from the first.\n         *\n         * @param {string|number} val The lower bound value, type depends on field type\n         */\n        from(val: string | number): this;\n\n        /**\n         * The upper bound. Defaults to unbounded.\n         *\n         * @param {string|number} val The upper bound value, type depends on field type\n         */\n        to(val: string | number): this;\n\n        /**\n         * Should the first from (if set) be inclusive or not. Defaults to `true`\n         *\n         * @param {boolean} enable `true` to include, `false` to exclude\n         */\n        includeLower(enable: boolean): this;\n\n        /**\n         * Should the last to (if set) be inclusive or not. 
Defaults to `true`.\n         *\n         * @param {boolean} enable `true` to include, `false` to exclude\n         */\n        includeUpper(enable: boolean): this;\n\n        /**\n         * Time Zone to be applied to any range query related to dates.\n         *\n         * @param {string} zone\n         */\n        timeZone(zone: string): this;\n\n        /**\n         * Sets the format expression for parsing the upper and lower bounds.\n         * If no format is specified, then it will use the first format specified in the field mapping.\n         *\n         * @param {string} fmt Format for parsing upper and lower bounds.\n         */\n        format(fmt: string): this;\n\n        /**\n         * Sets the relationship between Query and indexed data\n         * that will be used to determine if a Document should be matched or not.\n         *\n         * @param {string} relation Can be one of `WITHIN`, `CONTAINS`, `DISJOINT`\n         * or `INTERSECTS`(default)\n         */\n        relation(\n            relation: 'WITHIN' | 'CONTAINS' | 'DISJOINT' | 'INTERSECTS'\n        ): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation of the `range` query\n         * class instance.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * Matches documents with fields that have terms within a certain range.\n     */\n    export function rangeQuery(field?: string): RangeQuery;\n\n    /**\n     * Returns documents that have at least one non-`null` value in the original field\n     *\n     * @param {string=} field\n     * @extends Query\n     */\n    export class ExistsQuery extends Query {\n        constructor(field?: string);\n\n        /**\n         * Sets the field to search on.\n         *\n         * @param {string} field\n         */\n        field(field: string): this;\n    }\n\n    /**\n     * Returns documents that have at least one non-`null` value in the original field\n     *\n    
 * @param {string=} field\n     */\n    export function existsQuery(field?: string): ExistsQuery;\n\n    /**\n     * Matches documents that have fields containing terms with a specified prefix (**not analyzed**).\n     *\n     * @param {string=} field\n     * @param {string|number=} value\n     * @extends MultiTermQueryBase\n     */\n    export class PrefixQuery extends MultiTermQueryBase {\n        constructor(field?: string, value?: string | number);\n\n        /**\n         * Sets the rewrite method. Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, except no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. 
Replace N with an integer value.\n         *\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `rewrite` method is not valid.\n         */\n        rewrite(method: string): this;\n    }\n\n    /**\n     * Matches documents that have fields containing terms with a specified prefix (**not analyzed**).\n     *\n     * @param {string=} field\n     * @param {string|number=} value\n     */\n    export function prefixQuery(\n        field?: string,\n        value?: string | number\n    ): PrefixQuery;\n\n    /**\n     * Matches documents that have fields matching a wildcard expression (**not analyzed**).\n     *\n     * @param {string=} field\n     * @param {string=} value\n     * @extends MultiTermQueryBase\n     */\n    export class WildcardQuery extends MultiTermQueryBase {\n        constructor(field?: string, value?: string);\n\n        /**\n         * Allow case insensitive matching or not (added in 7.10.0).\n         * Defaults to false.\n         *\n         * @param {boolean} caseInsensitive\n         */\n        caseInsensitive(caseInsensitive: boolean): this;\n\n        /**\n         * Sets the rewrite method. 
Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, except no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. Replace N with an integer value.\n         *\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `rewrite` method is not valid.\n         */\n        rewrite(method: string): this;\n    }\n\n    /**\n     * Matches documents that have fields matching a wildcard expression (**not analyzed**).\n     *\n     * @param {string=} field\n     * @param {string=} value\n     */\n    export function wildcardQuery(\n        field?: string,\n        value?: string\n    ): WildcardQuery;\n\n    /**\n     * Query for regular expression term queries. 
Elasticsearch will apply the regexp\n     * to the terms produced by the tokenizer for that field, and not to the original\n     * text of the field.\n     *\n     * @param {string=} field\n     * @param {string|number=} value\n     * @extends MultiTermQueryBase\n     */\n    export class RegexpQuery extends MultiTermQueryBase {\n        constructor(field?: string, value?: string | number);\n\n        /**\n         * Set special flags. Possible flags are `ALL` (default),\n         * `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`.\n         *\n         * @param {string} flags `|` separated flags. Possible flags are `ALL` (default),\n         * `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`.\n         */\n        flags(flags: string): this;\n\n        /**\n         * Allow case insensitive matching or not (added in 7.10.0).\n         * Defaults to false.\n         *\n         * @param {boolean} caseInsensitive\n         */\n        caseInsensitive(caseInsensitive: boolean): this;\n\n        /**\n         * Limit on how many automaton states regexp queries are allowed to create.\n         * This protects against too-difficult (e.g. exponentially hard) regexps.\n         * Defaults to 10000.\n         *\n         * @param {number} limit\n         */\n        maxDeterminizedStates(limit: number): this;\n\n        /**\n         * Sets the rewrite method. 
Valid values are:\n         * - `constant_score` - tries to pick the best constant-score rewrite\n         *  method based on term and document counts from the query.\n         *  Synonyms - `constant_score_auto`, `constant_score_filter`\n         * - `scoring_boolean` - translates each term into boolean should and\n         *  keeps the scores as computed by the query\n         * - `constant_score_boolean` - same as `scoring_boolean`, except no scores\n         *  are computed.\n         * - `constant_score_filter` - first creates a private Filter, by visiting\n         *  each term in sequence and marking all docs for that term\n         * - `top_terms_boost_N` - first translates each term into boolean should\n         *  and scores are only computed as the boost using the top N\n         *  scoring terms. Replace N with an integer value.\n         * - `top_terms_N` - first translates each term into boolean should\n         *  and keeps the scores as computed by the query. Only the top N\n         *  scoring terms are used. Replace N with an integer value.\n         *\n         * Default is `constant_score`.\n         * This is an advanced option, use with care.\n         *\n         * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n         * `constant_score_filter` (synonyms for `constant_score`) have been removed in\n         * elasticsearch 6.0.\n         *\n         * @param {string} method The rewrite method as a string.\n         * @throws {Error} If the given `rewrite` method is not valid.\n         */\n        rewrite(method: string): this;\n    }\n\n    /**\n     * Query for regular expression term queries. 
Elasticsearch will apply the regexp\n     * to the terms produced by the tokenizer for that field, and not to the original\n     * text of the field.\n     *\n     * @param {string=} field\n     * @param {string|number=} value\n     */\n    export function regexpQuery(\n        field?: string,\n        value?: string | number\n    ): RegexpQuery;\n\n    /**\n     * The fuzzy query generates all possible matching terms that are within\n     * the maximum edit distance specified in `fuzziness` and then checks\n     * the term dictionary to find out which of those generated terms\n     * actually exist in the index.\n     * The fuzzy query uses similarity based on Levenshtein edit distance.\n     *\n     * @param {string=} field\n     * @param {string|number=} value\n     * @extends MultiTermQueryBase\n     */\n    export class FuzzyQuery extends MultiTermQueryBase {\n        constructor(field?: string, value?: string | number);\n\n        /**\n         * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n         * the number of one character changes that need to be made to one string to make it\n         * the same as another string.\n         *\n         * @param {number|string} factor Can be specified either as a number, or the maximum\n         * number of edits, or as `AUTO` which generates an edit distance based on the length\n         * of the term.\n         */\n        fuzziness(factor: number | string): this;\n\n        /**\n         * The number of initial characters which will not be “fuzzified”.\n         * This helps to reduce the number of terms which must be examined. Defaults to `0`.\n         *\n         * @param {number} len Characters to skip fuzzy for. Defaults to `0`.\n         */\n        prefixLength(len: number): this;\n\n        /**\n         * The maximum number of terms that the fuzzy query will expand to. Defaults to `50`.\n         *\n         * @param {number} limit Limit for fuzzy query expansion. 
Defaults to `50`.\n         */\n        maxExpansions(limit: number): this;\n\n        /**\n         * Transpositions (`ab` → `ba`) are allowed by default but can be disabled\n         * by setting `transpositions` to false.\n         *\n         * @param {boolean} enable\n         */\n        transpositions(enable: boolean): this;\n    }\n\n    /**\n     * The fuzzy query generates all possible matching terms that are within\n     * the maximum edit distance specified in `fuzziness` and then checks\n     * the term dictionary to find out which of those generated terms\n     * actually exist in the index.\n     * The fuzzy query uses similarity based on Levenshtein edit distance.\n     *\n     * @param {string=} field\n     * @param {string|number=} value\n     */\n    export function fuzzyQuery(\n        field?: string,\n        value?: string | number\n    ): FuzzyQuery;\n\n    /**\n     * Filters documents matching the provided document / mapping type.\n     *\n     * @param {string=} type The elasticsearch doc type\n     * @extends Query\n     */\n    export class TypeQuery extends Query {\n        constructor(type?: string);\n\n        /**\n         * Sets the elasticsearch doc type to query on.\n         *\n         * @param {string} type The elasticsearch doc type\n         */\n        value(type: string): this;\n\n        /**\n         * Sets the elasticsearch doc type to query on.\n         * Alias for method `value`.\n         *\n         * @param {string} type The elasticsearch doc type\n         */\n        type(type: string): this;\n    }\n\n    /**\n     * Filters documents matching the provided document / mapping type.\n     *\n     * @param {string=} type The elasticsearch doc type\n     */\n    export function typeQuery(type?: string): TypeQuery;\n\n    /**\n     * Filters documents that only have the provided ids.\n     * Note, this query uses the _uid field.\n     *\n     * @param {Array|string=} type The elasticsearch doc type\n     * @param 
{Array=} ids List of ids to filter on.\n     * @extends Query\n     */\n    export class IdsQuery extends Query {\n        constructor(type?: string[] | string, ids?: object);\n\n        /**\n         * Sets the elasticsearch doc type to query on.\n         * The type is optional and can be omitted, and can also accept an array of values.\n         * If no type is specified, all types defined in the index mapping are tried.\n         *\n         * @param {Array<string>|string} type The elasticsearch doc type\n         */\n        type(type: string[] | string): this;\n\n        /**\n         * Sets the list of ids to filter on.\n         *\n         * @param {Array<string|number>} ids\n         */\n        values(ids: string[] | number[]): this;\n\n        /**\n         * Sets the list of ids to filter on.\n         * Alias for `values` method.\n         *\n         * @param {Array<string|number>} ids\n         */\n        ids(ids: string[] | number[]): this;\n    }\n\n    /**\n     * Filters documents that only have the provided ids.\n     * Note, this query uses the _uid field.\n     *\n     * @param {Array|string=} type The elasticsearch doc type\n     * @param {Array=} ids List of ids to filter on.\n     */\n    export function idsQuery(type?: string[] | string, ids?: object): IdsQuery;\n\n    /**\n     * A query that wraps another query and simply returns a constant score\n     * equal to the query boost for every document in the filter.\n     * Maps to Lucene `ConstantScoreQuery`.\n     * Constructs a query where each document returned by the internal\n     * query or filter has a constant score equal to the boost factor.\n     *\n     * @param {Query=} filterQuery Query to filter on.\n     * @extends Query\n     */\n    export class ConstantScoreQuery extends Query {\n        constructor(filterQuery?: Query);\n\n        /**\n         * Adds the query to apply a constant score to.\n         *\n         * @param {Query} filterQuery  Query to filter on.\n         
*/\n        filter(filterQuery: Query): this;\n\n        /**\n         * Adds the query to apply a constant score to.\n         * Alias for method `filter`.\n         *\n         * Note: This parameter has been removed in elasticsearch 6.0. Use `filter` instead.\n         *\n         * @param {Query} filterQuery  Query to filter on.\n         */\n        query(filterQuery: Query): this;\n    }\n\n    /**\n     * A query that wraps another query and simply returns a constant score\n     * equal to the query boost for every document in the filter.\n     * Maps to Lucene `ConstantScoreQuery`.\n     * Constructs a query where each document returned by the internal\n     * query or filter has a constant score equal to the boost factor.\n     *\n     * @param {Query=} filterQuery Query to filter on.\n     */\n    export function constantScoreQuery(filterQuery?: Query): ConstantScoreQuery;\n\n    /**\n     * A query that matches documents matching boolean combinations of other queries.\n     * The bool query maps to Lucene `BooleanQuery`. It is built using one or more\n     * boolean clauses, each clause with a typed occurrence.\n     *\n     * @extends Query\n     */\n    export class BoolQuery extends Query {\n        constructor();\n        /**\n         * Adds `must` query to boolean container.\n         * The clause (query) **must** appear in matching documents and will contribute to the score.\n         *\n         * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n         * @throws {TypeError} If Array item or query is not an instance of `Query`\n         */\n        must(queries: Query[] | Query): this;\n\n        /**\n         * Adds `filter` query to boolean container.\n         * The clause (query) **must** appear in matching documents. However unlike `must` the score\n         * of the query will be ignored. 
Filter clauses are executed in filter context, meaning that\n         * scoring is ignored and clauses are considered for caching.\n         *\n         * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n         * @throws {TypeError} If Array item or query is not an instance of `Query`\n         */\n        filter(queries: Query[] | Query): this;\n\n        /**\n         * Adds `must_not` query to boolean container.\n         * The clause (query) **must not** appear in the matching documents.\n         * Clauses are executed in filter context meaning that scoring is ignored\n         * and clauses are considered for caching. Because scoring is ignored,\n         * a score of 0 for all documents is returned.\n         *\n         * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n         * @throws {TypeError} If Array item or query is not an instance of `Query`\n         */\n        mustNot(queries: Query[] | Query): this;\n\n        /**\n         * Adds `should` query to boolean container.\n         * The clause (query) **should** appear in the matching document. In a boolean query with\n         * no must or filter clauses, one or more should clauses must match a document.\n         * The minimum number of should clauses to match can be set using the\n         * `minimum_should_match` parameter.\n         *\n         * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n         * @throws {TypeError} If Array item or query is not an instance of `Query`\n         */\n        should(queries: Query[] | Query): this;\n\n        /**\n         * Enables or disables similarity coordinate scoring of documents\n         * combining the `CommonTermsQuery`. Default: `false`.\n         *\n         * **NOTE**: This has been removed in elasticsearch 6.0. 
If provided,\n         * it will be ignored and a deprecation warning will be issued.\n         *\n         * @param {boolean} enable\n         */\n        disableCoord(enable: boolean): this;\n\n        /**\n         * Sets the value controlling how many `should` clauses in the boolean\n         * query should match. It can be an absolute value (2), a percentage (30%)\n         * or a combination of both. By default no optional clauses are necessary for a match.\n         * However, if the bool query is used in a filter context and it has `should` clauses then,\n         * at least one `should` clause is required to match.\n         *\n         * @param {string|number} minimumShouldMatch An absolute value (2), a percentage (30%)\n         * or a combination of both.\n         */\n        minimumShouldMatch(minimumShouldMatch: string | number): this;\n\n        /**\n         * Sets if the `Query` should be enhanced with a `MatchAllQuery` in order\n         * to act as a pure exclude when only negative (mustNot) clauses exist. Default: true.\n         *\n         * @param {boolean} enable\n         */\n        adjustPureNegative(enable: boolean): this;\n    }\n\n    /**\n     * A query that matches documents matching boolean combinations of other queries.\n     * The bool query maps to Lucene `BooleanQuery`. It is built using one or more\n     * boolean clauses, each clause with a typed occurrence.\n     */\n    export function boolQuery(): BoolQuery;\n\n    /**\n     * A query that generates the union of documents produced by its subqueries,\n     * and that scores each document with the maximum score for that document\n     * as produced by any subquery, plus a tie breaking increment for\n     * any additional matching subqueries.\n     *\n     * @extends Query\n     */\n    export class DisMaxQuery extends Query {\n        constructor();\n        /**\n         * The tie breaker value. 
The tie breaker capability allows results\n         * that include the same term in multiple fields to be judged better than\n         * results that include this term in only the best of those multiple\n         * fields, without confusing this with the better case of two different\n         * terms in the multiple fields. Default: `0.0`.\n         *\n         * @param {number} factor\n         */\n        tieBreaker(factor: number): this;\n\n        /**\n         * Add given query array or query to list of queries\n         *\n         * @param {Array<Query>|Query} queries Array of valid `Query` objects or a `Query` object\n         */\n        queries(queries: Query[] | Query): this;\n    }\n\n    /**\n     * A query that generates the union of documents produced by its subqueries,\n     * and that scores each document with the maximum score for that document\n     * as produced by any subquery, plus a tie breaking increment for\n     * any additional matching subqueries.\n     */\n    export function disMaxQuery(): DisMaxQuery;\n\n    /**\n     * The `function_score` allows you to modify the score of documents that are\n     * retrieved by a query. 
This can be useful if, for example, a score function\n     * is computationally expensive and it is sufficient to compute the score on\n     * a filtered set of documents.\n     *\n     * @extends Query\n     */\n    export class FunctionScoreQuery extends Query {\n        constructor();\n        /**\n         * Sets the source query.\n         * @param {Query} query A valid `Query` object\n         */\n        query(query: Query): this;\n\n        /**\n         * Controls the way the scores are combined.\n         * @param {string} mode Can be one of `multiply`, `sum`, `first`, `min`, `max`, `avg`.\n         * Defaults to `multiply`.\n         */\n        scoreMode(\n            mode: 'multiply' | 'sum' | 'first' | 'min' | 'max' | 'avg'\n        ): this;\n\n        /**\n         * Controls the way the query and function scores are combined.\n         * @param {string} mode Can be one of `multiply`, `replace`, `sum`, `avg`, `max`, `min`.\n         * Defaults to `multiply`.\n         */\n        boostMode(\n            mode: 'multiply' | 'sum' | 'replace' | 'min' | 'max' | 'avg'\n        ): this;\n\n        /**\n         * Restricts new score to not exceed given limit. 
The default for `max_boost` is `FLT_MAX`.\n         * @param {number} limit\n         */\n        maxBoost(limit: number): this;\n\n        /**\n         * Sets the minimum score limit for documents to be included in search result.\n         * @param {number} limit Minimum score threshold\n         */\n        minScore(limit: number): this;\n\n        /**\n         * Add a single score function to the list of existing functions.\n         * @param {ScoreFunction} func A valid `ScoreFunction` object.\n         */\n        function(func: ScoreFunction): this;\n\n        /**\n         * Adds array of score functions to the list of existing functions.\n         * @param {Array<ScoreFunction>} funcs An array of valid `ScoreFunction` objects\n         */\n        functions(funcs: ScoreFunction[]): this;\n    }\n\n    /**\n     * The `function_score` allows you to modify the score of documents that are\n     * retrieved by a query. This can be useful if, for example, a score function\n     * is computationally expensive and it is sufficient to compute the score on\n     * a filtered set of documents.\n     */\n    export function functionScoreQuery(): FunctionScoreQuery;\n\n    /**\n     * The boosting query can be used to effectively demote results that match\n     * a given query. 
Unlike the \"NOT\" clause in bool query, this still selects\n     * documents that contain undesirable terms, but reduces their overall\n     * score.\n     *\n     * @param {Query=} positiveQry A valid `Query` object.\n     * @param {Query=} negativeQry A valid `Query` object.\n     * @param {number=} negativeBoost A positive `double` value where `0 < n < 1`.\n     * @extends Query\n     */\n    export class BoostingQuery extends Query {\n        constructor(\n            positiveQry?: Query,\n            negativeQry?: Query,\n            negativeBoost?: number\n        );\n\n        /**\n         * Sets the \"master\" query that determines which results are returned.\n         *\n         * @param {Query} query A valid `Query` object.\n         */\n        positive(query: Query): this;\n\n        /**\n         * Sets the query used to match documents in the `positive`\n         * query that will be negatively boosted.\n         *\n         * @param {Query} query A valid `Query` object.\n         */\n        negative(query: Query): this;\n\n        /**\n         * Sets the negative boost value.\n         *\n         * @param {number} factor A positive `double` value where `0 < n < 1`.\n         */\n        negativeBoost(factor: number): this;\n    }\n\n    /**\n     * The boosting query can be used to effectively demote results that match\n     * a given query. 
Unlike the \"NOT\" clause in bool query, this still selects\n     * documents that contain undesirable terms, but reduces their overall\n     * score.\n     *\n     * @param {Query=} positiveQry A valid `Query` object.\n     * @param {Query=} negativeQry A valid `Query` object.\n     * @param {number=} negativeBoost A positive `double` value where `0 < n < 1`.\n     */\n    export function boostingQuery(\n        positiveQry?: Query,\n        negativeQry?: Query,\n        negativeBoost?: number\n    ): BoostingQuery;\n\n    /**\n     * The `JoiningQueryBase` class provides support for common options used across\n     * various joining query implementations.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} queryType\n     * @param {string} refUrl\n     * @param {Query=} qry A valid `Query` object\n     * @extends Query\n     */\n    class JoiningQueryBase extends Query {\n        constructor(queryType: string, refUrl: string, qry?: Query);\n\n        /**\n         * Sets the nested query to be executed.\n         *\n         * @param {Query} qry A valid `Query` object\n         */\n        query(qry: Query): this;\n\n        /**\n         * Sets the scoring method.\n         * Valid values are:\n         * - `none` - no scoring\n         * - `max` - the highest score of all matched child documents is used\n         * - `min` - the lowest score of all matched child documents is used\n         * - `sum` - the sum of all the matched child documents is used\n         * - `avg` - the default, the average of all matched child documents is used\n         *\n         * @param {string} mode Can be one of `none`, `sum`, `min`, `max`, `avg`.\n         * Defaults to `avg` for `NestedQuery`, `none` for `HasChildQuery`.\n         */\n        scoreMode(mode: 'none' | 'sum' | 'min' | 'max' | 'avg'): 
this;\n\n        /**\n         * When set to `true` will ignore an unmapped `path` and will not match any\n         * documents for this query. When set to `false` (the default value) the query\n         * will throw an exception if the path is not mapped.\n         *\n         * @param {boolean} enable `true` or `false`, `false` by default.\n         */\n        ignoreUnmapped(enable: boolean): this;\n\n        /**\n         * Sets the inner hits options\n         *\n         * @param {InnerHits} innerHits A valid `InnerHits` object\n         */\n        innerHits(innerHits: InnerHits): this;\n    }\n\n    /**\n     * Nested query allows to query nested objects. The query is executed against\n     * the nested objects / docs as if they were indexed as separate docs\n     * (they are, internally) and resulting in the root parent doc (or parent nested mapping).\n     *\n     * @param {Query=} qry A valid `Query` object\n     * @param {string=} path The nested object path.\n     * @extends JoiningQueryBase\n     */\n    export class NestedQuery extends JoiningQueryBase {\n        constructor(qry?: Query, path?: string);\n\n        /**\n         * Sets the root context for the nested query.\n         *\n         * @param {string} path\n         */\n        path(path: string): this;\n    }\n\n    /**\n     * Nested query allows to query nested objects. 
The query is executed against\n     * the nested objects / docs as if they were indexed as separate docs\n     * (they are, internally) and resulting in the root parent doc (or parent nested mapping).\n     *\n     * @param {Query=} qry A valid `Query` object\n     * @param {string=} path The nested object path.\n     */\n    export function nestedQuery(qry?: Query, path?: string): NestedQuery;\n\n    /**\n     * The `has_child` filter accepts a query and the child type to run against, and\n     * results in parent documents that have child docs matching the query.\n     *\n     * @param {Query=} qry A valid `Query` object\n     * @param {string=} type The child type\n     * @extends JoiningQueryBase\n     */\n    export class HasChildQuery extends JoiningQueryBase {\n        constructor(qry?: Query, type?: string);\n\n        /**\n         * Sets the child document type to search against.\n         * Alias for method `childType`.\n         *\n         * @param {string} type A valid doc type name\n         */\n        type(type: string): this;\n\n        /**\n         * Sets the child document type to search against\n         *\n         * @param {string} type A valid doc type name\n         */\n        childType(type: string): this;\n\n        /**\n         * Specify the minimum number of children that are required to match\n         * for the parent doc to be considered a match\n         *\n         * @param {number} limit A positive `integer` value.\n         */\n        minChildren(limit: number): this;\n\n        /**\n         * Specify the maximum number of children that are required to match\n         * for the parent doc to be considered a match\n         *\n         * @param {number} limit A positive `integer` value.\n         */\n        maxChildren(limit: number): this;\n    }\n\n    /**\n     * The `has_child` filter accepts a query and the child type to run against, and\n     * results in parent documents that have child docs matching the query.\n     *\n     * 
@param {Query=} qry A valid `Query` object\n     * @param {string=} type The child type\n     */\n    export function hasChildQuery(qry?: Query, type?: string): HasChildQuery;\n\n    /**\n     * The `has_parent` query accepts a query and a parent type. The query is\n     * executed in the parent document space, which is specified by the parent\n     * type. This query returns child documents which associated parents have\n     * matched.\n     *\n     * @param {Query=} qry A valid `Query` object\n     * @param {string=} type The parent type\n     * @extends JoiningQueryBase\n     */\n    export class HasParentQuery extends JoiningQueryBase {\n        constructor(qry?: Query, type?: string);\n\n        /**\n         * @override\n         * @throws {Error} `score_mode` is deprecated. Use `score` instead.\n         */\n        scoreMode(): never;\n\n        /**\n         * Sets the child document type to search against\n         * Alias for method `parentType`\n         *\n         * @param {string} type A valid doc type name\n         */\n        type(type: string): this;\n\n        /**\n         * Sets the child document type to search against\n         *\n         * @param {string} type A valid doc type name\n         */\n        parentType(type: string): this;\n\n        /**\n         * By default, scoring is `false` which ignores the score from the parent document.\n         * The score is in this case equal to the boost on the `has_parent` query (Defaults to 1).\n         * If the score is set to `true`, then the score of the matching parent document is\n         * aggregated into the child documents belonging to the matching parent document.\n         *\n         * @param {boolean} enable `true` to enable scoring, `false` to disable.\n         * `false` by default.\n         */\n        score(enable: boolean): this;\n    }\n\n    /**\n     * The `has_parent` query accepts a query and a parent type. 
The query is\n     * executed in the parent document space, which is specified by the parent\n     * type. This query returns child documents which associated parents have\n     * matched.\n     *\n     * @param {Query=} qry A valid `Query` object\n     * @param {string=} type The parent type\n     */\n    export function hasParentQuery(qry?: Query, type?: string): HasParentQuery;\n\n    /**\n     * The `parent_id` query can be used to find child documents which belong to a particular parent.\n     *\n     * @param {string=} type The **child** type. This must be a type with `_parent` field.\n     * @param {string|number=} id The required parent id select documents must refer to.\n     * @extends Query\n     */\n    export class ParentIdQuery extends Query {\n        constructor(type?: string, id?: string | number);\n\n        /**\n         * Sets the child type.\n         *\n         * @param {string} type The **child** type. This must be a type with `_parent` field.\n         */\n        type(type: string): this;\n\n        /**\n         * Sets the id.\n         *\n         * @param {string|number} id The required parent id select documents must refer to.\n         */\n        id(id: string | number): this;\n\n        /**\n         * When set to `true` will ignore an unmapped `path` and will not match any\n         * documents for this query. When set to `false` (the default value) the query\n         * will throw an exception if the path is not mapped.\n         *\n         * @param {boolean} enable `true` or `false`, `false` by default.\n         */\n        ignoreUnmapped(enable: boolean): this;\n    }\n\n    /**\n     * The `parent_id` query can be used to find child documents which belong to a particular parent.\n     *\n     * @param {string=} type The **child** type. 
This must be a type with `_parent` field.\n     * @param {string|number=} id The required parent id select documents must refer to.\n     */\n    export function parentIdQuery(\n        type?: string,\n        id?: string | number\n    ): ParentIdQuery;\n\n    /**\n     * The `GeoQueryBase` provides support for common options used across\n     * various geo query implementations.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} queryType\n     * @param {string=} field\n     * @extends Query\n     */\n    export class GeoQueryBase extends Query {\n        constructor(queryType: string, field?: string);\n\n        /**\n         * Sets the field to run the geo query on.\n         *\n         * @param {string} field\n         */\n        field(field: string): this;\n\n        /**\n         * Sets the `validation_method` parameter. Can be set to `IGNORE_MALFORMED` to accept\n         * geo points with invalid latitude or longitude, `COERCE` to try and infer correct latitude\n         * or longitude, or `STRICT` (default is `STRICT`).\n         *\n         * Note: The `ignore_malformed` and `coerce` parameters have been removed from\n         * `geo_bounding_box`, `geo_polygon`, and `geo_distance` queries in elasticsearch 6.0.\n         *\n         * @param {string} method One of `IGNORE_MALFORMED`, `COERCE` or `STRICT`(default)\n         * @throws {Error} If `method` parameter is not one of `IGNORE_MALFORMED`, `COERCE` or `STRICT`\n         */\n        validationMethod(\n            method: 'IGNORE_MALFORMED' | 'COERCE' | 'STRICT'\n        ): this;\n    }\n\n    /**\n     * Filter documents indexed using the `geo_shape` type. 
Requires\n     * the `geo_shape` Mapping.\n     *\n     * The `geo_shape` query uses the same grid square representation as\n     * the `geo_shape` mapping to find documents that have a shape that\n     * intersects with the query shape. It will also use the same PrefixTree\n     * configuration as defined for the field mapping.\n     *\n     * The query supports two ways of defining the query shape, either by\n     * providing a whole shape definition, or by referencing the name of\n     * a shape pre-indexed in another index.\n     *\n     * @param {string=} field\n     * @extends GeoQueryBase\n     */\n    export class GeoShapeQuery extends GeoQueryBase {\n        constructor(field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoShapeQuery\n         */\n        validationMethod(): never;\n\n        /**\n         * Sets the shape definition for the geo query.\n         *\n         * @param {GeoShape} shape\n         * @throws {TypeError} If given `shape` is not an instance of `GeoShape`\n         */\n        shape(shape: GeoShape): this;\n\n        /**\n         * Sets the reference name of a shape pre-indexed in another index.\n         *\n         * @param {IndexedShape} shape\n         * @throws {TypeError} If given `shape` is not an instance of `IndexedShape`\n         */\n        indexedShape(shape: IndexedShape): this;\n\n        /**\n         * Sets the relationship between Query and indexed data\n         * that will be used to determine if a Document should be matched or not.\n         *\n         * @param {string} relation Can be one of `WITHIN`, `CONTAINS`, `DISJOINT`\n         * or `INTERSECTS`(default)\n         */\n        relation(\n            relation: 'WITHIN' | 'CONTAINS' | 'DISJOINT' | 'INTERSECTS'\n        ): this;\n\n        /**\n         * When set to `true` will ignore an unmapped `path` and will not match any\n         * documents for this query. 
When set to `false` (the default value) the query\n         * will throw an exception if the path is not mapped.\n         *\n         * @param {boolean} enable `true` or `false`, `false` by default.\n         */\n        ignoreUnmapped(enable: boolean): this;\n    }\n\n    /**\n     * Filter documents indexed using the `geo_shape` type. Requires\n     * the `geo_shape` Mapping.\n     *\n     * The `geo_shape` query uses the same grid square representation as\n     * the `geo_shape` mapping to find documents that have a shape that\n     * intersects with the query shape. It will also use the same PrefixTree\n     * configuration as defined for the field mapping.\n     *\n     * The query supports two ways of defining the query shape, either by\n     * providing a whole shape definition, or by referencing the name of\n     * a shape pre-indexed in another index.\n     *\n     * @param {string=} field\n     */\n    export function geoShapeQuery(field?: string): GeoShapeQuery;\n\n    /**\n     * A query allowing to filter hits based on a point location using a bounding box.\n     *\n     * @param {string=} field\n     * @extends GeoQueryBase\n     */\n    export class GeoBoundingBoxQuery extends GeoQueryBase {\n        constructor(field?: string);\n\n        /**\n         * Sets the top left coordinate for the Geo bounding box filter for\n         * querying documents\n         *\n         * @param {GeoPoint} point A valid `GeoPoint`\n         */\n        topLeft(point: GeoPoint): this;\n\n        /**\n         * Sets the bottom right coordinate for the Geo bounding box filter for\n         * querying documents\n         *\n         * @param {GeoPoint} point A valid `GeoPoint`\n         */\n        bottomRight(point: GeoPoint): this;\n\n        /**\n         * Sets the top right coordinate for the Geo bounding box filter for\n         * querying documents\n         *\n         * @param {GeoPoint} point A valid `GeoPoint`\n         */\n        topRight(point: 
GeoPoint): this;\n\n        /**\n         * Sets the bottom left coordinate for the Geo bounding box filter for\n         * querying documents\n         *\n         * @param {GeoPoint} point A valid `GeoPoint`\n         */\n        bottomLeft(point: GeoPoint): this;\n\n        /**\n         * Sets value for top of the bounding box.\n         *\n         * @param {number} val\n         */\n        top(val: number): this;\n\n        /**\n         * Sets value for left of the bounding box.\n         *\n         * @param {number} val\n         */\n        left(val: number): this;\n\n        /**\n         * Sets value for bottom of the bounding box.\n         *\n         * @param {number} val\n         */\n        bottom(val: number): this;\n\n        /**\n         * Sets value for right of the bounding box.\n         *\n         * @param {number} val\n         */\n        right(val: number): this;\n\n        /**\n         * Sets the type of execution for the bounding box query.\n         * The type of the bounding box execution by default is set to memory,\n         * which means in memory checks if the doc falls within the bounding\n         * box range. 
In some cases, an indexed option will perform faster\n         * (but note that the geo_point type must have lat and lon indexed in this case)\n         *\n         * @param {string} type Can be either `memory` or `indexed`\n         */\n        type(type: 'memory' | 'indexed'): this;\n    }\n\n    /**\n     * A query allowing to filter hits based on a point location using a bounding box.\n     *\n     * @param {string=} field\n     */\n    export function geoBoundingBoxQuery(field?: string): GeoBoundingBoxQuery;\n\n    /**\n     * Filters documents that include only hits that exist within a specific distance from a geo point.\n     *\n     * @param {string=} field\n     * @param {GeoPoint=} point Geo point used to measure and filter documents based on distance from it.\n     * @extends GeoQueryBase\n     */\n    export class GeoDistanceQuery extends GeoQueryBase {\n        constructor(field?: string, point?: GeoPoint);\n\n        /**\n         * Sets the radius of the circle centred on the specified location. Points which\n         * fall into this circle are considered to be matches. 
The distance can be specified\n         * in various units.\n         *\n         * @param {string|number} distance Radius of circle centred on specified location.\n         */\n        distance(distance: string | number): this;\n\n        /**\n         * Sets the distance calculation mode, `arc` or `plane`.\n         * The `arc` calculation is the more accurate.\n         * The `plane` is the faster but least accurate.\n         *\n         * @param {string} type\n         * @throws {Error} If `type` is neither `plane` nor `arc`.\n         */\n        distanceType(type: 'arc' | 'plane'): this;\n\n        /**\n         * Sets the point to filter documents based on the distance from it.\n         *\n         * @param {GeoPoint} point Geo point used to measure and filter documents based on distance from it.\n         * @throws {TypeError} If parameter `point` is not an instance of `GeoPoint`\n         */\n        geoPoint(point: GeoPoint): this;\n    }\n\n    /**\n     * Filters documents that include only hits that exist within a specific distance from a geo point.\n     *\n     * @param {string=} field\n     * @param {GeoPoint=} point Geo point used to measure and filter documents based on distance from it.\n     */\n    export function geoDistanceQuery(\n        field?: string,\n        point?: GeoPoint\n    ): GeoDistanceQuery;\n\n    /**\n     * A query allowing to include hits that only fall within a polygon of points.\n     *\n     * @param {string=} field\n     * @extends GeoQueryBase\n     */\n    export class GeoPolygonQuery extends GeoQueryBase {\n        constructor(field?: string);\n\n        /**\n         * Sets the points which form the polygon.\n         * Points can be instances of `GeoPoint`, Object with `lat`, `lon` keys,\n         * `GeoJSON` array representation or string(`geohash`/`lat, lon`)\n         *\n         * @param {Array<*>} points\n         * @throws {TypeError} If `points` parameter is not an instance of `Array`.\n         */\n       
 points(points: any[]): GeoPolygonQuery;\n    }\n\n    /**\n     * A query allowing to include hits that only fall within a polygon of points.\n     *\n     * @param {string=} field\n     */\n    export function geoPolygonQuery(field?: string): GeoPolygonQuery;\n\n    /**\n     * The More Like This Query (MLT Query) finds documents that are \"like\" a given set\n     * of documents. In order to do so, MLT selects a set of representative terms of\n     * these input documents, forms a query using these terms, executes the query and\n     * returns the results. The user controls the input documents, how the terms should\n     * be selected and how the query is formed.\n     *\n     * @extends Query\n     */\n    export class MoreLikeThisQuery extends Query {\n        constructor();\n        /**\n         * Sets the list of fields to fetch and analyze the text from. Defaults to\n         * the `_all` field for free text and to all possible fields for document inputs.\n         *\n         * @param {Array<string>} fields Array of fields to search against\n         */\n        fields(fields: string[]): this;\n\n        /**\n         * Sets the search clause for the query. It is the only required parameter of the MLT query\n         * and follows a versatile syntax, in which the user can specify free form text and/or\n         * a single or multiple documents (see examples above). The syntax to specify documents\n         * is similar to the one used by the Multi GET API.\n         * When specifying documents, the text is fetched from fields unless overridden\n         * in each document request. The text is analyzed by the analyzer at the field,\n         * but could also be overridden. 
The syntax to override the analyzer at the\n         * field follows a similar syntax to the `per_field_analyzer` parameter of the\n         * Term Vectors API.\n         * Additionally, to provide documents not necessarily present in the index,\n         * artificial documents are also supported.\n         * If string or object is passed, it is\n         * appended to the list. If an array is passed, it replaces the existing list.\n         *\n         * @param {string|Object|Array} like Can be passed as a string,\n         * Object representing indexed document, or array of string/objects.\n         */\n        like(like: string | object | string[] | object[]): this;\n\n        /**\n         * The `unlike` parameter is used in conjunction with `like` in order not to\n         * select terms found in a chosen set of documents. In other words, we could ask\n         * for documents `like`: \"Apple\", but `unlike`: \"cake crumble tree\".\n         * The syntax is the same as like.\n         *\n         * @param {string|Object|Array} unlike Can be passed as a string,\n         * Object representing indexed document, or array of string/objects.\n         */\n        unlike(unlike: string | object | string[] | object[]): this;\n\n        /**\n         * Sets the text to find documents like it.\n         *\n         * Note: This parameter has been removed in elasticsearch 6.0. Use `like` instead.\n         *\n         * @param {string} txt The text to find documents like it.\n         */\n        likeText(txt: string): this;\n\n        /**\n         * Sets the list of `ids` for the documents with syntax similar to\n         * the Multi GET API.\n         *\n         * Note: This parameter has been removed in elasticsearch 6.0. 
Use `like` instead.\n         *\n         * @param {Array<string>} ids\n         */\n        ids(ids: string[]): this;\n\n        /**\n         * Sets the list of `docs` for the documents with syntax similar to\n         * the Multi GET API.\n         *\n         * Note: This parameter has been removed in elasticsearch 6.0. Use `like` instead.\n         *\n         * @param {Array<Object>} docs\n         */\n        docs(docs: object[]): this;\n\n        /**\n         * Sets the maximum number of query terms that will be selected.\n         * Increasing this value gives greater accuracy at the expense of query execution speed.\n         * Defaults to `25`.\n         *\n         * @param {number} termsLimit The maximum number of query terms that will be selected.\n         */\n        maxQueryTerms(termsLimit: number): this;\n\n        /**\n         * Sets the minimum term frequency below which the terms will be ignored from\n         * the input document. Defaults to 2.\n         *\n         * @param {number} termFreqLimit\n         */\n        minTermFreq(termFreqLimit: number): this;\n\n        /**\n         * Sets the minimum document frequency below which the terms will be ignored\n         * from the input document. Defaults to `5`.\n         *\n         * @param {number} docFreqLimit The minimum document frequency\n         */\n        minDocFreq(docFreqLimit: number): this;\n\n        /**\n         * Sets the maximum document frequency above which the terms will be ignored\n         * from the input document. 
Defaults to unbounded (`0`).\n         *\n         * @param {number} docFreqLimit The minimum document frequency\n         */\n        maxDocFreq(docFreqLimit: number): this;\n\n        /**\n         * Sets the minimum word length below which the terms will be ignored.\n         * Defaults to `0`.\n         *\n         * @param {number} wordLenLimit\n         */\n        minWordLength(wordLenLimit: number): this;\n\n        /**\n         * Sets the maximum word length above which the terms will be ignored.\n         * Defaults to unbounded (`0`).\n         *\n         * @param {number} wordLenLimit\n         */\n        maxWordLength(wordLenLimit: number): this;\n\n        /**\n         * Sets the array of stop words. Any word in this set is considered\n         * \"uninteresting\" and ignored.\n         *\n         * @param {Array<string>} words Array of stop words.\n         */\n        stopWords(words: string[]): this;\n\n        /**\n         * Set the analyzer to control which analyzer will perform the analysis process on the text.\n         * Defaults to the analyzer associated with the first field in `fields`.\n         *\n         * @param {string} analyzer A valid text analyzer.\n         */\n        analyzer(analyzer: string): this;\n\n        /**\n         * Sets the value controlling how many `should` clauses in the boolean\n         * query should match. It can be an absolute value (2), a percentage (30%)\n         * or a combination of both. 
(Defaults to `\"30%\"`).\n         *\n         * @param {string|number} minimumShouldMatch An absolute value (`2`), a percentage (`30%`)\n         * or a combination of both.\n         */\n        minimumShouldMatch(minimumShouldMatch: string | number): this;\n\n        /**\n         * Sets the boost factor to use when boosting terms.\n         * Defaults to deactivated (`0`).\n         *\n         * @param {number} boost A positive value to boost terms.\n         */\n        boostTerms(boost: number): this;\n\n        /**\n         * Specifies whether the input documents should also be included in the\n         * search results returned. Defaults to `false`.\n         *\n         * @param {boolean} enable\n         */\n        include(enable: boolean): this;\n    }\n\n    /**\n     * The More Like This Query (MLT Query) finds documents that are \"like\" a given set\n     * of documents. In order to do so, MLT selects a set of representative terms of\n     * these input documents, forms a query using these terms, executes the query and\n     * returns the results. 
The user controls the input documents, how the terms should\n     * be selected and how the query is formed.\n     */\n    export function moreLikeThisQuery(): MoreLikeThisQuery;\n\n    /**\n     * A query allowing to define scripts as queries.\n     * They are typically used in a filter context.\n     *\n     * @param {Script=} script\n     * @extends Query\n     */\n    export class ScriptQuery extends Query {\n        constructor(script?: Script);\n\n        /**\n         * Sets the `script` for query.\n         * @param {Script} script\n         */\n        script(script: Script): this;\n    }\n\n    /**\n     * A query allowing to define scripts as queries.\n     * They are typically used in a filter context.\n     *\n     * @param {Script=} script\n     */\n    export function scriptQuery(script?: Script): ScriptQuery;\n\n    /**\n     * A query that uses a script to provide a custom score for returned documents.\n     *\n     * @extends Query\n     */\n    export class ScriptScoreQuery extends Query {\n        constructor();\n\n        /**\n         * Sets the query used to return documents.\n         *\n         * @param {Query} query A valid `Query` object\n         */\n        query(query: Query): this;\n\n        /**\n         * Sets the script used to compute the score of documents returned by the query.\n         *\n         * @param {Script} script A valid `Script` object\n         */\n        script(script: Script): this;\n\n        /**\n         * Sets the minimum score limit for documents to be included in search result.\n         *\n         * @param {number} limit Minimum score threshold\n         */\n        minScore(limit: number): this;\n    }\n\n    /**\n     * A query that uses a script to provide a custom score for returned documents.\n     */\n    export function scriptScoreQuery(): ScriptScoreQuery;\n\n    /**\n     * The `percolate` query can be used to match queries stored in an index.\n     * The `percolate` query itself contains the 
document that will be used\n     * as query to match with the stored queries.\n     *\n     * @param {string=} field The field of type `percolator` and that holds the indexed queries.\n     * @param {string=} docType The type / mapping of the document being percolated.\n     * @extends Query\n     */\n    export class PercolateQuery extends Query {\n        constructor(field?: string, docType?: string);\n\n        /**\n         * Sets the field of type `percolator` and that holds the indexed queries.\n         *\n         * @param {string} field The field of type `percolator` and that holds the indexed queries.\n         */\n        field(field: string): this;\n\n        /**\n         * Sets the type / mapping of the document being percolated.\n         *\n         * Note: This param has been deprecated in elasticsearch 6.0. From 6.0 and later, it is no\n         * longer required to specify the `document_type` parameter.\n         *\n         * @param {string} docType The type / mapping of the document being percolated.\n         */\n        documentType(docType: string): this;\n\n        /**\n         * Appends given source document to the list of source documents being percolated.\n         * Instead of specifying the source document being percolated,\n         * the source can also be retrieved from an already stored document.\n         *\n         * @param {Object} doc The source document being percolated.\n         */\n        document(doc: object): this;\n\n        /**\n         * Appends given source documents to the list of source documents being percolated.\n         * Instead of specifying the source documents being percolated,\n         * the source can also be retrieved from already stored documents.\n         *\n         * @param {Object[]} docs The source documents being percolated.\n         */\n        documents(docs: object[]): this;\n\n        /**\n         * Sets the index the document resides in. 
This is a required parameter if `document`\n         * is not specified.\n         *\n         * @param {string} index The index the document resides in.\n         */\n        index(index: string): this;\n\n        /**\n         * Sets the type of the document to fetch. This is a required parameter if `document`\n         * is not specified.\n         *\n         * @param {string} type The type of the document to fetch.\n         */\n        type(type: string): this;\n\n        /**\n         * Sets the id of the document to fetch. This is a required parameter if `document`\n         * is not specified.\n         *\n         * @param {string} id The id of the document to fetch.\n         */\n        id(id: string): this;\n\n        /**\n         * Sets the routing to be used to fetch document to percolate. Optional.\n         *\n         * @param {string} routing The routing to be used to fetch document to percolate.\n         */\n        routing(routing: string): this;\n\n        /**\n         * Sets the preference to be used to fetch document to percolate. Optional.\n         *\n         * @param {string} preference The preference to be used to fetch document to percolate.\n         */\n        preference(preference: string): this;\n\n        /**\n         * Sets the expected version of the document to be fetched. 
Optional.\n         * If the version does not match, the search request will fail\n         * with a version conflict error.\n         *\n         * @param {string} version The expected version of the document to be fetched.\n         */\n        version(version: string): this;\n    }\n\n    /**\n     * The `percolate` query can be used to match queries stored in an index.\n     * The `percolate` query itself contains the document that will be used\n     * as query to match with the stored queries.\n     *\n     * @param {string=} field The field of type `percolator` and that holds the indexed queries.\n     * @param {string=} docType The type / mapping of the document being percolated.\n     */\n    export function percolateQuery(\n        field?: string,\n        docType?: string\n    ): PercolateQuery;\n\n    /**\n     * The `distance_feature` query can be used to filter documents that are inside\n     * a timeframe or radius given an **origin** point. For dates the difference can be\n     * minutes, hours, etc and for coordinates it can be meters, kilometers..\n     *\n     *  [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-distance-feature-query.html)\n     *\n     * NOTE: Only available in Elasticsearch 7.1.0+.\n     *\n     * @example\n     * const query = new DistanceFeatureQuery('time');\n     *   query\n     *       .origin('now')\n     *       .pivot('1h')\n     *       .toJSON();\n     *\n     * @param {string} field The field inside the document to be used in the query\n     * @extends Query\n     */\n    export class DistanceFeatureQuery extends Query {\n        constructor(field?: string);\n\n        /**\n         * Sets the field for the `distance_feature` query\n         * @param {string} fieldName Name of the field inside the document\n         * @returns {DistanceFeatureQuery} Instance of the query\n         */\n        field(fieldName: string): DistanceFeatureQuery;\n\n        /**\n         * 
Sets the origin of the function. Date or point of coordinates\n         * used to calculate distances\n         * @param {GeoPoint | string} originPoint\n         * @returns {DistanceFeatureQuery} Instance of the distance feature query\n         */\n        origin(originPoint: string | GeoPoint): DistanceFeatureQuery;\n\n        /**\n         * Distance from the origin at which relevance scores receive half of the boost value.\n         * @param {string} pivotDistance Distance value. If the field value is date then this must be a\n         * [time unit](https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#time-units).\n         * If it's a geo point field, then a [distance unit](https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#distance-units)\n         * @returns {DistanceFeatureQuery} Instance of the distance feature query\n         */\n        pivot(pivotDistance: string): DistanceFeatureQuery;\n    }\n\n    /**\n     * The `distance_feature` query can be used to filter documents that are inside\n     * a timeframe or radius given an **origin** point. 
For dates the difference can be\n     * minutes, hours, etc and for coordinates it can be meters, kilometers..\n     *\n     *  [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-distance-feature-query.html)\n     *\n     * NOTE: Only available in Elasticsearch 7.1.0+.\n     *\n     * @example\n     * const query = new DistanceFeatureQuery('time');\n     *   query\n     *       .origin('now')\n     *       .pivot('1h')\n     *       .toJSON();\n     *\n     * @param {string} field The field inside the document to be used in the query\n     * @return {DistanceFeatureQuery}\n     */\n    export function distanceFeatureQuery(field?: string): DistanceFeatureQuery;\n\n    /**\n     * The `rank_feature` query boosts the relevance score on the numeric value of\n     * document with a rank_feature/rank_features field.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-rank-feature-query.html)\n     *\n     * NOTE: This query was added in elasticsearch v7.0.\n     *\n     * @example\n     * const query = new RankFeatureQuery('rank_feature_field');\n     *   query\n     *       .linear()\n     *       .toJSON();\n     * @param {string} field The field inside the document to be used in the query\n     * @extends Query\n     */\n    export class RankFeatureQuery extends Query {\n        constructor(field?: string);\n\n        /**\n         * Sets the field for the `rank_feature` query\n         * @param {string} fieldName Name of the field inside the document\n         * @returns {RankFeatureQuery} Instance of the query\n         */\n        field(fieldName: string): RankFeatureQuery;\n\n        /**\n         * Linear function to boost relevance scores based on the value of the rank feature field\n         * @returns {RankFeatureQuery}\n         */\n        linear(): RankFeatureQuery;\n\n        /**\n         * Saturation function to boost 
relevance scores based on the value of the rank feature field.\n         * Uses a default pivot value computed by Elasticsearch.\n         * @returns {RankFeatureQuery}\n         */\n        saturation(): RankFeatureQuery;\n\n        /**\n         * Saturation function to boost relevance scores based on the value of the rank feature field.\n         * @param {number} pivot\n         * @returns {RankFeatureQuery}\n         */\n        saturationPivot(pivot: number): RankFeatureQuery;\n\n        /**\n         * The log function gives a score equal to log(scaling_factor + S), where S\n         * is the value of the rank feature field and scaling_factor is a configurable\n         * scaling factor.\n         * @param {number} scalingFactor\n         * @returns {RankFeatureQuery}\n         */\n        log(scalingFactor: number): RankFeatureQuery;\n\n        /**\n         * The sigmoid function extends the saturation function with a configurable exponent.\n         * @param {number} pivot\n         * @param {number} exponent\n         * @returns {RankFeatureQuery}\n         */\n        sigmoid(pivot: number, exponent: number): RankFeatureQuery;\n    }\n\n    /**\n     * The `rank_feature` query boosts the relevance score on the numeric value of\n     * document with a rank_feature/rank_features field.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-rank-feature-query.html)\n     *\n     * @example\n     * const query = new RankFeatureQuery('rank_feature_field');\n     *   query\n     *       .linear()\n     *       .toJSON();\n     * @param {string} field The field inside the document to be used in the query\n     * @return {RankFeatureQuery}\n     */\n    export function rankFeatureQuery(field?: string): RankFeatureQuery;\n\n    /**\n     * Interface-like class used to group and identify various implementations of Span queries.\n     *\n     * **NOTE:** Instantiating this directly should not be 
required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @extends Query\n     */\n    export class SpanQueryBase extends Query {}\n\n    /**\n     * Matches spans containing a term. The span term query maps to Lucene `SpanTermQuery`.\n     *\n     * @param {string=} field The document field to query against\n     * @param {string|number=} value The query string\n     * @extends SpanQueryBase\n     */\n    export class SpanTermQuery extends SpanQueryBase {\n        constructor(field?: string, value?: string | number);\n\n        /**\n         * Sets the field to search on.\n         *\n         * @param {string} field\n         */\n        field(field: string): this;\n\n        /**\n         * Sets the query string.\n         *\n         * @param {string|number} queryVal\n         */\n        value(queryVal: string | number): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation of the Span term query\n         * class instance.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * Matches spans containing a term. 
The span term query maps to Lucene `SpanTermQuery`.\n     *\n     * @param {string=} field The document field to query against\n     * @param {string|number=} value The query string\n     */\n    export function spanTermQuery(\n        field?: string,\n        value?: string | number\n    ): SpanTermQuery;\n\n    /**\n     * The `span_multi` query allows you to wrap a `multi term query` (one of wildcard,\n     * fuzzy, prefix, range or regexp query) as a `span query`, so it can be nested.\n     *\n     * @param {MultiTermQueryBase=} multiTermQry One of wildcard, fuzzy, prefix, range or regexp query\n     * @extends SpanQueryBase\n     */\n    export class SpanMultiTermQuery extends SpanQueryBase {\n        constructor(multiTermQry?: MultiTermQueryBase);\n\n        /**\n         * Sets the multi term query.\n         *\n         * @param {MultiTermQueryBase} multiTermQry One of wildcard, fuzzy, prefix, range or regexp query\n         */\n        match(multiTermQry: MultiTermQueryBase): this;\n    }\n\n    /**\n     * The `span_multi` query allows you to wrap a `multi term query` (one of wildcard,\n     * fuzzy, prefix, range or regexp query) as a `span query`, so it can be nested.\n     *\n     * @param {MultiTermQueryBase=} multiTermQry One of wildcard, fuzzy, prefix, range or regexp query\n     */\n    export function spanMultiTermQuery(\n        multiTermQry?: MultiTermQueryBase\n    ): SpanMultiTermQuery;\n\n    /**\n     * Matches spans near the beginning of a field. 
The span first query maps to Lucene `SpanFirstQuery`.\n     *\n     * @param {SpanQueryBase=} spanQry Any other span type query\n     * @extends SpanQueryBase\n     */\n    export class SpanFirstQuery extends SpanQueryBase {\n        constructor(spanQry?: SpanQueryBase);\n\n        /**\n         * Sets the `match` clause which can be any other span type query.\n         *\n         * @param {SpanQueryBase} spanQry\n         */\n        match(spanQry: SpanQueryBase): this;\n\n        /**\n         * Sets the maximum end position permitted in a match.\n         *\n         * @param {number} limit The maximum end position permitted in a match.\n         */\n        end(limit: number): this;\n    }\n\n    /**\n     * Matches spans near the beginning of a field. The span first query maps to Lucene `SpanFirstQuery`.\n     *\n     * @param {SpanQueryBase=} spanQry Any other span type query\n     */\n    export function spanFirstQuery(spanQry?: SpanQueryBase): SpanFirstQuery;\n\n    /**\n     * Matches spans which are near one another. One can specify `slop`, the maximum\n     * number of intervening unmatched positions, as well as whether matches are\n     * required to be in-order. 
The span near query maps to Lucene `SpanNearQuery`.\n     *\n     * @extends SpanQueryBase\n     */\n    export class SpanNearQuery extends SpanQueryBase {\n        constructor();\n\n        /**\n         * Sets the clauses element which is a list of one or more other span type queries.\n         *\n         * @param {Array<SpanQueryBase>} clauses\n         * @throws {TypeError} If parameter `clauses` is not an instance of Array or if\n         * any member of the array is not an instance of `SpanQueryBase`.\n         */\n        clauses(clauses: SpanQueryBase[]): this;\n\n        /**\n         * Configures the `slop`(default is 0), the maximum number of intervening\n         * unmatched positions permitted.\n         *\n         * @param {number} slop A positive integer value, defaults is 0.\n         */\n        slop(slop: number): this;\n\n        /**\n         * @param {boolean} enable\n         */\n        inOrder(enable: boolean): this;\n    }\n\n    /**\n     * Matches spans which are near one another. One can specify `slop`, the maximum\n     * number of intervening unmatched positions, as well as whether matches are\n     * required to be in-order. The span near query maps to Lucene `SpanNearQuery`.\n     */\n    export function spanNearQuery(): SpanNearQuery;\n\n    /**\n     * Matches the union of its span clauses. The span or query maps to Lucene `SpanOrQuery`.\n     *\n     * @extends SpanQueryBase\n     */\n    export class SpanOrQuery extends SpanQueryBase {\n        constructor();\n        /**\n         * Sets the clauses element which is a list of one or more other span type queries.\n         *\n         * @param {Array<SpanQueryBase>} clauses\n         * @throws {TypeError} If parameter `clauses` is not an instance of Array or if\n         * any member of the array is not an instance of `SpanQueryBase`.\n         */\n        clauses(clauses: SpanQueryBase[]): this;\n    }\n\n    /**\n     * Matches the union of its span clauses. 
The span or query maps to Lucene `SpanOrQuery`.\n     */\n    export function spanOrQuery(): SpanOrQuery;\n\n    /**\n     * Removes matches which overlap with another span query. The span not query\n     * maps to Lucene `SpanNotQuery`.\n     *\n     * @extends SpanQueryBase\n     */\n    export class SpanNotQuery extends SpanQueryBase {\n        constructor();\n        /**\n         * Sets the `include` clause which is the span query whose matches are filtered\n         *\n         * @param {SpanQueryBase} spanQry\n         */\n        include(spanQry: SpanQueryBase): this;\n\n        /**\n         * Sets the `exclude` clause which is the span query whose matches must\n         * not overlap those returned.\n         *\n         * @param {SpanQueryBase} spanQry\n         */\n        exclude(spanQry: SpanQueryBase): this;\n\n        /**\n         * If set the amount of tokens before the include span can't have overlap with\n         * the exclude span.\n         *\n         * @param {number} pre\n         */\n        pre(pre: number): this;\n\n        /**\n         * If set the amount of tokens after the include span can't have overlap with the exclude span.\n         *\n         * @param {number} post\n         */\n        post(post: number): this;\n\n        /**\n         * If set the amount of tokens from within the include span can't have overlap\n         * with the exclude span. Equivalent of setting both `pre` and `post`.\n         *\n         * @param {number} dist\n         */\n        dist(dist: number): this;\n    }\n\n    /**\n     * Removes matches which overlap with another span query. 
The span not query\n     * maps to Lucene `SpanNotQuery`.\n     */\n    export function spanNotQuery(): SpanNotQuery;\n\n    /**\n     * Base class for span queries with `little`, `big` clauses.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @extends SpanQueryBase\n     */\n    export class SpanLittleBigQueryBase extends SpanQueryBase {\n        /**\n         * Sets the `little` clause.\n         *\n         * @param {SpanQueryBase} spanQry Any span type query\n         */\n        little(spanQry: SpanQueryBase): this;\n\n        /**\n         * Sets the `big` clause.\n         *\n         * @param {SpanQueryBase} spanQry Any span type query\n         */\n        big(spanQry: SpanQueryBase): this;\n    }\n\n    /**\n     * Returns matches which enclose another span query. The span containing query\n     * maps to Lucene `SpanContainingQuery`.\n     * Matching spans from big that contain matches from little are returned.\n     *\n     * @extends SpanLittleBigQueryBase\n     */\n    export class SpanContainingQuery extends SpanLittleBigQueryBase {\n        constructor();\n    }\n\n    /**\n     * Returns matches which enclose another span query. The span containing query\n     * maps to Lucene `SpanContainingQuery`.\n     * Matching spans from big that contain matches from little are returned.\n     */\n    export function spanContainingQuery(): SpanContainingQuery;\n\n    /**\n     * Returns matches which are enclosed inside another span query. 
The span within\n     * query maps to Lucene `SpanWithinQuery`.\n     * Matching spans from `little` that are enclosed within `big` are returned.\n     *\n     * @extends SpanLittleBigQueryBase\n     */\n    export class SpanWithinQuery extends SpanLittleBigQueryBase {\n        constructor();\n    }\n\n    /**\n     * Returns matches which are enclosed inside another span query. The span within\n     * query maps to Lucene `SpanWithinQuery`.\n     * Matching spans from `little` that are enclosed within `big` are returned.\n     */\n    export function spanWithinQuery(): SpanWithinQuery;\n\n    /**\n     * Wrapper to allow span queries to participate in composite single-field\n     * span queries by lying about their search field. The span field masking\n     * query maps to Lucene's `SpanFieldMaskingQuery`.\n     *\n     * This can be used to support queries like span-near or span-or across\n     * different fields, which is not ordinarily permitted.\n     *\n     * Span field masking query is invaluable in conjunction with multi-fields\n     * when same content is indexed with multiple analyzers. 
For instance we\n     * could index a field with the standard analyzer which breaks text up into\n     * words, and again with the english analyzer which stems words into their root form.\n     *\n     * @param {string=} field\n     * @param {SpanQueryBase=} spanQry Any other span type query\n     * @extends SpanQueryBase\n     */\n    export class SpanFieldMaskingQuery extends SpanQueryBase {\n        constructor(field?: string, spanQry?: SpanQueryBase);\n\n        /**\n         * Sets the span query.\n         *\n         * @param {SpanQueryBase} spanQry\n         */\n        query(spanQry: SpanQueryBase): this;\n\n        /**\n         * Sets the field to mask.\n         *\n         * @param {string} field\n         */\n        field(field: string): this;\n    }\n\n    /**\n     * Wrapper to allow span queries to participate in composite single-field\n     * span queries by lying about their search field. The span field masking\n     * query maps to Lucene's `SpanFieldMaskingQuery`.\n     *\n     * This can be used to support queries like span-near or span-or across\n     * different fields, which is not ordinarily permitted.\n     *\n     * Span field masking query is invaluable in conjunction with multi-fields\n     * when same content is indexed with multiple analyzers. 
For instance we\n     * could index a field with the standard analyzer which breaks text up into\n     * words, and again with the english analyzer which stems words into their root form.\n     *\n     * @param {string=} field\n     * @param {SpanQueryBase=} spanQry Any other span type query\n     */\n    export function spanFieldMaskingQuery(\n        field?: string,\n        spanQry?: SpanQueryBase\n    ): SpanFieldMaskingQuery;\n\n    /**\n     * The sparse vector query executes a query consisting of sparse vectors, such as built by a learned sparse retrieval model,\n     *\n     * NOTE: Only available in Elasticsearch v8.15+\n     */\n    export class SparseVectorQuery extends Query {\n        constructor(field?: string);\n\n        /**\n         * Sets the field to query\n         *\n         * @param {string} field the field for the query\n         * @returns {SparseVectorQuery}\n         */\n        field(field : string) : SparseVectorQuery;\n\n        /**\n         * Set model inference id\n         *\n         * @param {string} inferenceId The model inference ID\n         * @returns {SparseVectorQuery}\n         */\n        inferenceId(inferenceId : string) : SparseVectorQuery;\n\n        /**\n         * Sets the input query\n         *\n         * @param {string} query The input query\n         * @returns {SparseVectorQuery}\n         */\n        query(query : string) : SparseVectorQuery;\n\n        /**\n         * Set a query vector to the query to run. 
if you don't use inference\n         *\n         * @param {Object} queryVector\n         * @returns {SparseVectorQuery}\n         */\n        queryVector(queryVector : object) : SparseVectorQuery;\n\n        /**\n         * Enable pruning\n         *\n         * NOTE: Only available in Elasticsearch v9.0+\n         *\n         * @param {boolean} prune\n         * @returns {SparseVectorQuery} returns `this` so that calls can be chained.\n         */\n        prune(prune: boolean): SparseVectorQuery;\n\n        /**\n         * Set pruning config tokens_freq_ratio_threshold\n         *\n         * NOTE: Only available in Elasticsearch v9.0+\n         *\n         * @param {number} tokensFreqRatioThreshold\n         * @returns {SparseVectorQuery} returns `this` so that calls can be chained.\n         */\n        tokensFreqRatioThreshold(tokensFreqRatioThreshold : number) : SparseVectorQuery;\n\n        /**\n         * Set pruning config tokens_weight_threshold\n         *\n         * NOTE: Only available in Elasticsearch v9.0+\n         *\n         * @param {number} tokensWeightThreshold\n         * @returns {SparseVectorQuery} returns `this` so that calls can be chained.\n         */\n        tokensWeightThreshold(tokensWeightThreshold : number) : SparseVectorQuery;\n\n        /**\n         * Set pruning config only_score_pruned_tokens\n         *\n         * NOTE: Only available in Elasticsearch v9.0+\n         *\n         * @param {boolean} onlyScorePrunedTokens\n         * @returns {SparseVectorQuery} returns `this` so that calls can be chained.\n         */\n        onlyScorePrunedTokens(onlyScorePrunedTokens : boolean) : SparseVectorQuery;\n    }\n\n    /**\n     * Factory function to instantiate a new SparseVectorQuery object.\n     *\n     * @returns {SparseVectorQuery}\n     */\n    export function sparseVectorQuery(\n        field? 
: string\n    ): SparseVectorQuery;\n\n    /**\n     * The `semantic` query enables semantic search on a `semantic_text` field.\n     *\n     *  NOTE: Only available in Elasticsearch v9.0+\n     *\n     * @param {string=} field The semantic_text field to query.\n     * @param {string=} query The semantic query text.\n     * @extends Query\n     */\n    export class SemanticQuery extends Query {\n        constructor(field?: string, query?: string);\n\n        /**\n         * Sets the semantic field to query.\n         *\n         * @param {string} field The `semantic_text` field name.\n         */\n        field(field: string): this;\n\n        /**\n         * Sets the semantic query text.\n         *\n         * @param {string} query The query text.\n         */\n        query(query: string): this;\n    }\n\n    /**\n     * Creates a `semantic` query.\n     *\n     * @param {string=} field The semantic_text field to query.\n     * @param {string=} query The semantic query text.\n     */\n    export function semanticQuery(\n        field?: string,\n        query?: string\n    ): SemanticQuery;\n\n    /**\n     * Knn performs k-nearest neighbor (KNN) searches.\n     * This class allows configuring the KNN search with various parameters such as field, query vector,\n     * number of nearest neighbors (k), number of candidates, boost factor, and similarity metric.\n     *\n     * NOTE: Only available in Elasticsearch v8.0+\n     */\n    export class KNN {\n        /**\n         * Creates an instance of Knn, initializing the internal state for the k-NN search.\n         *\n         * @param {string} field - The field against which to perform the k-NN search.\n         * @param {number} k - The number of nearest neighbors to retrieve.\n         * @param {number} numCandidates - The number of candidate neighbors to consider during the search.\n         * @throws {Error} If the number of candidates (numCandidates) is less than the number 
of neighbors (k).\n         */\n        constructor(field: string, k: number, numCandidates: number);\n\n        /**\n         * Sets the query vector for the KNN search, an array of numbers representing the reference point.\n         *\n         * @param {number[]} vector\n         */\n        queryVector(vector: number[]): this;\n\n        /**\n         * Sets the query vector builder for the k-NN search.\n         * This method configures a query vector builder using a specified model ID and model text.\n         * Note that either a direct query vector or a query vector builder can be provided, but not both.\n         *\n         * @param {string} modelId - The ID of the model used for generating the query vector.\n         * @param {string} modelText - The text input based on which the query vector is generated.\n         * @returns {KNN} Returns the instance of Knn for method chaining.\n         * @throws {Error} If both query_vector_builder and query_vector are provided.\n         */\n        queryVectorBuilder(modelId: string, modelText: string): this;\n\n        /**\n         * Adds one or more filter queries to the k-NN search.\n         * This method is designed to apply filters to the k-NN search. It accepts either a single\n         * query or an array of queries. Each query acts as a filter, refining the search results\n         * according to the specified conditions. 
These queries must be instances of the `Query` class.\n         *\n         * @param {Query|Query[]} queries - A single `Query` instance or an array of `Query` instances for filtering.\n         * @returns {KNN} Returns `this` to allow method chaining.\n         * @throws {TypeError} If any of the provided queries is not an instance of `Query`.\n         */\n        filter(queries: Query | Query[]): this;\n\n        /**\n         * Applies a boost factor to the query to influence the relevance score of returned documents.\n         *\n         * @param {number} boost\n         */\n        boost(boost: number): this;\n\n        /**\n         * Sets the similarity metric used in the KNN algorithm to calculate similarity.\n         *\n         * @param {number} similarity\n         */\n        similarity(similarity: number): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `query`\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * Factory function to instantiate a new Knn object.\n     *\n     * @returns {KNN}\n     */\n    export function kNN(field: string, k: number, numCandidates: number): KNN;\n\n    /**\n     * Base class implementation for all aggregation types.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class should be extended and used, as validation against the class\n     * type is present in various places.\n     *\n     * @param {string} name\n     * @param {string} aggType Type of aggregation\n     * @throws {Error} if `name` is empty\n     * @throws {Error} if `aggType` is empty\n     */\n    class Aggregation {\n        constructor(name: string, aggType: string);\n\n        /**\n         * Sets nested aggregations.\n         * This method can be called multiple times in order to set multiple nested aggregations.\n         *\n        
 * @param {Aggregation} agg Any valid `Aggregation`\n         * @throws {TypeError} If `agg` is not an instance of `Aggregation`\n         */\n        aggregation(agg: Aggregation): this;\n\n        /**\n         * Sets nested aggregation.\n         * This method can be called multiple times in order to set multiple nested aggregations.\n         *\n         * @param {Aggregation} agg Any valid {@link Aggregation}\n         */\n        agg(agg: Aggregation): this;\n\n        /**\n         * Sets multiple aggregation items on the request body.\n         *\n         * @param {Array<Aggregation>} aggs Array of valid `Aggregation` items\n         * @throws {TypeError} If `aggs` is not an instance of `Array`\n         * @throws {TypeError} If `aggs` contains instances not of type `Aggregation`\n         */\n        aggregations(aggs: Aggregation[]): this;\n\n        /**\n         * Sets multiple aggregation items on the request body.\n         * Alias for method `aggregations`\n         *\n         * @param {Array<Aggregation>} aggs Array of valid `Aggregation` items\n         * @throws {TypeError} If `aggs` is not an instance of `Array`\n         * @throws {TypeError} If `aggs` contains instances not of type `Aggregation`\n         */\n        aggs(aggs: Aggregation[]): this;\n\n        /**\n         * You can associate a piece of metadata with individual aggregations at request time\n         * that will be returned in place at response time.\n         *\n         * @param {object} meta\n         */\n        meta(meta: object): this;\n\n        /**\n         * Build and returns DSL representation of the `Aggregation` class instance.\n         *\n         */\n        getDSL(): object;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `aggregation` query.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * The `MetricsAggregationBase` provides support for common options used across\n 
    * various metrics `Aggregation` implementations.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} name a valid aggregation name\n     * @param {string} aggType type of aggregation\n     * @param {string=} field The field to aggregate on\n     * @extends Aggregation\n     */\n    export class MetricsAggregationBase extends Aggregation {\n        constructor(name: string, aggType: string, field?: string);\n\n        /**\n         * Sets field to run aggregation on.\n         *\n         * @param {string} field a valid field name\n         */\n        field(field: string): this;\n\n        /**\n         * Sets script parameter for aggregation.\n         *\n         * @param {Script} script\n         * @throws {TypeError} If `script` is not an instance of `Script`\n         */\n        script(script: Script): this;\n\n        /**\n         * Sets the missing parameter which defines how documents\n         * that are missing a value should be treated.\n         *\n         * @param {string} value\n         */\n        missing(value: string): this;\n\n        /**\n         * Sets the format expression if applicable.\n         *\n         * @param {string} fmt Format mask to apply on aggregation response. Example: ####.00\n         */\n        format(fmt: string): this;\n    }\n\n    /**\n     * A single-value metrics aggregation that computes the average of numeric\n     * values that are extracted from the aggregated documents. 
These values can be\n     * extracted either from specific numeric fields in the documents, or be\n     * generated by a provided script.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class AvgAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n    }\n\n    /**\n     * A single-value metrics aggregation that computes the average of numeric\n     * values that are extracted from the aggregated documents. These values can be\n     * extracted either from specific numeric fields in the documents, or be\n     * generated by a provided script.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function avgAggregation(\n        name: string,\n        field?: string\n    ): AvgAggregation;\n\n    /**\n     * A single-value metrics aggregation that computes the weighted average of numeric values that are extracted from the aggregated documents.\n     * These values can be extracted either from specific numeric fields in the documents.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-weight-avg-aggregation.html)\n     *\n     * Added in Elasticsearch v6.4.0\n     * [Release notes](https://www.elastic.co/guide/en/elasticsearch/reference/6.4/release-notes-6.4.0.html)\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} value The field or script to be used as the value.\n     * @param {string | Script =} weight The field or script to be used as the weighting.\n     * @extends MetricsAggregationBase\n     */\n    export class WeightedAverageAggregation extends MetricsAggregationBase {\n        
constructor(name: string, value?: string | Script, weight?: string | Script);\n\n        /**\n         * Sets the value\n         *\n         * @param {string | Script} value Field name or script to be used as the value\n         * @param {number=} missing Sets the missing parameter which defines how documents\n         * that are missing a value should be treated.\n         * @return {WeightedAverageAggregation} returns `this` so that calls can be chained\n         */\n        value(value: string | Script, missing?: number): WeightedAverageAggregation\n\n        /**\n         * Sets the weight\n         *\n         * @param {string | Script} weight Field name or script to be used as the weight\n         * @param {number=} missing Sets the missing parameter which defines how documents\n         * that are missing a value should be treated.\n         * @return {WeightedAverageAggregation} returns `this` so that calls can be chained\n         */\n        weight(weight: string | Script, missing?: number): WeightedAverageAggregation\n\n        /**\n        * @override\n        * @throws {Error} This method cannot be called on WeightedAverageAggregation\n        */\n        script(): never;\n\n        /**\n        * @override\n        * @throws {Error} This method cannot be called on WeightedAverageAggregation\n        */\n        missing(): never;\n\n        /**\n        * @override\n        * @throws {Error} This method cannot be called on WeightedAverageAggregation\n        */\n        field(): never;\n    }\n\n    /**\n     * A single-value metrics aggregation that computes the weighted average of numeric values that are extracted from the aggregated documents.\n     * These values can be extracted either from specific numeric fields in the documents.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-weight-avg-aggregation.html)\n     *\n     * Added in Elasticsearch v6.4.0\n     * 
[Release notes](https://www.elastic.co/guide/en/elasticsearch/reference/6.4/release-notes-6.4.0.html)\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string | Script =} value The field or script to be used as the value.\n     * @param {string | Script =} weight The field or script to be used as the weighting.\n     */\n    export function weightedAverageAggregation(\n        name: string,\n        value?: string | Script,\n        weight?: string | Script\n    ): WeightedAverageAggregation;\n\n    /**\n     * A single-value metrics aggregation that calculates an approximate count of\n     * distinct values. Values can be extracted either from specific fields in the\n     * document or generated by a script.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class CardinalityAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on CardinalityAggregation\n         */\n        format(): never;\n\n        /**\n         * The `precision_threshold` options allows to trade memory for accuracy,\n         * and defines a unique count below which counts are expected to be close to accurate.\n         *\n         * @param {number} threshold The threshold value.\n         * The maximum supported value is 40000, thresholds above this number\n         * will have the same effect as a threshold of 40000. The default values is 3000.\n         */\n        precisionThreshold(threshold: number): this;\n    }\n\n    /**\n     * A single-value metrics aggregation that calculates an approximate count of\n     * distinct values. 
Values can be extracted either from specific fields in the\n     * document or generated by a script.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function cardinalityAggregation(\n        name: string,\n        field?: string\n    ): CardinalityAggregation;\n\n    /**\n     * A multi-value metrics aggregation that computes stats over numeric values\n     * extracted from the aggregated documents. These values can be extracted either\n     * from specific numeric fields in the documents, or be generated by a provided\n     * script.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class ExtendedStatsAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * Set sigma in the request for getting custom boundary.\n         * sigma controls how many standard deviations +/- from the mean should be displayed\n         *\n         * @param {number} sigma sigma can be any non-negative double,\n         * meaning you can request non-integer values such as 1.5.\n         * A value of 0 is valid, but will simply return the average for both upper and lower bounds.\n         */\n        sigma(sigma: number): this;\n    }\n\n    /**\n     * A multi-value metrics aggregation that computes stats over numeric values\n     * extracted from the aggregated documents. 
These values can be extracted either\n     * from specific numeric fields in the documents, or be generated by a provided\n     * script.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function extendedStatsAggregation(\n        name: string,\n        field?: string\n    ): ExtendedStatsAggregation;\n\n    /**\n     * A metric aggregation that computes the bounding box\n     * containing all geo_point values for a field.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class GeoBoundsAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoBoundsAggregation\n         */\n        format(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoBoundsAggregation\n         */\n        script(): never;\n\n        /**\n         * @param {boolean} allowOverlap Optional parameter which specifies whether\n         * the bounding box should be allowed to overlap the international date line.\n         * The default value is true\n         */\n        wrapLongitude(allowOverlap: boolean): GeoBoundsAggregation;\n    }\n\n    /**\n     * A metric aggregation that computes the bounding box\n     * containing all geo_point values for a field.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function geoBoundsAggregation(\n        name: string,\n        field?: string\n    ): GeoBoundsAggregation;\n\n    /**\n     * A metric aggregation that computes the weighted centroid\n     * from 
all coordinate values for a Geo-point datatype field.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on. field must be a Geo-point datatype type\n     * @extends MetricsAggregationBase\n     */\n    export class GeoCentroidAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoCentroidAggregation\n         */\n        format(): never;\n    }\n\n    /**\n     * A metric aggregation that computes the weighted centroid\n     * from all coordinate values for a Geo-point datatype field.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on. field must be a Geo-point datatype type\n     */\n    export function geoCentroidAggregation(\n        name: string,\n        field?: string\n    ): GeoCentroidAggregation;\n\n    /**\n     * A single-value metrics aggregation that keeps track and returns the\n     * maximum value among the numeric values extracted from the aggregated\n     * documents. 
These values can be extracted either from specific numeric fields\n     * in the documents, or be generated by a provided script.\n     *\n     * Aggregation that keeps track and returns the maximum value among the\n     * numeric values extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class MaxAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n    }\n\n    /**\n     * A single-value metrics aggregation that keeps track and returns the\n     * maximum value among the numeric values extracted from the aggregated\n     * documents. These values can be extracted either from specific numeric fields\n     * in the documents, or be generated by a provided script.\n     *\n     * Aggregation that keeps track and returns the maximum value among the\n     * numeric values extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function maxAggregation(\n        name: string,\n        field?: string\n    ): MaxAggregation;\n\n    /**\n     * A single-value metrics aggregation that keeps track and returns the\n     * minimum value among the numeric values extracted from the aggregated\n     * documents. 
These values can be extracted either from specific numeric fields\n     * in the documents, or be generated by a provided script.\n     *\n     * Aggregation that keeps track and returns the minimum value among numeric\n     * values extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class MinAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n    }\n\n    /**\n     * A single-value metrics aggregation that keeps track and returns the\n     * minimum value among the numeric values extracted from the aggregated\n     * documents. These values can be extracted either from specific numeric fields\n     * in the documents, or be generated by a provided script.\n     *\n     * Aggregation that keeps track and returns the minimum value among numeric\n     * values extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function minAggregation(\n        name: string,\n        field?: string\n    ): MinAggregation;\n\n    /**\n     * A multi-value metrics aggregation that calculates one or more percentiles\n     * over numeric values extracted from the aggregated documents. 
These values can\n     * be extracted either from specific numeric fields in the documents, or be\n     * generated by a provided script.\n     *\n     * Aggregation that calculates one or more percentiles over numeric values\n     * extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class PercentilesAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * Enable the response to be returned as a keyed object where the key is the\n         * bucket interval.\n         *\n         * @param {boolean} keyed To enable keyed response or not. True by default\n         */\n        keyed(keyed: boolean): this;\n\n        /**\n         * Specifies the percents of interest.\n         * Requested percentiles must be a value between 0-100 inclusive\n         *\n         * @param {Array<number>} percents Parameter to specify particular percentiles to calculate\n         * @throws {TypeError} If `percents` is not an instance of Array\n         */\n        percents(percents: number[]): this;\n\n        /**\n         * Compression controls memory usage and approximation error. The compression\n         * value limits the maximum number of nodes to 100 * compression. By\n         * increasing the compression value, you can increase the accuracy of your\n         * percentiles at the cost of more memory. Larger compression values also make\n         * the algorithm slower since the underlying tree data structure grows in\n         * size, resulting in more expensive operations. 
The default compression\n         * value is 100.\n         *\n         * @param {number} compression Parameter to balance memory utilization with estimation accuracy.\n         */\n        tdigest(compression: number): this;\n\n        /**\n         * Compression controls memory usage and approximation error. The compression\n         * value limits the maximum number of nodes to 100 * compression. By\n         * increasing the compression value, you can increase the accuracy of your\n         * percentiles at the cost of more memory. Larger compression values also make\n         * the algorithm slower since the underlying tree data structure grows in\n         * size, resulting in more expensive operations. The default compression\n         * value is 100.\n         * Alias for `tdigest`\n         *\n         * @param {number} compression Parameter to balance memory utilization with estimation accuracy.\n         */\n        compression(compression: number): this;\n\n        /**\n         * HDR Histogram (High Dynamic Range Histogram) is an alternative implementation\n         * that can be useful when calculating percentiles for latency measurements\n         * as it can be faster than the t-digest implementation\n         * with the trade-off of a larger memory footprint.\n         * The HDR Histogram can be used by specifying the method parameter in the request.\n         *\n         * @param {number} numberOfSigDigits The resolution of values\n         * for the histogram in number of significant digits\n         */\n        hdr(numberOfSigDigits: number): this;\n    }\n\n    /**\n     * A multi-value metrics aggregation that calculates one or more percentiles\n     * over numeric values extracted from the aggregated documents. 
These values can\n     * be extracted either from specific numeric fields in the documents, or be\n     * generated by a provided script.\n     *\n     * Aggregation that calculates one or more percentiles over numeric values\n     * extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function percentilesAggregation(\n        name: string,\n        field?: string\n    ): PercentilesAggregation;\n\n    /**\n     * A multi-value metrics aggregation that calculates one or more percentile ranks\n     * over numeric values extracted from the aggregated documents. These values can\n     * be extracted either from specific numeric fields in the documents, or be\n     * generated by a provided script.\n     *\n     * Aggregation that calculates one or more percentiles ranks over numeric values\n     * extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on. 
It must be a numeric field\n     * @param {Array=} values Values to compute percentiles from.\n     * @throws {TypeError} If `values` is not an instance of Array\n     * @extends MetricsAggregationBase\n     */\n    export class PercentileRanksAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string, values?: number[]);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on PercentileRanksAggregation\n         */\n        format(): never;\n\n        /**\n         * Enable the response to be returned as a keyed object where the key is the\n         * bucket interval.\n         *\n         * @param {boolean} keyed To enable keyed response or not.\n         */\n        keyed(keyed: boolean): this;\n\n        /**\n         * Specifies the values to compute percentiles from.\n         *\n         * @param {Array<number>} values Values to compute percentiles from.\n         * @throws {TypeError} If `values` is not an instance of Array\n         */\n        values(values: number[]): this;\n\n        /**\n         * Compression controls memory usage and approximation error. The compression\n         * value limits the maximum number of nodes to 100 * compression. By\n         * increasing the compression value, you can increase the accuracy of your\n         * percentiles at the cost of more memory. Larger compression values also make\n         * the algorithm slower since the underlying tree data structure grows in\n         * size, resulting in more expensive operations. The default compression\n         * value is 100.\n         *\n         * @param {number} compression Parameter to balance memory utilization with estimation accuracy.\n         */\n        tdigest(compression: number): this;\n\n        /**\n         * Compression controls memory usage and approximation error. The compression\n         * value limits the maximum number of nodes to 100 * compression. 
By\n         * increasing the compression value, you can increase the accuracy of your\n         * percentiles at the cost of more memory. Larger compression values also make\n         * the algorithm slower since the underlying tree data structure grows in\n         * size, resulting in more expensive operations. The default compression\n         * value is 100.\n         *\n         * Alias for `tdigest`\n         *\n         * @param {number} compression Parameter to balance memory utilization with estimation accuracy.\n         */\n        compression(compression: number): this;\n\n        /**\n         * HDR Histogram (High Dynamic Range Histogram) is an alternative implementation\n         * that can be useful when calculating percentiles for latency measurements\n         * as it can be faster than the t-digest implementation\n         * with the trade-off of a larger memory footprint.\n         * The HDR Histogram can be used by specifying the method parameter in the request.\n         *\n         * @param {number} numberOfSigDigits The resolution of values\n         * for the histogram in number of significant digits\n         */\n        hdr(numberOfSigDigits: number): this;\n    }\n\n    export function percentileRanksAggregation(\n        name: string,\n        field?: string,\n        values?: number[]\n    ): PercentileRanksAggregation;\n\n    /**\n     * A metric aggregation that executes using scripts to provide a metric output.\n     *\n     * Aggregation that keeps track and returns the minimum value among numeric\n     * values extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @extends MetricsAggregationBase\n     */\n    export class ScriptedMetricAggregation extends MetricsAggregationBase {\n        constructor(name: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ScriptedMetricAggregation\n     
    */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ScriptedMetricAggregation\n         */\n        script(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ScriptedMetricAggregation\n         */\n        missing(): never;\n\n        /**\n         * Sets the initialization script.\n         * Executed prior to any collection of documents. Allows the aggregation to set up any initial state.\n         *\n         * @param {string|Script} initScript The initialization script. Can be a string or an Script instance\n         */\n        initScript(initScript: string | Script): this;\n\n        /**\n         * Sets the map script. This is the only required script.\n         * Executed once per document collected.\n         * If no combine_script is specified, the resulting state needs to be stored in an object named _agg.\n         *\n         * @param {string|Script} mapScript The map script. Can be a string or an Script instance\n         */\n        mapScript(mapScript: string | Script): this;\n\n        /**\n         * Sets the combine phase script.\n         * Executed once on each shard after document collection is complete.\n         * Allows the aggregation to consolidate the state returned from each shard.\n         * If a combine_script is not provided the combine phase will return the aggregation variable.\n         *\n         * @param {string|Script} combineScript The combine script. 
Can be a string or an Script instance\n         */\n        combineScript(combineScript: string | Script): this;\n\n        /**\n         * Sets the reduce phase script.\n         * Executed once on the coordinating node after all shards have returned their results.\n         * The script is provided with access to a variable _aggs\n         * which is an array of the result of the combine_script on each shard.\n         * If a reduce_script is not provided the reduce phase will return the _aggs variable.\n         *\n         * @param {string|Script} reduceScript The reduce script. Can be a string or an Script instance\n         */\n        reduceScript(reduceScript: string | Script): this;\n\n        /**\n         * Sets the params for scripts.\n         * Optional object whose contents will be passed as variables to\n         * the init_script, map_script and combine_script\n         * If you specify script parameters then you must specify `\"_agg\": {}`.\n         *\n         * @param {object} params Object passed to init, map and combine script. Default value - `{ \"_agg\": {} }`\n         */\n        params(params: object): this;\n    }\n\n    /**\n     * A metric aggregation that executes using scripts to provide a metric output.\n     *\n     * Aggregation that keeps track and returns the minimum value among numeric\n     * values extracted from the aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     */\n    export function scriptedMetricAggregation(\n        name: string\n    ): ScriptedMetricAggregation;\n\n    /**\n     * A multi-value metrics aggregation that computes stats over numeric values\n     * extracted from the aggregated documents. 
These values can be extracted either\n     * from specific numeric fields in the documents, or be generated by a provided\n     * script.\n     * The stats that are returned consist of: min, max, sum, count and avg.\n     *\n     * Aggregation that computes stats over numeric values extracted from the\n     * aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class StatsAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n    }\n\n    /**\n     * A multi-value metrics aggregation that computes stats over numeric values\n     * extracted from the aggregated documents. These values can be extracted either\n     * from specific numeric fields in the documents, or be generated by a provided\n     * script.\n     * The stats that are returned consist of: min, max, sum, count and avg.\n     *\n     * Aggregation that computes stats over numeric values extracted from the\n     * aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function statsAggregation(\n        name: string,\n        field?: string\n    ): StatsAggregation;\n\n    /**\n     * A single-value metrics aggregation that sums up numeric values that are\n     * extracted from the aggregated documents. 
These values can be extracted either\n     * from specific numeric fields in the documents, or be generated by a\n     * provided script.\n     *\n     * Aggregation that sums up numeric values that are extracted from the\n     * aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class SumAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n    }\n\n    /**\n     * A single-value metrics aggregation that sums up numeric values that are\n     * extracted from the aggregated documents. These values can be extracted either\n     * from specific numeric fields in the documents, or be generated by a\n     * provided script.\n     *\n     * Aggregation that sums up numeric values that are extracted from the\n     * aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function sumAggregation(\n        name: string,\n        field?: string\n    ): SumAggregation;\n\n    /**\n     * A `top_hits` metric aggregator keeps track of the most relevant document being\n     * aggregated. 
This aggregator is intended to be used as a sub aggregator, so that\n     * the top matching documents can be aggregated per bucket.\n     *\n     * `top_hits` metric aggregator keeps track of the most relevant document being\n     * aggregated.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @extends MetricsAggregationBase\n     */\n    export class TopHitsAggregation extends MetricsAggregationBase {\n        constructor(name: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on TopHitsAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on TopHitsAggregation\n         */\n        script(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on TopHitsAggregation\n         */\n        missing(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on TopHitsAggregation\n         */\n        format(): never;\n\n        /**\n         * Sets the offset for fetching result.\n         *\n         * @param {number} from The offset from the first result you want to fetch.\n         */\n        from(from: number): this;\n\n        /**\n         * Sets the maximum number of top matching hits to return per bucket.\n         *\n         * @param {number} size The number of aggregation entries to be returned per bucket.\n         */\n        size(size: number): this;\n\n        /**\n         * How the top matching hits should be sorted. Allows to add sort on specific field.\n         * The sort can be reversed as well. 
The sort is defined on a per field level,\n         * with special field name for `_score` to sort by score, and `_doc` to sort by\n         * index order.\n         *\n         * @param {Sort} sort How the top matching hits should be sorted.\n         * @throws {TypeError} If parameter `sort` is not an instance of `Sort`.\n         */\n        sort(sort: Sort): this;\n\n        /**\n         * Allows to add multiple sort on specific fields. Each sort can be reversed as well.\n         * The sort is defined on a per field level, with special field name for _score to\n         * sort by score, and _doc to sort by index order.\n         *\n         * @param {Array<Sort>} sorts Array of sorts specifying how the top matching hits should be sorted.\n         * @throws {TypeError} If any item in parameter `sorts` is not an instance of `Sort`.\n         */\n        sorts(sorts: Sort[]): this;\n\n        /**\n         * Enables score computation and tracking during sorting.\n         * By default, sorting scores are not computed.\n         *\n         * @param {boolean} trackScores If scores should be computed and tracked. 
Defaults to false.\n         */\n        trackScores(trackScores: boolean): this;\n\n        /**\n         * Enable/Disable returning version number for each hit.\n         *\n         * @param {boolean} version true to enable, false to disable\n         */\n        version(version: boolean): this;\n\n        /**\n         * Enable/Disable explanation of score for each hit.\n         *\n         * @param {boolean} explain true to enable, false to disable\n         */\n        explain(explain: boolean): this;\n\n        /**\n         * Performs highlighting based on the `Highlight` settings.\n         *\n         * @param {Highlight} highlight\n         */\n        highlight(highlight: Highlight): this;\n\n        /**\n         * Allows to control how the `_source` field is returned with every hit.\n         * You can turn off `_source` retrieval by passing `false`.\n         * It also accepts one(string) or more wildcard(array) patterns to control\n         * what parts of the `_source` should be returned\n         * An object can also be used to specify the wildcard patterns for `includes` and `excludes`.\n         *\n         * @param {boolean|string|Array|Object} source\n         */\n        source(source: boolean | string | string[] | object): this;\n\n        /**\n         * The stored_fields parameter is about fields that are explicitly marked as stored in the mapping.\n         * Selectively load specific stored fields for each document represented by a search hit\n         * using array of stored fields.\n         * An empty array will cause only the _id and _type for each hit to be returned.\n         * To disable the stored fields (and metadata fields) entirely use: '_none_'\n         *\n         * @param {Array|string} fields\n         */\n        storedFields(fields: object | string): this;\n\n        /**\n         * Computes a document property dynamically based on the supplied `Script`.\n         *\n         * @param {string} scriptFieldName\n         
* @param {string|Script} script string or instance of `Script`\n         */\n        scriptField(scriptFieldName: string, script: string | Script): this;\n\n        /**\n         * Sets given dynamic document properties to be computed using supplied `Script`s.\n         * Object should have `scriptFieldName` as key and `script` as the value.\n         *\n         * @param {object} scriptFields Object with `scriptFieldName` as key and `script` as the value.\n         */\n        scriptFields(scriptFields: object): this;\n\n        /**\n         * Allows to return the doc value representation of a field for each hit.\n         * Doc value fields can work on fields that are not stored.\n         *\n         * @param {Array<string>} fields\n         */\n        docvalueFields(fields: string[]): this;\n    }\n\n    /**\n     * A `top_hits` metric aggregator keeps track of the most relevant document being\n     * aggregated. This aggregator is intended to be used as a sub aggregator, so that\n     * the top matching documents can be aggregated per bucket.\n     *\n     * `top_hits` metric aggregator keeps track of the most relevant document being\n     * aggregated.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     */\n    export function topHitsAggregation(name: string): TopHitsAggregation;\n\n    /**\n     * A single-value metrics aggregation that counts the number of values that\n     * are extracted from the aggregated documents. These values can be extracted\n     * either from specific fields in the documents, or be generated by a provided\n     * script. 
Typically, this aggregator will be used in conjunction with other\n     * single-value aggregations.\n     *\n     * Aggregation that counts the number of values that are extracted from the\n     * aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends MetricsAggregationBase\n     */\n    export class ValueCountAggregation extends MetricsAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ValueCountAggregation\n         */\n        format(): never;\n    }\n\n    /**\n     * A single-value metrics aggregation that counts the number of values that\n     * are extracted from the aggregated documents. These values can be extracted\n     * either from specific fields in the documents, or be generated by a provided\n     * script. Typically, this aggregator will be used in conjunction with other\n     * single-value aggregations.\n     *\n     * Aggregation that counts the number of values that are extracted from the\n     * aggregated documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function valueCountAggregation(\n        name: string,\n        field?: string\n    ): ValueCountAggregation;\n\n    /**\n     * The `BucketAggregationBase` provides support for common options used across\n     * various bucket `Aggregation` implementations.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} name a valid aggregation name\n     * @param {string} aggType type of aggregation\n     * @param {string=} field The field to 
aggregate on\n     * @extends Aggregation\n     */\n    export class BucketAggregationBase extends Aggregation {\n        constructor(name: string, aggType: string, field?: string);\n\n        /**\n         * Sets field to run aggregation on.\n         *\n         * @param {string} field a valid field name\n         */\n        field(field: string): this;\n\n        /**\n         * Sets script parameter for aggregation.\n         *\n         * @param {Script} script\n         * @throws {TypeError} If `script` is not an instance of `Script`\n         */\n        script(script: Script): this;\n    }\n\n    /**\n     * A bucket aggregation returning a form of adjacency matrix.\n     * The request provides a collection of named filter expressions,\n     * similar to the `filters` aggregation request. Each bucket in the response\n     * represents a non-empty cell in the matrix of intersecting filters.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     *\n     * @extends BucketAggregationBase\n     */\n    export class AdjacencyMatrixAggregation extends BucketAggregationBase {\n        constructor(name: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on AdjacencyMatrixAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on AdjacencyMatrixAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets a named filter query.\n         *\n         * @param {string} filterName Name for the filter.\n         * @param {Query} filterQuery Query to filter on. 
Example - term query.\n         * @throws {TypeError} If `filterQuery` is not an instance of `Query`\n         */\n        filter(filterName: string, filterQuery: Query): this;\n\n        /**\n         * Assigns filters to already added filters.\n         * Does not mix with anonymous filters.\n         * If anonymous filters are present, they will be overwritten.\n         *\n         * @param {Object} filterQueries Object with multiple key value pairs\n         * where filter name is the key and filter query is the value.\n         * @throws {TypeError} If `filterQueries` is not an instance of object\n         */\n        filters(filterQueries: object): this;\n\n        /**\n         * Sets the `separator` parameter to use a separator string other than\n         * the default of the ampersand.\n         *\n         * @param {string} sep the string used to separate keys in intersections buckets\n         * e.g. & character for keyed filters A and B would return an\n         * intersection bucket named A&B\n         */\n        separator(sep: string): this;\n    }\n\n    /**\n     * A bucket aggregation returning a form of adjacency matrix.\n     * The request provides a collection of named filter expressions,\n     * similar to the `filters` aggregation request. 
Each bucket in the response\n     * represents a non-empty cell in the matrix of intersecting filters.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     *\n     * @extends BucketAggregationBase\n     */\n    export function adjacencyMatrixAggregation(\n        name: string\n    ): AdjacencyMatrixAggregation;\n\n    /**\n     * A special single bucket aggregation that enables aggregating\n     * from buckets on parent document types to buckets on child documents.\n     * This aggregation relies on the `_parent` field in the mapping.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @extends BucketAggregationBase\n     */\n    export class ChildrenAggregation extends BucketAggregationBase {\n        constructor(name: string);\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ChildrenAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ChildrenAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets the child type/mapping for aggregation.\n         *\n         * @param {string} type The child type that the buckets in the parent space should be mapped to.\n         */\n        type(type: string): this;\n    }\n\n    /**\n     * A special single bucket aggregation that enables aggregating\n     * from buckets on parent document types to buckets on child documents.\n     * This aggregation relies on the `_parent` field in the mapping.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     */\n    export function childrenAggregation(name: string): ChildrenAggregation;\n\n    /**\n     * CompositeAggregation is a multi-bucket values source based aggregation that\n     * can be used to calculate unique composite values from source documents.\n     *\n     * Unlike 
the other multi-bucket aggregation the composite aggregation can be\n     * used to paginate **all** buckets from a multi-level aggregation efficiently.\n     * This aggregation provides a way to stream **all** buckets of a specific\n     * aggregation similarly to what scroll does for documents.\n     *\n     * NOTE: This query was added in elasticsearch v6.1.\n     *\n     * @param {string} name a valid aggregation name\n     *\n     * @extends Aggregation\n     */\n    export class CompositeAggregation extends Aggregation {\n        constructor(name: string);\n\n        /**\n         * Specifies the Composite Aggregation values sources to use in the\n         * aggregation.\n         *\n         * @param {...ValuesSourceBase} sources\n         * @throws {TypeError} If any of the rest parameters `sources` is not an\n         * instance of `ValuesSourceBase`\n         */\n        sources(...sources: CompositeAggregation.ValuesSourceBase[]): this;\n\n        /**\n         * Defines how many composite buckets should be returned. Each composite\n         * bucket is considered as a single bucket so setting a size of 10 will\n         * return the first 10 composite buckets created from the values source. 
The\n         * response contains the values for each composite bucket in an array\n         * containing the values extracted from each value source.\n         *\n         * @param {number} size\n         */\n        size(size: number): this;\n\n        /**\n         * The `after` parameter can be used to retrieve the composite buckets that\n         * are after the last composite buckets returned in a previous round.\n         *\n         * @param {Object} afterKey\n         */\n        after(afterKey: object): this;\n    }\n\n    /**\n     * CompositeAggregation is a multi-bucket values source based aggregation that\n     * can be used to calculate unique composite values from source documents.\n     *\n     * Unlike the other multi-bucket aggregation the composite aggregation can be\n     * used to paginate **all** buckets from a multi-level aggregation efficiently.\n     * This aggregation provides a way to stream **all** buckets of a specific\n     * aggregation similarly to what scroll does for documents.\n     *\n     * NOTE: This query was added in elasticsearch v6.1.\n     *\n     * @param {string} name a valid aggregation name\n     */\n    export function compositeAggregation(name: string): CompositeAggregation;\n\n    namespace CompositeAggregation {\n        /**\n         * Base class implementation for all Composite Aggregation values sources.\n         *\n         * **NOTE:** Instantiating this directly should not be required.\n         *\n         * @param {string} valueSrcType Type of value source.\n         * @param {string} refUrl Elasticsearch reference URL\n         * @param {string} name\n         * @param {string=} field The field to aggregate on\n         *\n         * @throws {Error} if `name` is empty\n         * @throws {Error} if `valueSrcType` is empty\n         */\n        class ValuesSourceBase {\n            constructor(\n                valueSrcType: string,\n                refUrl: string,\n                name: string,\n          
      field?: string\n            );\n\n            /**\n             * Field to use for this source.\n             *\n             * @param {string} field a valid field name\n             */\n            field(field: string): this;\n\n            /**\n             * Script to use for this source.\n             *\n             * @param {Script|Object|string} script\n             * @throws {TypeError} If `script` is not an instance of `Script`\n             */\n            script(script: Script | object | string): this;\n\n            /**\n             * Specifies the type of values produced by this source, e.g. `string` or\n             * `date`.\n             *\n             * @param {string} valueType\n             */\n            valueType(valueType: string): this;\n\n            /**\n             * Order specifies the order in the values produced by this source. It can\n             * be either `asc` or `desc`.\n             *\n             * @param {string} order The `order` option can have the following values.\n             * `asc`, `desc` to sort in ascending, descending order respectively.\n             */\n            order(order: 'asc' | 'desc'): this;\n\n            /**\n             * Missing specifies the value to use when the source finds a missing value\n             * in a document.\n             *\n             * Note: This option was deprecated in\n             * [Elasticsearch v6.0](https://www.elastic.co/guide/en/elasticsearch/reference/6.8/breaking-changes-6.0.html#_literal_missing_literal_is_deprecated_in_the_literal_composite_literal_aggregation).\n             * From 6.4 and later, use `missing_bucket` instead.\n             *\n             * @param {string|number} value\n             */\n            missing(value: string | number): this;\n\n            /**\n             * Specifies to include documents without a value for a given source in the\n             * response, or not. 
Defaults to `false` (not include).\n             *\n             * Note: This method is incompatible with elasticsearch 6.3 and older.\n             * Use it only with elasticsearch 6.4 and later.\n             *\n             * @param {boolean} value\n             */\n            missingBucket(value: boolean): this;\n\n            /**\n             * Override default `toJSON` to return DSL representation for the Composite\n             * Aggregation values source.\n             *\n             * @override\n             */\n            toJSON(): object;\n        }\n\n        /**\n         * `TermsValuesSource` is a source for the `CompositeAggregation` that handles\n         * terms. It works very similar to a terms aggregation with a slightly different\n         * syntax.\n         *\n         * @param {string} name\n         * @param {string=} field The field to aggregate on\n         *\n         * @extends ValuesSourceBase\n         */\n        export class TermsValuesSource extends ValuesSourceBase {\n            constructor(name: string, field?: string);\n        }\n\n        /**\n         * `TermsValuesSource` is a source for the `CompositeAggregation` that handles\n         * terms. It works very similar to a terms aggregation with a slightly different\n         * syntax.\n         *\n         * @param {string} name\n         * @param {string=} field The field to aggregate on\n         */\n        export function termsValuesSource(\n            name: string,\n            field?: string\n        ): TermsValuesSource;\n\n        /**\n         * `HistogramValuesSource` is a source for the `CompositeAggregation` that handles\n         * histograms. 
It works very similar to a histogram aggregation with a slightly\n         * different syntax.\n         *\n         * @param {string} name\n         * @param {string=} field The field to aggregate on\n         * @param {number=} interval Interval to generate histogram over.\n         *\n         * @extends ValuesSourceBase\n         */\n        export class HistogramValuesSource extends ValuesSourceBase {\n            constructor(name: string, field?: string, interval?: number);\n\n            /**\n             * Sets the histogram interval. Buckets are generated based on this interval value.\n             *\n             * @param {number} interval Interval to generate histogram over.\n             */\n            interval(interval: number): this;\n        }\n\n        /**\n         * `HistogramValuesSource` is a source for the `CompositeAggregation` that handles\n         * histograms. It works very similar to a histogram aggregation with a slightly\n         * different syntax.\n         *\n         * @param {string} name\n         * @param {string=} field The field to aggregate on\n         * @param {number=} interval Interval to generate histogram over.\n         */\n        export function histogramValuesSource(\n            name: string,\n            field?: string,\n            interval?: number\n        ): HistogramValuesSource;\n\n        /**\n         * `DateHistogramValuesSource` is a source for the `CompositeAggregation` that\n         * handles date histograms. 
It works very similar to a histogram aggregation\n         * with a slightly different syntax.\n         *\n         * @param {string} name\n         * @param {string=} field The field to aggregate on\n         * @param {string|number=} interval Interval to generate histogram over.\n         *\n         * @extends ValuesSourceBase\n         */\n        export class DateHistogramValuesSource extends ValuesSourceBase {\n            constructor(\n                name: string,\n                field?: string,\n                interval?: string | number\n            );\n\n            /**\n             * Sets the histogram interval. Buckets are generated based on this interval value.\n             *\n             * @param {string|number} interval Interval to generate histogram over.\n             */\n            interval(interval: string | number): this;\n\n            /**\n             * Calendar-aware intervals are configured with the calendarInterval parameter.\n             * The combined interval field for date histograms is deprecated from ES 7.2.\n             *\n             * @param {string} interval Interval to generate histogram over.\n             * You can specify calendar intervals using the unit name, such as month, or as\n             * a single unit quantity, such as 1M. For example, day and 1d are equivalent.\n             * Multiple quantities, such as 2d, are not supported.\n             */\n            calendarInterval(interval: string): this;\n\n            /**\n             * Fixed intervals are configured with the fixedInterval parameter.\n             * The combined interval field for date histograms is deprecated from ES 7.2.\n             *\n             * @param {string} interval Interval to generate histogram over.\n             * Intervals are a fixed number of SI units and never deviate, regardless\n             * of where they fall on the calendar. 
However, it means fixed intervals\n             * cannot express other units such as months, since the duration of a\n             * month is not a fixed quantity.\n             *\n             * The accepted units for fixed intervals are:\n             * milliseconds (ms), seconds (s), minutes (m), hours (h) and days (d).\n             */\n            fixedInterval(interval: string): this;\n\n            /**\n             * Sets the date time zone\n             *\n             * Date-times are stored in Elasticsearch in UTC. By default, all bucketing\n             * and rounding is also done in UTC. The `time_zone` parameter can be used\n             * to indicate that bucketing should use a different time zone.\n             *\n             * @param {string} tz Time zone. Time zones may either be specified\n             * as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as a timezone id,\n             * an identifier used in the TZ database like America/Los_Angeles.\n             */\n            timeZone(tz: string): this;\n\n            /**\n             * Sets the format expression for `key_as_string` in response buckets.\n             * If no format is specified, then it will use the first format specified\n             * in the field mapping.\n             *\n             * @param {string} fmt Format mask to apply on aggregation response.\n             * For Date Histograms, supports expressive [date format pattern](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html#date-format-pattern)\n             */\n            format(fmt: string): this;\n        }\n\n        /**\n         * `DateHistogramValuesSource` is a source for the `CompositeAggregation` that\n         * handles date histograms. 
It works very similar to a histogram aggregation\n         * with a slightly different syntax.\n         *\n         * @param {string} name\n         * @param {string=} field The field to aggregate on\n         * @param {string|number=} interval Interval to generate histogram over.\n         */\n        export function dateHistogramValuesSource(\n            name: string,\n            field?: string,\n            interval?: string | number\n        ): DateHistogramValuesSource;\n    }\n\n    /**\n     * The `HistogramAggregationBase` provides support for common options used across\n     * various histogram `Aggregation` implementations like Histogram Aggregation,\n     * Date Histogram aggregation.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string} aggType Type of aggregation\n     * @param {string=} field The field to aggregate on\n     * @param {string|number=} interval Interval to generate histogram over.\n     * @extends BucketAggregationBase\n     */\n    export class HistogramAggregationBase extends BucketAggregationBase {\n        constructor(\n            name: string,\n            aggType: string,\n            field?: string,\n            interval?: string | number\n        );\n\n        /**\n         * Sets the histogram interval. 
Buckets are generated based on this interval value.\n         *\n         * @param {string} interval Interval to generate histogram over.\n         * For date histograms, available expressions for interval:\n         * year, quarter, month, week, day, hour, minute, second\n         */\n        interval(interval: string): this;\n\n        /**\n         * Sets the format expression for `key_as_string` in response buckets.\n         * If no format is specified, then it will use the first format specified in the field mapping.\n         *\n         * @param {string} fmt Format mask to apply on aggregation response. Example: ####.00.\n         * For Date Histograms, supports expressive date format pattern\n         */\n        format(fmt: string): this;\n\n        /**\n         * The offset parameter is used to change the start value of each bucket\n         * by the specified positive (+) or negative offset (-).\n         * Negative offset is not applicable on HistogramAggregation.\n         * In case of DateHistogramAggregation, duration can be\n         * a value such as 1h for an hour, or 1d for a day.\n         *\n         * @param {string} offset Time or bucket key offset for bucketing.\n         */\n        offset(offset: string): this;\n\n        /**\n         * Sets the ordering for buckets\n         *\n         * @param {string} key\n         * @param {string} direction `asc` or `desc`\n         */\n        order(key: string, direction?: 'asc' | 'desc'): this;\n\n        /**\n         * Sets the minimum number of matching documents in range to return the bucket.\n         *\n         * @param {number} minDocCnt Integer value for minimum number of documents\n         * required to return bucket in response\n         */\n        minDocCount(minDocCnt: number): this;\n\n        /**\n         * Sets the range/bounds for the histogram aggregation.\n         * Useful when you want to include buckets that might be\n         * outside the bounds of indexed 
documents.\n         *\n         * @param {number|string} min Start bound / minimum bound value\n         * For histogram aggregation, Integer value can be used.\n         * For Date histogram, date expression can be used.\n         * Available expressions for interval:\n         * year, quarter, month, week, day, hour, minute, second\n         * @param {number|string} max End bound / maximum bound value\n         * For histogram aggregation, Integer value can be used.\n         * For Date histogram, date expression can be used.\n         * Available expressions for interval:\n         * year, quarter, month, week, day, hour, minute, second\n         */\n        extendedBounds(min: number | string, max: number | string): this;\n\n        /**\n         * Sets the range/bounds for the histogram aggregation.\n         * Useful when you want to limit the range of buckets in the histogram.\n         * It is particularly useful in the case of open data ranges that can result in a very large number of buckets.\n         * NOTE: Only available in Elasticsearch v7.10.0+\n         *\n         * @example\n         * const agg = esb.histogramAggregation('prices', 'price', 50).hardBounds(0, 500);\n         *\n         * @param {number|string} min Start bound / minimum bound value\n         * For histogram aggregation, Integer value can be used.\n         * For Date histogram, date expression can be used.\n         * Available expressions for interval:\n         * year, quarter, month, week, day, hour, minute, second\n         * @param {number|string} max End bound / maximum bound value\n         * For histogram aggregation, Integer value can be used.\n         * For Date histogram, date expression can be used.\n         * Available expressions for interval:\n         * year, quarter, month, week, day, hour, minute, second\n         * @returns {HistogramAggregationBase} returns `this` so that calls can be chained\n         */\n        hardBounds(min: number | string, max: 
number | string): this;\n\n        /**\n         * Sets the missing parameter which defines how documents\n         * that are missing a value should be treated.\n         *\n         * @param {string} value\n         */\n        missing(value: string): this;\n\n        /**\n         * Enable the response to be returned as a keyed object where the key is the\n         * bucket interval.\n         *\n         * @param {boolean} keyed To enable keyed response or not.\n         */\n        keyed(keyed: boolean): this;\n    }\n\n    /**\n     * The `AutoDateHistogramAggregation` is similar to the Date histogram aggregation except\n     * instead of providing an interval to use as the width of each bucket, a target number\n     * of buckets is provided indicating the number of buckets needed and the interval of the\n     * buckets is automatically chosen to best achieve that target.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @param {number=} buckets Bucket count to generate histogram over.\n     * @extends BucketAggregationBase\n     */\n    export class AutoDateHistogramAggregation extends BucketAggregationBase {\n        constructor(name: string, field?: string, buckets?: number);\n\n        /**\n         * Sets the bucket count. Buckets are generated based on this value.\n         *\n         * @param {number} buckets Bucket count to generate histogram over.\n         */\n        buckets(buckets: number): this;\n\n        /**\n         * Sets the format expression for `key_as_string` in response buckets.\n         * If no format is specified, then it will use the first format specified in the field mapping.\n         *\n         * @param {string} fmt Format mask to apply on aggregation response. 
Example: ####.00.\n         * For Date Histograms, supports expressive date format pattern\n         */\n        format(fmt: string): this;\n\n        /**\n         * Sets the missing parameter which defines how documents\n         * that are missing a value should be treated.\n         *\n         * @param {string} value\n         */\n        missing(value: string): this;\n\n        /**\n         * Sets the minimum rounding interval that should be used.\n         *\n         * @param {string} value\n         */\n        minimumInterval(value: string): this;\n\n        /**\n         * Date-times are stored in Elasticsearch in UTC.\n         * By default, all bucketing and rounding is also done in UTC.\n         * The `time_zone` parameter can be used to indicate that bucketing should use a different time zone.\n         * Sets the date time zone\n         *\n         * @param {string} tz Time zone. Time zones may either be specified\n         * as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as a timezone id,\n         * an identifier used in the TZ database like America/Los_Angeles.\n         */\n        timeZone(tz: string): this;\n    }\n\n    /**\n     * A multi-bucket aggregation similar to the histogram except it can only be applied on date values.\n     * The interval can be specified by date/time expressions.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @param {number=} buckets Bucket count to generate histogram over.\n     */\n    export function autoDateHistogramAggregation(\n        name: string,\n        field?: string,\n        buckets?: number\n    ): AutoDateHistogramAggregation;\n\n    /**\n     * A multi-bucket aggregation similar to Histogram, but the width of each bucket is not specified.\n     *\n     * NOTE: Only available in Elasticsearch v7.9.0+\n     * @param {string} name The name which will be used to refer to this 
aggregation.\n     * @param {string=} [field] The field to aggregate on\n     * @param {number=} [buckets] Bucket count to generate histogram over.\n     * @extends BucketAggregationBase\n     */\n    export class VariableWidthHistogramAggregation extends BucketAggregationBase {\n        constructor(name: string, field?: string, buckets?: number);\n\n        /**\n         * Sets the histogram bucket count. Buckets are generated based on this value.\n         *\n         * @param {number} buckets Bucket count to generate histogram over.\n         * @returns {VariableWidthHistogramAggregation} returns `this` so that calls can be chained\n         */\n        buckets(buckets: number): this;\n    }\n\n    /**\n     * A multi-bucket aggregation similar to Histogram, but the width of each bucket is not specified.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} [field] The field to aggregate on\n     * @param {number=} [buckets] Bucket count to generate histogram over.\n     * @extends BucketAggregationBase\n     */\n    export function variableWidthHistogramAggregation(\n        name: string,\n        field?: string,\n        buckets?: number\n    ): VariableWidthHistogramAggregation;\n\n    /**\n     * A multi-bucket aggregation similar to the histogram except it can only be applied on date values.\n     * The interval can be specified by date/time expressions.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @param {string=} interval Interval to generate histogram over.\n     * Available expressions for interval: year, quarter, month, week, day, hour, minute, second\n     * @extends HistogramAggregationBase\n     */\n    export class DateHistogramAggregation extends HistogramAggregationBase {\n        constructor(name: string, field?: string, interval?: string);\n\n        /**\n         * 
Date-times are stored in Elasticsearch in UTC.\n         * By default, all bucketing and rounding is also done in UTC.\n         * The `time_zone` parameter can be used to indicate that bucketing should use a different time zone.\n         * Sets the date time zone\n         *\n         * @param {string} tz Time zone. Time zones may either be specified\n         * as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as a timezone id,\n         * an identifier used in the TZ database like America/Los_Angeles.\n         */\n        timeZone(tz: string): this;\n\n        /**\n         * Calendar-aware intervals are configured with the calendarInterval parameter.\n         * The combined interval field for date histograms is deprecated from ES 7.2.\n         *\n         * @param {string} interval Interval to generate histogram over.\n         * You can specify calendar intervals using the unit name, such as month, or as\n         * a single unit quantity, such as 1M. For example, day and 1d are equivalent.\n         * Multiple quantities, such as 2d, are not supported.\n         */\n        calendarInterval(interval: string): this;\n\n        /**\n         * Fixed intervals are configured with the fixedInterval parameter.\n         * The combined interval field for date histograms is deprecated from ES 7.2.\n         *\n         * @param {string} interval Interval to generate histogram over.\n         * Intervals are a fixed number of SI units and never deviate, regardless\n         * of where they fall on the calendar. 
However, it means fixed intervals\n         * cannot express other units such as months, since the duration of a\n         * month is not a fixed quantity.\n         *\n         * The accepted units for fixed intervals are:\n         * milliseconds (ms), seconds (s), minutes (m), hours (h) and days (d).\n         */\n        fixedInterval(interval: string): this;\n    }\n\n    /**\n     * A multi-bucket aggregation similar to the histogram except it can only be applied on date values.\n     * The interval can be specified by date/time expressions.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @param {string=} interval Interval to generate histogram over.\n     * Available expressions for interval: year, quarter, month, week, day, hour, minute, second\n     */\n    export function dateHistogramAggregation(\n        name: string,\n        field?: string,\n        interval?: string\n    ): DateHistogramAggregation;\n\n    /**\n     * The `RangeAggregationBase` provides support for common options used across\n     * various range `Aggregation` implementations like Range Aggregation and\n     * Date Range aggregation.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string} aggType Type of aggregation\n     * @param {string=} field The field to aggregate on\n     * @extends BucketAggregationBase\n     */\n    export class RangeAggregationBase extends BucketAggregationBase {\n        constructor(name: string, aggType: string, field?: string);\n\n        /**\n         * Sets the format expression for `key_as_string` in response buckets.\n         * If no format is specified, then it will use the format 
specified in the field mapping.\n         *\n         * @param {string} fmt Supports expressive date format pattern for Date Histograms\n         */\n        format(fmt: string): this;\n\n        /**\n         * Adds a range to the list of existing range expressions.\n         *\n         * @param {object} range Range to aggregate over. Valid keys are `from`, `to` and `key`\n         * @throws {TypeError} If `range` is not an instance of object\n         * @throws {Error} If none of the required keys,\n         * `from`, `to` or `mask`(for IP range) is passed\n         */\n        range(range: object): this;\n\n        /**\n         * Adds the list of ranges to the list of existing range expressions.\n         *\n         * @param {Array<Object>} ranges Ranges to aggregate over.\n         * Each item must be an object with keys `from`, `to` and `key`.\n         * @throws {TypeError} If `ranges` is not an instance of an array or\n         * an item in the array is not an instance of object\n         * @throws {Error} If none of the required keys,\n         * `from`, `to` or `mask`(for IP range) is passed\n         */\n        ranges(ranges: object[]): this;\n\n        /**\n         * Sets the missing parameter which defines how documents\n         * that are missing a value should be treated.\n         *\n         * @param {string} value\n         */\n        missing(value: string): this;\n\n        /**\n         * Enable the response to be returned as a keyed object where the key is the\n         * bucket interval.\n         *\n         * @param {boolean} keyed To enable keyed response or not.\n         */\n        keyed(keyed: boolean): this;\n    }\n\n    /**\n     * A range aggregation that is dedicated for date values. 
The main difference\n     * between this aggregation and the normal range aggregation is that the from\n     * and to values can be expressed in Date Math expressions, and it is also\n     * possible to specify a date format by which the from and to response fields\n     * will be returned.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends RangeAggregationBase\n     */\n    export class DateRangeAggregation extends RangeAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * Sets the date time zone.\n         * Date-times are stored in Elasticsearch in UTC.\n         * By default, all bucketing and rounding is also done in UTC.\n         * The `time_zone` parameter can be used to indicate that\n         * bucketing should use a different time zone.\n         *\n         * @param {string} tz Time zone. Time zones may either be specified\n         * as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as a timezone id,\n         * an identifier used in the TZ database like America/Los_Angeles.\n         */\n        timeZone(tz: string): this;\n    }\n\n    /**\n     * A range aggregation that is dedicated for date values. 
The main difference\n     * between this aggregation and the normal range aggregation is that the from\n     * and to values can be expressed in Date Math expressions, and it is also\n     * possible to specify a date format by which the from and to response fields\n     * will be returned.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends RangeAggregationBase\n     */\n\n    export function dateRangeAggregation(\n        name: string,\n        field?: string\n    ): DateRangeAggregation;\n\n    /**\n     * A filtering aggregation used to limit any sub aggregations' processing\n     * to a sample of the top-scoring documents. Diversity settings\n     * are used to limit the number of matches that share a common value such as an \"author\".\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends BucketAggregationBase\n     */\n    export class DiversifiedSamplerAggregation extends BucketAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * The shard_size parameter limits how many top-scoring documents\n         * are collected in the sample processed on each shard. 
The default value is 100.\n         *\n         * @param {number} size Maximum number of documents to return from each shard(Integer)\n         */\n        shardSize(size: number): this;\n\n        /**\n         * Used to control the maximum number of documents collected\n         * on any one shard which share a common value.\n         * Applies on a per-shard basis only for the purposes of shard-local sampling.\n         *\n         * @param {number} maxDocsPerValue Default 1.(Integer)\n         */\n        maxDocsPerValue(maxDocsPerValue: number): this;\n\n        /**\n         * This setting can influence the management of the values used\n         * for de-duplication. Each option will hold up to shard_size\n         * values in memory while performing de-duplication but\n         * the type of value held can be controlled\n         *\n         * @param {string} hint the possible values are `map`, `global_ordinals`,\n         * `global_ordinals_hash` and `global_ordinals_low_cardinality`\n         * @throws {Error} If Execution Hint is outside the accepted set.\n         */\n        executionHint(\n            hint:\n                | 'map'\n                | 'global_ordinals'\n                | 'global_ordinals_hash'\n                | 'global_ordinals_low_cardinality'\n        ): this;\n    }\n\n    /**\n     * A filtering aggregation used to limit any sub aggregations' processing\n     * to a sample of the top-scoring documents. 
Diversity settings\n     * are used to limit the number of matches that share a common value such as an \"author\".\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function diversifiedSamplerAggregation(\n        name: string,\n        field?: string\n    ): DiversifiedSamplerAggregation;\n\n    /**\n     * Defines a single bucket of all the documents in the current document set\n     * context that match a specified filter. Often this will be used to narrow down\n     * the current aggregation context to a specific set of documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {Query=} filterQuery Query to filter on. Example - term query.\n     * @extends BucketAggregationBase\n     */\n    export class FilterAggregation extends BucketAggregationBase {\n        constructor(name: string, filterQuery?: Query);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on FilterAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on FilterAggregation\n         */\n        script(): never;\n\n        /**\n         * Set the filter query for Filter Aggregation.\n         *\n         * @param {Query} filterQuery Query to filter on. Example - term query.\n         * @throws {TypeError} If `filterQuery` is not an instance of `Query`\n         */\n        filter(filterQuery: Query): this;\n    }\n\n    /**\n     * Defines a single bucket of all the documents in the current document set\n     * context that match a specified filter. 
Often this will be used to narrow down\n     * the current aggregation context to a specific set of documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {Query=} filterQuery Query to filter on. Example - term query.\n     */\n    export function filterAggregation(\n        name: string,\n        filterQuery?: Query\n    ): FilterAggregation;\n\n    /**\n     * Defines a single bucket of all the documents in the current document set\n     * context that match a specified filter. Often this will be used to narrow down\n     * the current aggregation context to a specific set of documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @extends BucketAggregationBase\n     */\n    export class FiltersAggregation extends BucketAggregationBase {\n        constructor(name: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on FiltersAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on FiltersAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets a named filter query.\n         * Does not mix with anonymous filters.\n         * If anonymous filters are present, they will be overwritten.\n         *\n         * @param {string} bucketName Name for bucket which will collect\n         * all documents that match its associated filter.\n         * @param {Query} filterQuery Query to filter on. 
Example - term query.\n         * @throws {TypeError} If `filterQuery` is not an instance of `Query`\n         */\n        filter(bucketName: string, filterQuery: Query): this;\n\n        /**\n         * Assigns filters to already added filters.\n         * Does not mix with anonymous filters.\n         * If anonymous filters are present, they will be overwritten.\n         *\n         * @param {object} filterQueries Object with multiple key value pairs\n         * where bucket name is the key and filter query is the value.\n         * @throws {TypeError} If `filterQueries` is not an instance of object\n         */\n        filters(filterQueries: object): this;\n\n        /**\n         * Appends an anonymous filter query.\n         * Does not mix with named filters.\n         * If named filters are present, they will be overwritten.\n         *\n         * @param {*} filterQuery Query to filter on. Example - term query.\n         * @throws {TypeError} If `filterQuery` is not an instance of `Query`\n         */\n        anonymousFilter(filterQuery: object): this;\n\n        /**\n         * Appends an array of anonymous filters.\n         * Does not mix with named filters.\n         * If named filters are present, they will be overwritten.\n         *\n         * @param {*} filterQueries Array of queries to filter on and generate buckets.\n         * Example - term query.\n         * @throws {TypeError} If `filterQueries` is not an instance of Array\n         */\n        anonymousFilters(filterQueries: object): this;\n\n        /**\n         * Adds a bucket to the response which will contain all documents\n         * that do not match any of the given filters.\n         * Returns the other bucket either in a bucket\n         * (named `_other_` by default) if named filters are being used,\n         * or as the last bucket if anonymous filters are being used\n         *\n         * @param {boolean} enable `True` to return `other` bucket with documents\n         
* that do not match any filters and `False` to disable computation\n         * @param {string=} otherBucketKey Optional key for the other bucket.\n         * Default is `_other_`.\n         */\n        otherBucket(enable: boolean, otherBucketKey?: string): this;\n\n        /**\n         * Sets the key for the other bucket to a value other than the default `_other_`.\n         * Setting this parameter will implicitly set the other_bucket parameter to true.\n         * If anonymous filters are being used, setting this parameter will not make sense.\n         *\n         * @param {string} otherBucketKey\n         */\n        otherBucketKey(otherBucketKey: string): this;\n    }\n\n    /**\n     * Defines a single bucket of all the documents in the current document set\n     * context that match a specified filter. Often this will be used to narrow down\n     * the current aggregation context to a specific set of documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     */\n    export function filtersAggregation(name: string): FiltersAggregation;\n\n    /**\n     * A multi-bucket aggregation that works on geo_point fields and conceptually\n     * works very similar to the range aggregation. The user can define a point of\n     * origin and a set of distance range buckets. 
The aggregation evaluate the\n     * distance of each document value from the origin point and determines the\n     * buckets it belongs to based on the ranges (a document belongs to a bucket\n     * if the distance between the document and the origin falls within the distance\n     * range of the bucket).\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends RangeAggregationBase\n     */\n    export class GeoDistanceAggregation extends RangeAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoDistanceAggregation\n         */\n        format(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoDistanceAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets the point of origin from where distances will be measured.\n         *\n         * @param {GeoPoint} point A valid `GeoPoint` object.\n         * @throws {TypeError} If `point` is not an instance of `GeoPoint`\n         */\n        origin(point: GeoPoint): this;\n\n        /**\n         * Sets the distance unit.  
Valid values are:\n         * mi (miles), in (inches), yd (yards),\n         * km (kilometers), cm (centimeters), mm (millimeters),\n         * ft(feet), NM(nauticalmiles)\n         *\n         * @param {string} unit Distance unit, default is `m`(meters).\n         * @throws {Error} If Unit is outside the accepted set.\n         */\n        unit(unit: string): this;\n\n        /**\n         * Sets the distance calculation mode, `arc` or `plane`.\n         * The `arc` calculation is the more accurate.\n         * The `plane` is the faster but least accurate.\n         *\n         * @param {string} type\n         * @throws {Error} If `type` is neither `plane` nor `arc`.\n         */\n        distanceType(type: 'arc' | 'plane'): this;\n    }\n\n    /**\n     * A multi-bucket aggregation that works on geo_point fields and conceptually\n     * works very similar to the range aggregation. The user can define a point of\n     * origin and a set of distance range buckets. The aggregation evaluate the\n     * distance of each document value from the origin point and determines the\n     * buckets it belongs to based on the ranges (a document belongs to a bucket\n     * if the distance between the document and the origin falls within the distance\n     * range of the bucket).\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function geoDistanceAggregation(\n        name: string,\n        field?: string\n    ): GeoDistanceAggregation;\n\n    /**\n     * A multi-bucket aggregation that works on geo_point fields and groups points\n     * into buckets that represent cells in a grid. The resulting grid can be sparse\n     * and only contains cells that have matching data. 
Each cell is labeled using a\n     * geohash which is of user-definable precision.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends BucketAggregationBase\n     */\n    export class GeoHashGridAggregation extends BucketAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoHashGridAggregation\n         */\n        format(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoHashGridAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets the precision for the generated geohash.\n         *\n         * @param {number} precision Precision can be between 1 and 12\n         * @throws {Error} If precision is not between 1 and 12.\n         */\n        precision(precision: number): this;\n\n        /**\n         * Sets the maximum number of geohash buckets to return.\n         * When results are trimmed, buckets are prioritised\n         * based on the volumes of documents they contain.\n         *\n         * @param {number} size Optional. The maximum number of geohash\n         * buckets to return (defaults to 10,000).\n         */\n        size(size: number): this;\n\n        /**\n         * Determines how many geohash_grid the coordinating node\n         * will request from each shard.\n         *\n         * @param {number} shardSize Optional.\n         */\n        shardSize(shardSize: number): this;\n    }\n\n    /**\n     * A multi-bucket aggregation that works on geo_point fields and groups points\n     * into buckets that represent cells in a grid. The resulting grid can be sparse\n     * and only contains cells that have matching data. 
Each cell is labeled using a\n     * geohash which is of user-definable precision.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function geoHashGridAggregation(\n        name: string,\n        field?: string\n    ): GeoHashGridAggregation;\n\n    /**\n     * A multi-bucket aggregation that groups geo_point and geo_shape values into buckets\n     * that represent a grid. The resulting grid can be sparse and only contains cells\n     * that have matching data. Each cell corresponds to a H3 cell index and is labeled\n     * using the H3Index representation.\n     *\n     * NOTE: This aggregation was added in elasticsearch v8.1.0.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends BucketAggregationBase\n     */\n    export class GeoHexGridAggregation extends BucketAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoHexGridAggregation\n         */\n        format(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoHexGridAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets the precision for the generated geohex.\n         *\n         * @param {number} precision Precision can be between 0 and 15\n         * @throws {Error} If precision is not between 0 and 15.\n         */\n        precision(precision: number): this;\n\n        /**\n         * Sets the maximum number of geohex buckets to return.\n         * When results are trimmed, buckets are prioritised\n         * based on the volumes of documents they contain.\n         *\n         * @param {number} size Optional. 
The maximum number of geohex\n         * buckets to return (defaults to 10,000).\n         */\n        size(size: number): this;\n\n        /**\n         * Determines how many geohex_grid the coordinating node\n         * will request from each shard.\n         *\n         * @param {number} shardSize Optional.\n         */\n        shardSize(shardSize: number): this;\n    }\n\n    /**\n     * A multi-bucket aggregation that groups geo_point and geo_shape values into buckets\n     * that represent a grid. The resulting grid can be sparse and only contains cells\n     * that have matching data. Each cell corresponds to a H3 cell index and is labeled\n     * using the H3Index representation.\n     *\n     * NOTE: This aggregation was added in elasticsearch v8.1.0.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function geoHexGridAggregation(\n        name: string,\n        field?: string\n    ): GeoHexGridAggregation;\n\n    /**\n     * A multi-bucket aggregation that works on geo_point fields and groups points\n     * into buckets that represent cells in a grid. The resulting grid can be sparse\n     * and only contains cells that have matching data. Each cell corresponds to a\n     * map tile as used by many online map sites. 
Each cell is labeled using a\n     * \"{zoom}/{x}/{y}\" format, where zoom is equal to the user-specified precision.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends BucketAggregationBase\n     */\n    export class GeoTileGridAggregation extends BucketAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoTileGridAggregation\n         */\n        format(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GeoTileGridAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets the precision for the generated geotile.\n         *\n         * @param {number} precision Precision can be between 0 and 29\n         * @throws {Error} If precision is not between 0 and 29.\n         */\n        precision(precision: number): this;\n\n        /**\n         * Sets the maximum number of geotile buckets to return.\n         * When results are trimmed, buckets are prioritised\n         * based on the volumes of documents they contain.\n         *\n         * @param {number} size Optional. 
The maximum number of geotile\n         * buckets to return (defaults to 10,000).\n         */\n        size(size: number): this;\n\n        /**\n         * Determines how many geotile_grid the coordinating node\n         * will request from each shard.\n         *\n         * @param {number} shardSize Optional.\n         */\n        shardSize(shardSize: number): this;\n\n        /**\n         * Sets the top left coordinate for the bounding box used to filter the\n         * points in the bucket.\n         *\n         * @param {GeoPoint} point A valid `GeoPoint`\n         */\n        topLeft(point: GeoPoint): this;\n\n        /**\n         * Sets the bottom right coordinate for the bounding box used to filter the\n         * points in the bucket.\n         *\n         * @param {GeoPoint} point A valid `GeoPoint`\n         */\n        bottomRight(point: GeoPoint): this;\n\n        /**\n         * Sets the top right coordinate for the bounding box used to filter the\n         * points in the bucket.\n         *\n         * @param {GeoPoint} point A valid `GeoPoint`\n         */\n        topRight(point: GeoPoint): this;\n\n        /**\n         * Sets the bottom left coordinate for the bounding box used to filter the\n         * points in the bucket.\n         *\n         * @param {GeoPoint} point A valid `GeoPoint`\n         */\n        bottomLeft(point: GeoPoint): this;\n\n        /**\n         * Sets value for top of the bounding box.\n         *\n         * @param {number} val\n         */\n        top(val: number): this;\n\n        /**\n         * Sets value for left of the bounding box.\n         *\n         * @param {number} val\n         */\n        left(val: number): this;\n\n        /**\n         * Sets value for bottom of the bounding box.\n         *\n         * @param {number} val\n         */\n        bottom(val: number): this;\n\n        /**\n         * Sets value for right of the bounding box.\n         *\n         * @param {number} val\n         */\n  
      right(val: number): this;\n    }\n\n    /**\n     * A multi-bucket aggregation that works on geo_point fields and groups points\n     * into buckets that represent cells in a grid. The resulting grid can be sparse\n     * and only contains cells that have matching data. Each cell corresponds to a\n     * map tile as used by many online map sites. Each cell is labeled using a\n     * \"{zoom}/{x}/{y}\" format, where zoom is equal to the user-specified precision.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function geoTileGridAggregation(\n        name: string,\n        field?: string\n    ): GeoTileGridAggregation;\n\n    /**\n     * Defines a single bucket of all the documents within the search execution\n     * context. This context is defined by the indices and the document types you’re\n     * searching on, but is not influenced by the search query itself.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @extends BucketAggregationBase\n     */\n    export class GlobalAggregation extends BucketAggregationBase {\n        constructor(name: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GlobalAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on GlobalAggregation\n         */\n        script(): never;\n    }\n\n    /**\n     * Defines a single bucket of all the documents within the search execution\n     * context. 
This context is defined by the indices and the document types you’re\n     * searching on, but is not influenced by the search query itself.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     */\n    export function globalAggregation(name: string): GlobalAggregation;\n\n    /**\n     * A multi-bucket values source based aggregation that can be applied on\n     * numeric values extracted from the documents. It dynamically builds fixed\n     * size (a.k.a. interval) buckets over the values.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @param {number=} interval Interval to generate histogram over.\n     * @extends HistogramAggregationBase\n     */\n    export class HistogramAggregation extends HistogramAggregationBase {\n        constructor(name: string, field?: string, interval?: number);\n    }\n\n    /**\n     * A multi-bucket values source based aggregation that can be applied on\n     * numeric values extracted from the documents. It dynamically builds fixed\n     * size (a.k.a. 
interval) buckets over the values.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @param {number=} interval Interval to generate histogram over.\n     */\n    export function histogramAggregation(\n        name: string,\n        field?: string,\n        interval?: number\n    ): HistogramAggregation;\n\n    /**\n     * Dedicated range aggregation for IP typed fields.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends RangeAggregationBase\n     */\n    export class IpRangeAggregation extends RangeAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on IpRangeAggregation\n         */\n        format(): never;\n    }\n\n    /**\n     * Dedicated range aggregation for IP typed fields.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function ipRangeAggregation(\n        name: string,\n        field?: string\n    ): IpRangeAggregation;\n\n    /**\n     * A field data based single bucket aggregation, that creates a bucket of all\n     * documents in the current document set context that are missing a field value\n     * (effectively, missing a field or having the configured NULL value set).\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends BucketAggregationBase\n     */\n    export class MissingAggregation extends BucketAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on 
MissingAggregation\n         */\n        script(): never;\n    }\n\n    /**\n     * A field data based single bucket aggregation, that creates a bucket of all\n     * documents in the current document set context that are missing a field value\n     * (effectively, missing a field or having the configured NULL value set).\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function missingAggregation(\n        name: string,\n        field?: string\n    ): MissingAggregation;\n\n    /**\n     * A special single bucket aggregation that enables aggregating nested\n     * documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} path `path` of the nested document\n     * @extends BucketAggregationBase\n     */\n    export class NestedAggregation extends BucketAggregationBase {\n        constructor(name: string, path?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on NestedAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on NestedAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets the nested path\n         *\n         * @param {string} path `path` of the nested document\n         */\n        path(path: string): this;\n    }\n\n    /**\n     * A special single bucket aggregation that enables aggregating nested\n     * documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} path `path` of the nested document\n     */\n    export function nestedAggregation(\n        name: string,\n        path?: string\n    ): NestedAggregation;\n\n    /**\n     * A special single bucket aggregation that enables aggregating\n     * from buckets 
on child document types to buckets on parent documents.\n     * This aggregation relies on the `_parent` field in the mapping.\n     *\n     * NOTE: This query was added in elasticsearch v6.6.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @extends BucketAggregationBase\n     */\n    export class ParentAggregation extends BucketAggregationBase {\n        constructor(name: string);\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ParentAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ParentAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets the child type/mapping for aggregation.\n         *\n         * @param {string} type The child type that the buckets in the parent space should be mapped to.\n         */\n        type(type: string): this;\n    }\n\n    /**\n     * A special single bucket aggregation that enables aggregating\n     * from buckets on child document types to buckets on parent documents.\n     * This aggregation relies on the `_parent` field in the mapping.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     */\n    export function parentAggregation(name: string): ParentAggregation;\n\n    /**\n     * A multi-bucket value source based aggregation that enables the user to\n     * define a set of ranges - each representing a bucket. 
During the aggregation
     * process, the values extracted from each document will be checked against each
     * bucket range and "bucket" the relevant/matching document.
     * Note that this aggregation includes the from value and excludes the to
     * value for each range.
     *
     * @param {string} name The name which will be used to refer to this aggregation.
     * @param {string=} field The field to aggregate on
     * @extends RangeAggregationBase
     */
    export class RangeAggregation extends RangeAggregationBase {
        constructor(name: string, field?: string);
    }

    /**
     * A multi-bucket value source based aggregation that enables the user to
     * define a set of ranges - each representing a bucket. During the aggregation
     * process, the values extracted from each document will be checked against each
     * bucket range and "bucket" the relevant/matching document.
     * Note that this aggregation includes the from value and excludes the to
     * value for each range.
     *
     * @param {string} name The name which will be used to refer to this aggregation.
     * @param {string=} field The field to aggregate on
     */
    export function rangeAggregation(
        name: string,
        field?: string
    ): RangeAggregation;

    /**
     * A multi-bucket value source based aggregation which finds
     * "rare" terms — terms that are at the long-tail of the
     * distribution and are not frequent. 
Conceptually, this is like\n     * a terms aggregation that is sorted by `_count` ascending.\n     * As noted in the terms aggregation docs, actually ordering\n     * a `terms` agg by count ascending has unbounded error.\n     * Instead, you should use the `rare_terms` aggregation\n     *\n     * NOTE: Only available in Elasticsearch 7.3.0+.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string} field The field we wish to find rare terms in\n     * @extends BucketAggregationBase\n     */\n    export class RareTermsAggregation extends BucketAggregationBase {\n        constructor(name: string, field: string);\n\n        /**\n         * Sets the maximum number of documents a term should appear in.\n         *\n         * @param {number} maxDocCnt Integer value for maximum number of documents a term should appear in.\n         * Max doc count can be between 1 and 100.\n         * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n         */\n        maxDocCount(maxDocCnt: number): this;\n\n        /**\n         * Sets the precision of the internal CuckooFilters. Smaller precision\n         * leads to better approximation, but higher memory usage.\n         * Cannot be smaller than 0.00001\n         *\n         * @param {number} precision Float value for precision of the internal CuckooFilters. 
Default is 0.01\n         * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n         */\n        precision(precision: number): this;\n\n        /**\n         * Sets terms that should be included in the aggregation\n         *\n         * @param {string} include Regular expression that will determine what values\n         * are \"allowed\" to be aggregated\n         * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n         */\n        include(include: string): this;\n\n        /**\n         * Sets terms that should be excluded from the aggregation\n         *\n         * @param {string} exclude Regular expression that will determine what values\n         * should not be aggregated\n         * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n         */\n        exclude(exclude: string): this;\n\n        /**\n         * Sets the missing parameter which defines how documents\n         * that are missing a value should be treated.\n         *\n         * @param {string} value\n         * @returns {RareTermsAggregation} returns `this` so that calls can be chained\n         */\n        missing(value: string): this;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on RareTermsAggregation\n         */\n        script(): never;\n    }\n\n    /**\n     * A multi-bucket value source based aggregation which finds\n     * \"rare\" terms — terms that are at the long-tail of the\n     * distribution and are not frequent. 
Conceptually, this is like\n     * a terms aggregation that is sorted by `_count` ascending.\n     * As noted in the terms aggregation docs, actually ordering\n     * a `terms` agg by count ascending has unbounded error.\n     * Instead, you should use the `rare_terms` aggregation\n     *\n     * NOTE: Only available in Elasticsearch 7.3.0+.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string} field The field we wish to find rare terms in\n     */\n    export function rareTermsAggregation(\n        name: string,\n        field: string\n    ): RareTermsAggregation;\n\n    /**\n     * A special single bucket aggregation that enables aggregating\n     * on parent docs from nested documents. Effectively this\n     * aggregation can break out of the nested block structure and\n     * link to other nested structures or the root document,\n     * which allows nesting other aggregations that aren’t part of\n     * the nested object in a nested aggregation.\n     * The `reverse_nested` aggregation must be defined inside a nested aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} path Defines to what nested object field should be joined back.\n     * The default is empty, which means that it joins back to the root / main document\n     * level.\n     * @extends BucketAggregationBase\n     */\n    export class ReverseNestedAggregation extends BucketAggregationBase {\n        constructor(name: string, path?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ReverseNestedAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on ReverseNestedAggregation\n         */\n        script(): never;\n\n        /**\n         * Sets the level to join back for subsequent aggregations in a multiple\n    
     * layered nested object types\n         *\n         * @param {string} path Defines to what nested object field should be joined back.\n         * The default is empty, which means that it joins back to the root / main document\n         * level.\n         */\n        path(path: string): this;\n    }\n\n    /**\n     * A special single bucket aggregation that enables aggregating\n     * on parent docs from nested documents. Effectively this\n     * aggregation can break out of the nested block structure and\n     * link to other nested structures or the root document,\n     * which allows nesting other aggregations that aren’t part of\n     * the nested object in a nested aggregation.\n     * The `reverse_nested` aggregation must be defined inside a nested aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} path Defines to what nested object field should be joined back.\n     * The default is empty, which means that it joins back to the root / main document\n     * level.\n     */\n    export function reverseNestedAggregation(\n        name: string,\n        path?: string\n    ): ReverseNestedAggregation;\n\n    /**\n     * A filtering aggregation used to limit any sub aggregations'\n     * processing to a sample of the top-scoring documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends BucketAggregationBase\n     */\n    export class SamplerAggregation extends BucketAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on SamplerAggregation\n         */\n        field(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on SamplerAggregation\n         */\n        script(): never;\n\n        /**\n      
   * The shard_size parameter limits how many top-scoring documents\n         * are collected in the sample processed on each shard. The default value is 100.\n         *\n         * @param {number} size Maximum number of documents to return from each shard(Integer)\n         */\n        shardSize(size: number): this;\n    }\n\n    /**\n     * A filtering aggregation used to limit any sub aggregations'\n     * processing to a sample of the top-scoring documents.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function samplerAggregation(\n        name: string,\n        field?: string\n    ): SamplerAggregation;\n\n    /**\n     * The `TermsAggregationBase` provides support for common options used across\n     * various terms `Aggregation` implementations like Significant terms and\n     * Terms aggregation.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string} aggType Type of aggregation\n     * @param {string} refUrl Elasticsearch reference URL.\n     * @param {string=} field The field to aggregate on\n     * @extends BucketAggregationBase\n     */\n    export class TermsAggregationBase extends BucketAggregationBase {\n        constructor(\n            name: string,\n            aggType: string,\n            refUrl: string,\n            field?: string\n        );\n\n        /**\n         * Sets the format expression for `key_as_string` in response buckets.\n         * If no format is specified, then it will use the first format specified in the field mapping.\n         *\n         * @param {string} fmt Format mask to apply on aggregation response. 
Example: ####.00.\n         */\n        format(fmt: string): this;\n\n        /**\n         * Sets the minimum number of matching hits required to return the terms.\n         *\n         * @param {number} minDocCnt Integer value for minimum number of documents\n         * required to return bucket in response\n         */\n        minDocCount(minDocCnt: number): this;\n\n        /**\n         * Sets the parameter which regulates the _certainty_ a shard has if the term\n         * should actually be added to the candidate list or not with respect to\n         * the `min_doc_count`.\n         * Terms will only be considered if their local shard frequency within\n         * the set is higher than the `shard_min_doc_count`.\n         *\n         * @param {number} minDocCnt Sets the `shard_min_doc_count` parameter. Default is 1\n         * and has no effect unless you explicitly set it.\n         */\n        shardMinDocCount(minDocCnt: number): this;\n\n        /**\n         * Defines how many term buckets should be returned out of the overall terms list.\n         *\n         * @param {number} size\n         */\n        size(size: number): this;\n\n        /**\n         * Sets the `shard_size` parameter to control the volumes of candidate terms\n         * produced by each shard. 
For the default, -1, shard_size will be automatically\n         * estimated based on the number of shards and the size parameter.\n         * `shard_size` cannot be smaller than size (as it doesn’t make much sense).\n         * When it is, elasticsearch will override it and reset it to be equal to size.\n         *\n         * @param {number} size\n         */\n        shardSize(size: number): this;\n\n        /**\n         * Sets the missing parameter which defines how documents\n         * that are missing a value should be treated.\n         *\n         * @param {string} value\n         */\n        missing(value: string): this;\n\n        /**\n         * Filter the values for which buckets will be created.\n         *\n         * @param {RegExp|Array|string} clause Determine what values are \"allowed\" to be aggregated\n         */\n        include(clause: object | string[] | string): this;\n\n        /**\n         * Filter the values for which buckets will be created.\n         *\n         * @param {RegExp|Array|string} clause Determine the values that should not be aggregated\n         */\n        exclude(clause: object | string[] | string): this;\n\n        /**\n         * This setting can influence the management of the values used\n         * for de-duplication. 
Each option will hold up to shard_size\n         * values in memory while performing de-duplication but\n         * the type of value held can be controlled\n         *\n         * @param {string} hint the possible values are `map`, `global_ordinals`,\n         * `global_ordinals_hash` and `global_ordinals_low_cardinality`\n         * @throws {Error} If Execution Hint is outside the accepted set.\n         */\n        executionHint(\n            hint:\n                | 'map'\n                | 'global_ordinals'\n                | 'global_ordinals_hash'\n                | 'global_ordinals_low_cardinality'\n        ): this;\n    }\n\n    /**\n     * The `SignificantAggregationBase` provides support for common options used\n     * in `SignificantTermsAggregation` and `SignificantTextAggregation`.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @extends TermsAggregationBase\n     */\n    export class SignificantAggregationBase extends TermsAggregationBase {\n        /**\n         * Use JLH score as significance score.\n         */\n        jlh(): this;\n\n        /**\n         * Use `mutual_information` as significance score\n         *\n         * @param {boolean=} includeNegatives Default `true`. If set to `false`,\n         * filters out the terms that appear less often in the subset than in\n         * documents outside the subset\n         * @param {boolean=} backgroundIsSuperset `true`(default) if the documents in the bucket\n         * are also contained in the background. 
If instead you defined a custom background filter\n         * that represents a different set of documents that you want to compare to, pass `false`\n         */\n        mutualInformation(\n            includeNegatives?: boolean,\n            backgroundIsSuperset?: boolean\n        ): this;\n\n        /**\n         * Use `chi_square` as significance score\n         *\n         * @param {boolean} includeNegatives Default `true`. If set to `false`,\n         * filters out the terms that appear less often in the subset than in\n         * documents outside the subset\n         * @param {boolean} backgroundIsSuperset `true`(default) if the documents in the bucket\n         * are also contained in the background. If instead you defined a custom background filter\n         * that represents a different set of documents that you want to compare to, pass `false`\n         */\n        chiSquare(\n            includeNegatives?: boolean,\n            backgroundIsSuperset?: boolean\n        ): this;\n\n        /**\n         * Sets `gnd`, google normalized score to be used as significance score.\n         *\n         * @param {boolean} backgroundIsSuperset `true`(default) if the documents in the bucket\n         * are also contained in the background. If instead you defined a custom background filter\n         * that represents a different set of documents that you want to compare to, pass `false`\n         */\n        gnd(backgroundIsSuperset?: boolean): this;\n\n        /**\n         * Use a simple calculation of the number of documents in the foreground sample with a term\n         * divided by the number of documents in the background with the term. 
By default this\n         * produces a score greater than zero and less than one.\n         */\n        percentage(): this;\n\n        /**\n         * Sets script for customized score calculation.\n         *\n         * @param {Script} script\n         */\n        scriptHeuristic(script: Script): this;\n\n        /**\n         * Sets the `background_filter` to narrow the scope of statistical information\n         * for background term frequencies instead of using the entire index.\n         *\n         * @param {Query} filterQuery Filter query\n         */\n        backgroundFilter(filterQuery: Query): this;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on SignificantAggregationBase\n         */\n        script(): never;\n    }\n\n    /**\n     * An aggregation that returns interesting or unusual occurrences of terms in\n     * a set.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends SignificantAggregationBase\n     */\n    export class SignificantTermsAggregation extends SignificantAggregationBase {\n        constructor(name: string, field?: string);\n    }\n\n    /**\n     * An aggregation that returns interesting or unusual occurrences of terms in\n     * a set.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function significantTermsAggregation(\n        name: string,\n        field?: string\n    ): SignificantTermsAggregation;\n\n    /**\n     * An aggregation that returns interesting or unusual occurrences of free-text\n     * terms in a set. 
It is like the `SignificantTermsAggregation` but differs in\n     * that:\n     *   - It is specifically designed for use on type `text` fields\n     *   - It does not require field data or doc-values\n     *   - It re-analyzes text content on-the-fly meaning it can also filter\n     *     duplicate sections of noisy text that otherwise tend to skew statistics.\n     *\n     * NOTE: This query was added in elasticsearch v6.0.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends SignificantAggregationBase\n     */\n    export class SignificantTextAggregation extends SignificantAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * Control if duplicate paragraphs of text should try be filtered from the\n         * statistical text analysis. Can improve results but slows down analysis.\n         * Default is `false`.\n         *\n         * @param {boolean} enable\n         */\n        filterDuplicateText(enable: boolean): this;\n\n        /**\n         * Selects the fields to load from `_source` JSON and analyze. If none are\n         * specified, the indexed \"fieldName\" value is assumed to also be the name\n         * of the JSON field holding the value\n         *\n         * @param {Array<string>} srcFields Array of fields\n         */\n        sourceFields(srcFields: string[]): this;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on SignificantTextAggregation\n         */\n        missing(): never;\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on SignificantTextAggregation\n         */\n        executionHint(): never;\n    }\n\n    /**\n     * An aggregation that returns interesting or unusual occurrences of free-text\n     * terms in a set. 
It is like the `SignificantTermsAggregation` but differs in\n     * that:\n     *   - It is specifically designed for use on type `text` fields\n     *   - It does not require field data or doc-values\n     *   - It re-analyzes text content on-the-fly meaning it can also filter\n     *     duplicate sections of noisy text that otherwise tend to skew statistics.\n     *\n     * NOTE: This query was added in elasticsearch v6.0.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function significantTextAggregation(\n        name: string,\n        field?: string\n    ): SignificantTextAggregation;\n\n    /**\n     * A multi-bucket value source based aggregation where buckets are dynamically\n     * built - one per unique value.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     * @extends TermsAggregationBase\n     */\n    export class TermsAggregation extends TermsAggregationBase {\n        constructor(name: string, field?: string);\n\n        /**\n         * When set to `true`, shows an error value for each term returned by the aggregation\n         * which represents the _worst case error_ in the document count and can be useful\n         * when deciding on a value for the shard_size parameter.\n         *\n         * @param {boolean} enable\n         */\n        showTermDocCountError(enable: boolean): this;\n\n        /**\n         * Break the analysis up into multiple requests by grouping the field’s values\n         * into a number of partitions at query-time and processing only one\n         * partition in each request.\n         * Note that this method is a special case as the name doesn't map to the\n         * elasticsearch parameter name. 
This is required because there is already\n         * a method for `include` applicable for Terms aggregations. However, this\n         * could change depending on community interest.\n         *\n         * @param {number} partition\n         * @param {number} numPartitions\n         */\n        includePartition(partition: number, numPartitions: number): this;\n\n        /**\n         * Can be used for deferring calculation of child aggregations by using\n         * `breadth_first` mode. In `depth_first` mode all branches of the aggregation\n         * tree are expanded in one depth-first pass and only then any pruning occurs.\n         *\n         * @param {string} mode The possible values are `breadth_first` and `depth_first`.\n         */\n        collectMode(mode: 'breadth_first' | 'depth_first'): this;\n\n        /**\n         * Sets the ordering for buckets\n         *\n         * @param {string} key\n         * @param {string} direction `asc` or `desc`\n         */\n        order(key: string, direction?: 'asc' | 'desc'): this;\n    }\n\n    /**\n     * A multi-bucket value source based aggregation where buckets are dynamically\n     * built - one per unique value.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} field The field to aggregate on\n     */\n    export function termsAggregation(\n        name: string,\n        field?: string\n    ): TermsAggregation;\n\n    /**\n     * The `PipelineAggregationBase` provides support for common options used across\n     * various pipeline `Aggregation` implementations.\n     * Pipeline aggregations cannot have sub-aggregations but depending on the type\n     * it can reference another pipeline in the buckets_path allowing pipeline\n     * aggregations to be chained. For example, you can chain together two derivatives\n     * to calculate the second derivative (i.e. 
a derivative of a derivative).\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} name a valid aggregation name\n     * @param {string} aggType type of aggregation\n     * @param {string} refUrl Elasticsearch reference URL\n     * @param {string|Object=} bucketsPath The relative path of metric to aggregate over\n     * @extends Aggregation\n     */\n    export class PipelineAggregationBase extends Aggregation {\n        constructor(\n            name: string,\n            aggType: string,\n            refUrl: string,\n            bucketsPath?: string | object\n        );\n\n        /**\n         * Sets the relative path, `buckets_path`, which refers to the metric to aggregate over.\n         * Required.\n         *\n         * @param {string|Object} path\n         */\n        bucketsPath(path: string | any): this;\n\n        /**\n         * Set policy for missing data. Optional.\n         *\n         * @param {string} policy Can be `skip` or `insert_zeros`\n         */\n        gapPolicy(policy: 'skip' | 'insert_zeros'): this;\n\n        /**\n         * Sets the format expression if applicable. Optional.\n         *\n         * @param {string} fmt Format mask to apply on aggregation response. Example: ####.00\n         */\n        format(fmt: string): this;\n    }\n\n    /**\n     * A sibling pipeline aggregation which calculates the (mean) average value\n     * of a specified metric in a sibling aggregation. 
The specified metric must\n     * be numeric and the sibling aggregation must be a multi-bucket aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class AvgBucketAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n    }\n\n    /**\n     * A sibling pipeline aggregation which calculates the (mean) average value\n     * of a specified metric in a sibling aggregation. The specified metric must\n     * be numeric and the sibling aggregation must be a multi-bucket aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function avgBucketAggregation(\n        name: string,\n        bucketsPath?: string\n    ): AvgBucketAggregation;\n\n    /**\n     * A parent pipeline aggregation which calculates the derivative of a\n     * specified metric in a parent histogram (or date_histogram) aggregation.\n     * The specified metric must be numeric and the enclosing histogram must\n     * have min_doc_count set to 0 (default for histogram aggregations).\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class DerivativeAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n\n        /**\n         * Set the units of the derivative values. 
`unit` specifies what unit to use for\n         * the x-axis of the derivative calculation\n         *\n         * @param {string} unit `unit` specifies what unit to use for\n         * the x-axis of the derivative calculation\n         */\n        unit(unit: string): DerivativeAggregation;\n    }\n\n    /**\n     * A parent pipeline aggregation which calculates the derivative of a\n     * specified metric in a parent histogram (or date_histogram) aggregation.\n     * The specified metric must be numeric and the enclosing histogram must\n     * have min_doc_count set to 0 (default for histogram aggregations).\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function derivativeAggregation(\n        name: string,\n        bucketsPath?: string\n    ): DerivativeAggregation;\n\n    /**\n     * A sibling pipeline aggregation which identifies the bucket(s) with\n     * the maximum value of a specified metric in a sibling aggregation and\n     * outputs both the value and the key(s) of the bucket(s). The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class MaxBucketAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n    }\n\n    /**\n     * A sibling pipeline aggregation which identifies the bucket(s) with\n     * the maximum value of a specified metric in a sibling aggregation and\n     * outputs both the value and the key(s) of the bucket(s). 
The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function maxBucketAggregation(\n        name: string,\n        bucketsPath?: string\n    ): MaxBucketAggregation;\n\n    /**\n     * A sibling pipeline aggregation which identifies the bucket(s) with\n     * the minimum value of a specified metric in a sibling aggregation and\n     * outputs both the value and the key(s) of the bucket(s). The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class MinBucketAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n    }\n\n    /**\n     * A sibling pipeline aggregation which identifies the bucket(s) with\n     * the minimum value of a specified metric in a sibling aggregation and\n     * outputs both the value and the key(s) of the bucket(s). The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function minBucketAggregation(\n        name: string,\n        bucketsPath?: string\n    ): MinBucketAggregation;\n\n    /**\n     * A sibling pipeline aggregation which calculates the sum across all bucket\n     * of a specified metric in a sibling aggregation. 
The specified metric must\n     * be numeric and the sibling aggregation must be a multi-bucket aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class SumBucketAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n    }\n\n    /**\n     * A sibling pipeline aggregation which calculates the sum across all bucket\n     * of a specified metric in a sibling aggregation. The specified metric must\n     * be numeric and the sibling aggregation must be a multi-bucket aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function sumBucketAggregation(\n        name: string,\n        bucketsPath?: string\n    ): SumBucketAggregation;\n\n    /**\n     * A sibling pipeline aggregation which calculates a variety of stats across\n     * all bucket of a specified metric in a sibling aggregation. The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class StatsBucketAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n    }\n\n    /**\n     * A sibling pipeline aggregation which calculates a variety of stats across\n     * all bucket of a specified metric in a sibling aggregation. 
The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function statsBucketAggregation(\n        name: string,\n        bucketsPath?: string\n    ): StatsBucketAggregation;\n\n    /**\n     * A sibling pipeline aggregation which calculates a variety of stats across\n     * all bucket of a specified metric in a sibling aggregation. The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class ExtendedStatsBucketAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n\n        /**\n         * Sets the number of standard deviations above/below the mean to display.\n         * Optional.\n         *\n         * @param {number} sigma Default is 2.\n         */\n        sigma(sigma: number): this;\n    }\n\n    /**\n     * A sibling pipeline aggregation which calculates a variety of stats across\n     * all bucket of a specified metric in a sibling aggregation. 
The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function extendedStatsBucketAggregation(\n        name: string,\n        bucketsPath?: string\n    ): ExtendedStatsBucketAggregation;\n\n    /**\n     * A sibling pipeline aggregation which calculates percentiles across all\n     * bucket of a specified metric in a sibling aggregation. The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class PercentilesBucketAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n\n        /**\n         * Sets the list of percentiles to calculate\n         *\n         * @param {Array<number>} percents The list of percentiles to calculate\n         */\n        percents(percents: number[]): this;\n    }\n\n    /**\n     * A sibling pipeline aggregation which calculates percentiles across all\n     * bucket of a specified metric in a sibling aggregation. 
The specified\n     * metric must be numeric and the sibling aggregation must be a multi-bucket\n     * aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function percentilesBucketAggregation(\n        name: string,\n        bucketsPath?: string\n    ): PercentilesBucketAggregation;\n\n    /**\n     * Given an ordered series of data, the Moving Average aggregation will\n     * slide a window across the data and emit the average value of that window.\n     * `moving_avg` aggregations must be embedded inside of a histogram or\n     * date_histogram aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class MovingAverageAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on MovingAverageAggregation\n         */\n        format(): never;\n\n        /**\n         * Sets the moving average weighting model that we wish to use. Optional.\n         *\n         * @param {string} model Can be `simple`, `linear`,\n         * `ewma` (aka \"single-exponential\"), `holt` (aka \"double exponential\")\n         * or `holt_winters` (aka \"triple exponential\").\n         * Default is `simple`\n         */\n        model(\n            model: 'simple' | 'linear' | 'ewma' | 'holt' | 'holt_winters'\n        ): this;\n\n        /**\n         * Sets the size of window to \"slide\" across the histogram. 
Optional.\n         *\n         * @param {number} window Default is 5\n         */\n        window(window: number): this;\n\n        /**\n         * If the model should be algorithmically minimized. Optional.\n         * Applicable on EWMA, Holt-Linear, Holt-Winters.\n         * Minimization is disabled by default for `ewma` and `holt_linear`,\n         * while it is enabled by default for `holt_winters`.\n         *\n         * @param {boolean} enable `false` for most models\n         */\n        minimize(enable: boolean): this;\n\n        /**\n         * Model-specific settings, contents which differ depending on the model specified.\n         * Optional.\n         *\n         * @param {object} settings\n         */\n        settings(settings: object): this;\n\n        /**\n         * Enable \"prediction\" mode, which will attempt to extrapolate into the future given\n         * the current smoothed, moving average\n         *\n         * @param {number} predict the number of predictions you would like appended to the\n         * end of the series\n         */\n        predict(predict: number): this;\n    }\n\n    /**\n     * Given an ordered series of data, the Moving Average aggregation will\n     * slide a window across the data and emit the average value of that window.\n     * `moving_avg` aggregations must be embedded inside of a histogram or\n     * date_histogram aggregation.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function movingAverageAggregation(\n        name: string,\n        bucketsPath?: string\n    ): MovingAverageAggregation;\n\n    /**\n     * Given an ordered series of data, the Moving Function aggregation\n     * will slide a window across the data and allow the user to specify\n     * a custom script that is executed on each window of data.\n     * For convenience, a number of common 
functions are predefined such as min/max, moving averages, etc.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over.\n     * @param {number=} window The size of window to \"slide\" across the histogram.\n     * @param {string=} script The script that should be executed on each window of data.\n     * @extends PipelineAggregationBase\n     */\n    export class MovingFunctionAggregation extends PipelineAggregationBase {\n        constructor(\n            name: string,\n            bucketsPath?: string,\n            window?: number,\n            script?: string\n        );\n\n        /**\n         * Sets the size of window to \"slide\" across the histogram. Required.\n         *\n         * @param {number} window\n         */\n        window(window: number): this;\n\n        /**\n         * Sets shift of window position. Optional.\n         *\n         * @param {number} shift\n         */\n        shift(shift: number): this;\n\n        /**\n         * Sets the script that should be executed on each window of data. 
Required.\n         *\n         * @param {string} script\n         */\n        script(script: string): this;\n    }\n\n    /**\n     * Given an ordered series of data, the Moving Function aggregation\n     * will slide a window across the data and allow the user to specify\n     * a custom script that is executed on each window of data.\n     * For convenience, a number of common functions are predefined such as min/max, moving averages, etc.\n     *\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over.\n     * @param {string=} window The size of window to \"slide\" across the histogram.\n     * @param {string=} script The script that should be executed on each window of data.\n     */\n    export function movingFunctionAggregation(\n        name: string,\n        bucketsPath?: string,\n        window?: number,\n        script?: string\n    ): MovingFunctionAggregation;\n\n    /**\n     * A parent pipeline aggregation which calculates the cumulative sum of\n     * a specified metric in a parent histogram (or date_histogram) aggregation.\n     * The specified metric must be numeric and the enclosing histogram must\n     * have min_doc_count set to 0 (default for histogram aggregations).\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class CumulativeSumAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on CumulativeSumAggregation\n         */\n        gapPolicy(): never;\n    }\n\n    /**\n     * A parent pipeline aggregation which calculates the cumulative sum of\n     * a specified metric in a parent 
histogram (or date_histogram) aggregation.\n     * The specified metric must be numeric and the enclosing histogram must\n     * have min_doc_count set to 0 (default for histogram aggregations).\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function cumulativeSumAggregation(\n        name: string,\n        bucketsPath?: string\n    ): CumulativeSumAggregation;\n\n    /**\n     * A parent pipeline aggregation which executes a script which can perform\n     * per bucket computations on specified metrics in the parent multi-bucket\n     * aggregation. The specified metric must be numeric and the script must\n     * return a numeric value.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class BucketScriptAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n\n        /**\n         * Sets script parameter for aggregation.\n         *\n         * @param {Script|string} script\n         * @throws {TypeError} If `script` is not an instance of `Script`\n         */\n        script(script: Script | string): this;\n    }\n\n    /**\n     * A parent pipeline aggregation which executes a script which can perform\n     * per bucket computations on specified metrics in the parent multi-bucket\n     * aggregation. 
The specified metric must be numeric and the script must\n     * return a numeric value.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function bucketScriptAggregation(\n        name: string,\n        bucketsPath?: string\n    ): BucketScriptAggregation;\n\n    /**\n     * A parent pipeline aggregation which executes a script which determines whether\n     * the current bucket will be retained in the parent multi-bucket aggregation.\n     * The specified metric must be numeric and the script must return a boolean value.\n     * If the script language is expression then a numeric return value is permitted.\n     * In this case 0.0 will be evaluated as false and all other values will evaluate to true.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class BucketSelectorAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n\n        /**\n         * @override\n         * @throws {Error} This method cannot be called on BucketSelectorAggregation\n         */\n        format(): never;\n\n        /**\n         * Sets script parameter for aggregation. 
Required.\n         * @param {Script|string} script\n         * @throws {TypeError} If `script` is not an instance of `Script`\n         */\n        script(script: Script | string): this;\n    }\n\n    /**\n     * A parent pipeline aggregation which executes a script which determines whether\n     * the current bucket will be retained in the parent multi-bucket aggregation.\n     * The specified metric must be numeric and the script must return a boolean value.\n     * If the script language is expression then a numeric return value is permitted.\n     * In this case 0.0 will be evaluated as false and all other values will evaluate to true.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function bucketSelectorAggregation(\n        name: string,\n        bucketsPath?: string\n    ): BucketSelectorAggregation;\n\n    /**\n     * A parent pipeline aggregation which sorts the buckets of its parent\n     * multi-bucket aggregation. Zero or more sort fields may be specified\n     * together with the corresponding sort order. Each bucket may be sorted\n     * based on its _key, _count or its sub-aggregations. 
In addition, parameters\n     * from and size may be set in order to truncate the result buckets.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @extends PipelineAggregationBase\n     */\n    export class BucketSortAggregation extends PipelineAggregationBase {\n        constructor(name: string);\n\n        /**\n         * Sets the list of fields to sort on.\n         *\n         * @param {Array<Sort>} sort The list of fields to sort on\n         */\n        sort(sort: Array<Sort>): this;\n\n        /**\n         * Sets the value buckets in positions prior to which will be truncated.\n         *\n         * @param {number} from Buckets in positions prior to the set value will be truncated.\n         */\n        from(from: number): this;\n\n        /**\n         * Sets the number of buckets to return.\n         *\n         * @param {number} size The number of buckets to return.\n         */\n        size(size: number): this;\n    }\n\n    /**\n     * A parent pipeline aggregation which sorts the buckets of its parent\n     * multi-bucket aggregation. Zero or more sort fields may be specified\n     * together with the corresponding sort order. Each bucket may be sorted\n     * based on its _key, _count or its sub-aggregations. 
In addition, parameters\n     * from and size may be set in order to truncate the result buckets.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     */\n    export function bucketSortAggregation(name: string): BucketSortAggregation;\n\n    /**\n     * Serial differencing is a technique where values in a time series are\n     * subtracted from itself at different time lags or periods.\n     * Serial differences are built by first specifying a `histogram` or `date_histogram` over a field.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     * @extends PipelineAggregationBase\n     */\n    export class SerialDifferencingAggregation extends PipelineAggregationBase {\n        constructor(name: string, bucketsPath?: string);\n\n        /**\n         * The historical bucket to subtract from the current value.\n         * Optional.\n         *\n         * @param {number} lag Default is 1.\n         */\n        lag(lag: number): this;\n    }\n\n    /**\n     * Serial differencing is a technique where values in a time series are\n     * subtracted from itself at different time lags or periods.\n     * Serial differences are built by first specifying a `histogram` or `date_histogram` over a field.\n     *\n     * @param {string} name The name which will be used to refer to this aggregation.\n     * @param {string=} bucketsPath The relative path of metric to aggregate over\n     */\n    export function serialDifferencingAggregation(\n        name: string,\n        bucketsPath?: string\n    ): SerialDifferencingAggregation;\n\n    /**\n     * The `matrix_stats` aggregation is a numeric aggregation that computes\n     * statistics over a set of document fields\n     *\n     * @param {string} name A valid aggregation name\n     * @param {Array=} fields Array of fields\n     * @extends Aggregation\n     
*/\n    export class MatrixStatsAggregation extends Aggregation {\n        constructor(name: string, fields?: object);\n\n        /**\n         * The `fields` setting defines the set of fields (as an array) for computing\n         * the statistics.\n         *\n         * @param {Array<string>} fields Array of fields\n         */\n        fields(fields: string[]): this;\n\n        /**\n         * The `mode` parameter controls what array value the aggregation will use for\n         * array or multi-valued fields\n         *\n         * @param {string} mode One of `avg`, `min`, `max`, `sum` and `median`\n         */\n        mode(mode: string): this;\n\n        /**\n         * The missing parameter defines how documents that are missing a value should\n         * be treated. By default they will be ignored but it is also possible to treat\n         * them as if they had a value.\n         *\n         * @param {object} missing Set of fieldname : value mappings to specify default\n         * values per field\n         */\n        missing(missing: object): this;\n    }\n\n    /**\n     * The `matrix_stats` aggregation is a numeric aggregation that computes\n     * statistics over a set of document fields\n     *\n     * @param {string} name A valid aggregation name\n     * @param {Array=} fields Array of fields\n     */\n    export function matrixStatsAggregation(\n        name: string,\n        fields?: object\n    ): MatrixStatsAggregation;\n\n    /**\n     * `ScoreFunction` provides support for common options used across\n     * various `ScoreFunction` implementations.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} name\n     */\n    class ScoreFunction {\n        constructor(name: string);\n\n        /**\n         * Adds a filter query whose matching documents will have the score 
function applied.\n         *\n         * @param {Query} filterQry A valid `Query` object.\n         */\n        filter(filterQry: Query): this;\n\n        /**\n         * Sets the weight of the score function\n         *\n         * @param {number} weight The weight of this score function.\n         */\n        weight(weight: number): this;\n\n        /**\n         * Overrides default `toJSON` to return DSL representation of the score function\n         * class instance.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * The `script_score` function allows you to wrap another query and customize\n     * the scoring of it optionally with a computation derived from other numeric\n     * field values in the doc using a script expression.\n     *\n     * @param {Script|string} script\n     * @extends ScoreFunction\n     */\n    export class ScriptScoreFunction extends ScoreFunction {\n        constructor(script: Script | string);\n\n        /**\n         * @param {Script|string} script\n         */\n        script(script: Script | string): this;\n    }\n\n    /**\n     * The `script_score` function allows you to wrap another query and customize\n     * the scoring of it optionally with a computation derived from other numeric\n     * field values in the doc using a script expression.\n     *\n     * @param {Script|string} script\n     */\n    export function scriptScoreFunction(\n        script: Script | string\n    ): ScriptScoreFunction;\n\n    /**\n     * The `weight` score allows you to multiply the score by the provided `weight`.\n     * This can sometimes be desired since boost value set on specific queries gets\n     * normalized, while for this score function it does not.\n     * The number value is of type float.\n     *\n     * @param {number=} weight\n     * @extends ScoreFunction\n     */\n    export class WeightScoreFunction extends ScoreFunction {\n        constructor(weight?: number);\n\n        /**\n      
   * Overrides default `toJSON` to return DSL representation of the score function\n         * class instance.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * The `weight` score allows you to multiply the score by the provided `weight`.\n     * This can sometimes be desired since boost value set on specific queries gets\n     * normalized, while for this score function it does not.\n     * The number value is of type float.\n     *\n     * @param {number=} weight\n     */\n    export function weightScoreFunction(weight?: number): WeightScoreFunction;\n\n    /**\n     * The `random_score` generates scores using a hash of the `_uid` field,\n     * with a `seed` for variation. If `seed` is not specified, the current time is used.\n     *\n     * @extends ScoreFunction\n     */\n    export class RandomScoreFunction extends ScoreFunction {\n        constructor();\n        /**\n         * Sets random seed value.\n         *\n         * @param {number} seed A seed value.\n         */\n        seed(seed: number): this;\n    }\n\n    /**\n     * The `random_score` generates scores using a hash of the `_uid` field,\n     * with a `seed` for variation. If `seed` is not specified, the current time is used.\n     */\n    export function randomScoreFunction(): RandomScoreFunction;\n\n    /**\n     * The `field_value_factor` function allows you to use a field from a document\n     * to influence the score. It's similar to using the `script_score` function, however,\n     * it avoids the overhead of scripting. 
If used on a multi-valued field, only the\n     * first value of the field is used in calculations.\n     *\n     * @param {string=} field the field to be extracted from the document.\n     * @extends ScoreFunction\n     */\n    export class FieldValueFactorFunction extends ScoreFunction {\n        constructor(field?: string);\n\n        /**\n         * Sets the field to be extracted from the document.\n         *\n         * @param {string} field the field to be extracted from the document.\n         */\n        field(field: string): this;\n\n        /**\n         * Optional factor to multiply the field value with, defaults to `1`.\n         *\n         * @param {number} factor Factor to multiply the field with.\n         */\n        factor(factor: number): this;\n\n        /**\n         * Modifier to apply to the field value, can be one of: `none`, `log`,\n         * `log1p`, `log2p`, `ln`, `ln1p`, `ln2p`, `square`, `sqrt`, or `reciprocal`.\n         * Defaults to `none`.\n         *\n         * @param {string} mod Modifier to apply on field. Can be one of: `none`, `log`,\n         * `log1p`, `log2p`, `ln`, `ln1p`, `ln2p`, `square`, `sqrt`, or `reciprocal`.\n         * Defaults to `none`.\n         */\n        modifier(\n            mod:\n                | 'none'\n                | 'log'\n                | 'log1p'\n                | 'log2p'\n                | 'ln'\n                | 'ln1p'\n                | 'ln2p'\n                | 'square'\n                | 'sqrt'\n                | 'reciprocal'\n        ): this;\n\n        /**\n         * Value used if the document doesn’t have that field. 
The modifier and factor\n         * are still applied to it as though it were read from the document.\n         *\n         * @param {number} val To be used with documents which do not have field value.\n         */\n        missing(val: number): this;\n    }\n\n    /**\n     * The `field_value_factor` function allows you to use a field from a document\n     * to influence the score. It's similar to using the `script_score` function, however,\n     * it avoids the overhead of scripting. If used on a multi-valued field, only the\n     * first value of the field is used in calculations.\n     *\n     * @param {string=} field the field to be extracted from the document.\n     */\n    export function fieldValueFactorFunction(\n        field?: string\n    ): FieldValueFactorFunction;\n\n    /**\n     * Decay functions score a document with a function that decays depending on\n     * the distance of a numeric field value of the document from a user given\n     * origin. This is similar to a range query, but with smooth edges instead of\n     * boxes.\n     * Supported decay functions are: `linear`, `exp`, and `gauss`.\n     *\n     * If no `mode` is supplied, `gauss` will be used.\n     *\n     * @param {string=} mode Can be one of `linear`, `exp`, and `gauss`.\n     * Defaults to `gauss`.\n     * @param {string=} field the document field to run decay function against.\n     * @extends ScoreFunction\n     */\n    export class DecayScoreFunction extends ScoreFunction {\n        // I dunno how to denote the default value with a union type\n        constructor(mode?: 'linear' | 'exp' | 'gauss', field?: string);\n\n        /**\n         * Set the decay mode.\n         *\n         * @param {string} mode  Can be one of `linear`, `exp`, and `gauss`.\n         * Defaults to `gauss`.\n         */\n        mode(mode: 'linear' | 'exp' | 'gauss'): this;\n\n        /**\n         * Sets the decay mode to linear.\n         * Alias for `mode('linear')`\n         */\n        linear(): 
this;\n\n        /**\n         * Sets the decay mode to exp.\n         * Alias for `mode('exp')`\n         */\n        exp(): this;\n\n        /**\n         * Sets the decay mode to gauss.\n         * Alias for `mode('gauss')`\n         */\n        gauss(): this;\n\n        /**\n         * Sets the document field to run decay function against.\n         *\n         * @param {string} field the document field to run decay function against.\n         */\n        field(field: string): this;\n\n        /**\n         * The point of origin used for calculating distance. Must be given as a number\n         * for numeric field, date for date fields and geo point for geo fields.\n         * Required for geo and numeric field. For date fields the default is `now`.\n         * Date math (for example `now-1h`) is supported for origin.\n         *\n         * @param {number|string|Object} origin A valid origin value for the field type.\n         */\n        origin(origin: number | string | object): this;\n\n        /**\n         * Required for all types. Defines the distance from origin + offset at which\n         * the computed score will equal decay parameter. For geo fields: Can be defined\n         * as number+unit (`1km`, `12m`,…). Default unit is meters. For date fields: Can be\n         * defined as a number+unit (`1h`, `10d`,…). Default unit is milliseconds.\n         * For numeric field: Any number.\n         *\n         * @param {number|string} scale A valid scale value for the field type.\n         */\n        scale(scale: number | string): this;\n\n        /**\n         * If an `offset` is defined, the decay function will only compute the decay function\n         * for documents with a distance greater than the defined offset. 
The default is `0`.\n         *\n         * @param {number|string} offset A valid offset value for the field type.\n         */\n        offset(offset: number | string): this;\n\n        /**\n         * The `decay` parameter defines how documents are scored at the distance given at `scale`.\n         * If no `decay` is defined, documents at the distance `scale` will be scored `0.5`.\n         *\n         * @param {number} decay A decay value as a double.\n         */\n        decay(decay: number): this;\n    }\n\n    /**\n     * Decay functions score a document with a function that decays depending on\n     * the distance of a numeric field value of the document from a user given\n     * origin. This is similar to a range query, but with smooth edges instead of\n     * boxes.\n     * Supported decay functions are: `linear`, `exp`, and `gauss`.\n     *\n     * If no `mode` is supplied, `gauss` will be used.\n     *\n     * @param {string=} mode Can be one of `linear`, `exp`, and `gauss`.\n     * Defaults to `gauss`.\n     * @param {string=} field the document field to run decay function against.\n     */\n    export function decayScoreFunction(\n        mode?: 'linear' | 'exp' | 'gauss',\n        field?: string\n    ): DecayScoreFunction;\n\n    /**\n     * Base class implementation for all suggester types.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class should be extended and used, as validation against the class\n     * type is present in various places.\n     *\n     * @param {string} suggesterType The type of suggester.\n     * Can be one of `term`, `phrase`, `completion`\n     * @param {string} name The name of the Suggester, an arbitrary identifier\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     *\n     * @throws {Error} if `name` is empty\n     * @throws {Error} if `suggesterType` is empty\n  
   */\n    class Suggester {\n        constructor(suggesterType: string, name: string, field?: string);\n\n        /**\n         * Sets field to fetch the candidate suggestions from. This is a required option\n         * that either needs to be set globally or per suggestion.\n         *\n         * @param {string} field a valid field name\n         */\n        field(field: string): this;\n\n        /**\n         * Sets the number of suggestions to return (defaults to `5`).\n         *\n         * @param {number} size\n         */\n        size(size: number): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `suggester`\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * The `AnalyzedSuggesterBase` provides support for common options used\n     * in `TermSuggester` and `PhraseSuggester`.\n     *\n     * **NOTE:** Instantiating this directly should not be required.\n     * However, if you wish to add a custom implementation for whatever reason,\n     * this class could be extended.\n     *\n     * @param {string} suggesterType The type of suggester.\n     * Can be one of `term`, `phrase`\n     * @param {string} name The name of the Suggester, an arbitrary identifier\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     * @param {string=} txt A string to get suggestions for.\n     *\n     * @throws {Error} if `name` is empty\n     * @throws {Error} if `suggesterType` is empty\n     *\n     * @extends Suggester\n     */\n    class AnalyzedSuggesterBase extends Suggester {\n        constructor(\n            suggesterType: string,\n            name: string,\n            field?: string,\n            txt?: string\n        );\n\n        /**\n         * Sets the text to get suggestions for. 
If not set, the global\n         * suggestion text will be used.\n         *\n         * @param {string} txt A string to get suggestions for.\n         */\n        text(txt: string): this;\n\n        /**\n         * Sets the analyzer to analyse the suggest text with. Defaults to\n         * the search analyzer of the suggest field.\n         *\n         * @param {string} analyzer The analyzer to analyse the suggest text with.\n         */\n        analyzer(analyzer: string): this;\n\n        /**\n         * Sets the maximum number of suggestions to be retrieved from each individual shard.\n         * During the reduce phase only the top N suggestions are returned based on the `size`\n         * option. Defaults to the `size` option. Setting this to a value higher than the `size`\n         * can be useful in order to get a more accurate document frequency for spelling\n         * corrections at the cost of performance. Due to the fact that terms are partitioned\n         * amongst shards, the shard level document frequencies of spelling corrections\n         * may not be precise. 
Increasing this will make these document frequencies\n         * more precise.\n         *\n         * @param {number} size\n         */\n        shardSize(size: number): this;\n    }\n\n    /**\n     * The term suggester suggests terms based on edit distance.\n     * The provided suggest text is analyzed before terms are suggested.\n     * The suggested terms are provided per analyzed suggest text token.\n     * The term suggester doesn’t take the query into account that is part of request.\n     *\n     * @param {string} name The name of the Suggester, an arbitrary identifier\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     * @param {string=} txt A string to get suggestions for.\n     *\n     * @throws {Error} if `name` is empty\n     *\n     * @extends AnalyzedSuggesterBase\n     */\n    export class TermSuggester extends AnalyzedSuggesterBase {\n        constructor(name: string, field?: string, txt?: string);\n\n        /**\n         * Sets the sort to control how suggestions should be sorted per\n         * suggest text term.\n         * Two possible values:\n         *   - `score`: Sort by score first, then document frequency and\n         *     then the term itself.\n         *   - `frequency`: Sort by document frequency first, then similarity\n         *     score and then the term itself.\n         * @param {string} sort Can be `score` or `frequency`\n         * @throws {Error} If `sort` is neither `score` nor `frequency`.\n         */\n        sort(sort: 'score' | 'frequency'): this;\n\n        /**\n         * Sets the suggest mode which controls what suggestions are included\n         * or controls for what suggest text terms, suggestions should be suggested.\n         *\n         * Three possible values can be specified:\n         *   - `missing`: Only provide suggestions for suggest text terms that\n         *     are not in the index. 
This is the default.\n         *   - `popular`:  Only suggest suggestions that occur in more docs\n         *     than the original suggest text term.\n         *   - `always`: Suggest any matching suggestions based on terms in the suggest text.\n         *\n         * @param {string} mode Can be `missing`, `popular` or `always`\n         * @throws {Error} If `mode` is not one of `missing`, `popular` or `always`.\n         */\n        suggestMode(mode: 'missing' | 'popular' | 'always'): this;\n\n        /**\n         * Sets the maximum edit distance candidate suggestions can have\n         * in order to be considered as a suggestion. Can only be a value\n         * between 1 and 2. Any other value results in a bad request\n         * error being thrown. Defaults to 2.\n         *\n         * @param {number} maxEdits Value between 1 and 2. Defaults to 2.\n         */\n        maxEdits(maxEdits: number): this;\n\n        /**\n         * Sets the number of minimal prefix characters that must match in order\n         * to be a candidate suggestion. Defaults to 1.\n         * Increasing this number improves spellcheck performance.\n         * Usually misspellings don't occur in the beginning of terms.\n         *\n         * @param {number} len The number of minimal prefix characters that must match in order\n         * to be a candidate suggestion. Defaults to 1.\n         */\n        prefixLength(len: number): this;\n\n        /**\n         * Sets the minimum length a suggest text term must have in order to be included.\n         * Defaults to 4.\n         *\n         * @param {number} len The minimum length a suggest text term must have in order\n         * to be included. 
Defaults to 4.\n         */\n        minWordLength(len: number): this;\n\n        /**\n         * Sets factor that is used to multiply with the `shards_size` in order to inspect\n         * more candidate spell corrections on the shard level.\n         * Can improve accuracy at the cost of performance. Defaults to 5.\n         *\n         * @param {number} maxInspections Factor used to multiply with `shards_size` in\n         * order to inspect more candidate spell corrections on the shard level.\n         * Defaults to 5\n         */\n        maxInspections(maxInspections: number): this;\n\n        /**\n         * Sets the minimal threshold in number of documents a suggestion should appear in.\n         * This can be specified as an absolute number or as a relative percentage of\n         * number of documents. This can improve quality by only suggesting high\n         * frequency terms. Defaults to 0f and is not enabled. If a value higher than 1\n         * is specified then the number cannot be fractional. The shard level document\n         * frequencies are used for this option.\n         *\n         * @param {number} limit Threshold in number of documents a suggestion\n         * should appear in. Defaults to 0f and is not enabled.\n         */\n        minDocFreq(limit: number): this;\n\n        /**\n         * Sets the maximum threshold in number of documents a suggest text token can\n         * exist in order to be included. Can be a relative percentage number (e.g. 0.4)\n         * or an absolute number to represent document frequencies. If a value higher\n         * than 1 is specified then fractional can not be specified. Defaults to 0.01f.\n         * This can be used to exclude high frequency terms from being spellchecked.\n         * High frequency terms are usually spelled correctly. On top of this it also\n         * improves the spellcheck performance. 
The shard level document frequencies are\n         * used for this option.\n         *\n         * @param {number} limit Maximum threshold in number of documents a suggest text\n         * token can exist in order to be included. Defaults to 0.01f.\n         */\n        maxTermFreq(limit: number): this;\n\n        /**\n         * Sets the string distance implementation to use for comparing how similar\n         * suggested terms are.\n         *\n         * Five possible values can be specified:\n         *   - `internal`: The default based on `damerau_levenshtein` but highly optimized for\n         *     comparing string distance for terms inside the index.\n         *   - `damerau_levenshtein`: String distance algorithm based on Damerau-Levenshtein\n         *     algorithm.\n         *   - `levenstein`: String distance algorithm based on Levenstein edit distance\n         *     algorithm.\n         *   - `jarowinkler`: String distance algorithm based on Jaro-Winkler algorithm.\n         *   - `ngram`: String distance algorithm based on character n-grams.\n         *\n         * @param {string} implMethod One of `internal`, `damerau_levenshtein`, `levenstein`,\n         * `jarowinkler`, `ngram`\n         *\n         * @throws {Error} If `implMethod` is not one of `internal`, `damerau_levenshtein`,\n         * `levenstein`, `jarowinkler` or `ngram`.\n         */\n        stringDistance(\n            implMethod:\n                | 'internal'\n                | 'damerau_levenshtein'\n                | 'levenstein'\n                | 'jarowinkler'\n                | 'ngram'\n        ): this;\n    }\n\n    /**\n     * The term suggester suggests terms based on edit distance.\n     * The provided suggest text is analyzed before terms are suggested.\n     * The suggested terms are provided per analyzed suggest text token.\n     * The term suggester doesn’t take the query into account that is part of request.\n     *\n     * @param {string} name The name of the Suggester, 
an arbitrary identifier\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     * @param {string=} txt A string to get suggestions for.\n     *\n     * @throws {Error} if `name` is empty\n     */\n    export function termSuggester(\n        name: string,\n        field?: string,\n        txt?: string\n    ): TermSuggester;\n\n    /**\n     * The `phrase` suggester uses candidate generators to produce a list of possible\n     * terms per term in the given text. A single candidate generator is similar\n     * to a `term` suggester called for each individual term in the text. The output\n     * of the generators is subsequently scored in combination with the candidates\n     * from the other terms to form suggestion candidates.\n     * The Phrase suggest API accepts a list of generators under the key `direct_generator`\n     * each of the generators in the list are called per term in the original text.\n     *\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     */\n    export class DirectGenerator {\n        constructor(field?: string);\n\n        /**\n         * Sets field to fetch the candidate suggestions from. 
This is a required option\n         * that either needs to be set globally or per suggestion.\n         *\n         * @param {string} field a valid field name\n         */\n        field(field: string): this;\n\n        /**\n         * Sets the number of suggestions to return (defaults to `5`).\n         *\n         * @param {number} size\n         */\n        size(size: number): this;\n\n        /**\n         * Sets the suggest mode which controls what suggestions are included\n         * or controls for what suggest text terms, suggestions should be suggested.\n         *\n         * All values other than `always` can be thought of as an optimization to\n         * generate fewer suggestions to test on each shard and are not rechecked\n         * when combining the suggestions generated on each shard. Thus `missing`\n         * will generate suggestions for terms on shards that do not contain them\n         * even if other shards do contain them. Those should be filtered out\n         * using `confidence`.\n         *\n         * Three possible values can be specified:\n         *   - `missing`: Only provide suggestions for suggest text terms that\n         *     are not in the index. This is the default.\n         *   - `popular`:  Only suggest suggestions that occur in more docs\n         *     than the original suggest text term.\n         *   - `always`: Suggest any matching suggestions based on terms in the suggest text.\n         *\n         * @param {string} mode Can be `missing`, `popular` or `always`\n         *\n         * @throws {Error} If `mode` is not one of `missing`, `popular` or `always`.\n         */\n        suggestMode(mode: 'missing' | 'popular' | 'always'): this;\n\n        /**\n         * Sets the maximum edit distance candidate suggestions can have\n         * in order to be considered as a suggestion. Can only be a value\n         * between 1 and 2. Any other value results in a bad request\n         * error being thrown. 
Defaults to 2.\n         *\n         * @param {number} maxEdits Value between 1 and 2. Defaults to 2.\n         */\n        maxEdits(maxEdits: number): this;\n\n        /**\n         * Sets the number of minimal prefix characters that must match in order\n         * to be a candidate suggestion. Defaults to 1.\n         * Increasing this number improves spellcheck performance.\n         * Usually misspellings don't occur in the beginning of terms.\n         *\n         * @param {number} len The number of minimal prefix characters that must match in order\n         * to be a candidate suggestion. Defaults to 1.\n         */\n        prefixLength(len: number): this;\n\n        /**\n         * Sets the minimum length a suggest text term must have in order to be included.\n         * Defaults to 4.\n         *\n         * @param {number} len The minimum length a suggest text term must have in order\n         * to be included. Defaults to 4.\n         */\n        minWordLength(len: number): this;\n\n        /**\n         * Sets factor that is used to multiply with the `shards_size` in order to inspect\n         * more candidate spell corrections on the shard level.\n         * Can improve accuracy at the cost of performance. Defaults to 5.\n         *\n         * @param {number} maxInspections Factor used to multiply with `shards_size` in\n         * order to inspect more candidate spell corrections on the shard level.\n         * Defaults to 5\n         */\n        maxInspections(maxInspections: number): this;\n\n        /**\n         * Sets the minimal threshold in number of documents a suggestion should appear in.\n         * This can be specified as an absolute number or as a relative percentage of\n         * number of documents. This can improve quality by only suggesting high\n         * frequency terms. Defaults to 0f and is not enabled. If a value higher than 1\n         * is specified then the number cannot be fractional. 
The shard level document\n         * frequencies are used for this option.\n         *\n         * @param {number} limit Threshold in number of documents a suggestion\n         * should appear in. Defaults to 0f and is not enabled.\n         */\n        minDocFreq(limit: number): this;\n\n        /**\n         * Sets the maximum threshold in number of documents a suggest text token can\n         * exist in order to be included. Can be a relative percentage number (e.g. 0.4)\n         * or an absolute number to represent document frequencies. If a value higher\n         * than 1 is specified then fractional can not be specified. Defaults to 0.01f.\n         * This can be used to exclude high frequency terms from being spellchecked.\n         * High frequency terms are usually spelled correctly. On top of this it also\n         * improves the spellcheck performance. The shard level document frequencies are\n         * used for this option.\n         *\n         * @param {number} limit Maximum threshold in number of documents a suggest text\n         * token can exist in order to be included. Defaults to 0.01f.\n         */\n        maxTermFreq(limit: number): this;\n\n        /**\n         * Sets the filter (analyzer) that is applied to each of the tokens passed to this\n         * candidate generator. 
This filter is applied to the original token before\n         * candidates are generated.\n         *\n         * @param {string} filter a filter (analyzer) that is applied to each of the\n         * tokens passed to this candidate generator.\n         */\n        preFilter(filter: string): this;\n\n        /**\n         * Sets the filter (analyzer) that is applied to each of the generated tokens\n         * before they are passed to the actual phrase scorer.\n         *\n         * @param {string} filter a filter (analyzer) that is applied to each of the\n         * generated tokens before they are passed to the actual phrase scorer.\n         */\n        postFilter(filter: string): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `direct_generator`\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * The `phrase` suggester uses candidate generators to produce a list of possible\n     * terms per term in the given text. A single candidate generator is similar\n     * to a `term` suggester called for each individual term in the text. The output\n     * of the generators is subsequently scored in combination with the candidates\n     * from the other terms to form suggestion candidates.\n     * The Phrase suggest API accepts a list of generators under the key `direct_generator`\n     * each of the generators in the list are called per term in the original text.\n     *\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     */\n    export function directGenerator(field?: string): DirectGenerator;\n\n    /**\n     * The phrase suggester adds additional logic on top of the `term` suggester\n     * to select entire corrected phrases instead of individual tokens weighted\n     * based on `ngram-language` models. 
In practice this suggester will be able\n     * to make better decisions about which tokens to pick based on co-occurrence\n     * and frequencies.\n     *\n     * @param {string} name The name of the Suggester, an arbitrary identifier\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     * @param {string=} txt A string to get suggestions for.\n     *\n     * @throws {Error} if `name` is empty\n     *\n     * @extends AnalyzedSuggesterBase\n     */\n    export class PhraseSuggester extends AnalyzedSuggesterBase {\n        constructor(name: string, field?: string, txt?: string);\n\n        /**\n         * Sets max size of the n-grams (shingles) in the `field`. If the field\n         * doesn't contain n-grams (shingles) this should be omitted or set to `1`.\n         * Note: Elasticsearch tries to detect the gram size based on\n         * the specified `field`. If the field uses a `shingle` filter the `gram_size`\n         * is set to the `max_shingle_size` if not explicitly set.\n         *\n         * @param {number} size Max size of the n-grams (shingles) in the `field`.\n         */\n        gramSize(size: number): this;\n\n        /**\n         * Sets the likelihood of a term being misspelled even if the term exists\n         * in the dictionary. The default is `0.95` corresponding to 5% of the\n         * real words being misspelled.\n         *\n         * @param {number} factor Likelihood of a term being misspelled. Defaults to `0.95`\n         */\n        realWordErrorLikelihood(factor: number): this;\n\n        /**\n         * Sets the confidence level that defines a factor applied to the input phrases score\n         * which is used as a threshold for other suggest candidates. Only candidates\n         * that score higher than the threshold will be included in the result.\n         * For instance a confidence level of `1.0` will only return suggestions\n         * that score higher than the input phrase. 
If set to `0.0` the top N candidates\n         * are returned. The default is `1.0`.\n         *\n         * @param {number} level Factor applied to the input phrases score, used as\n         * a threshold for other suggest candidates.\n         */\n        confidence(level: number): this;\n\n        /**\n         * Sets the maximum percentage of the terms that are at most considered to be\n         * misspellings in order to form a correction. This method accepts a float\n         * value in the range `[0..1)` as a fraction of the actual query terms or a\n         * number `>=1` as an absolute number of query terms. The default is set\n         * to `1.0` which corresponds to that only corrections with at most\n         * 1 misspelled term are returned. Note that setting this too high can\n         * negatively impact performance. Low values like 1 or 2 are recommended\n         * otherwise the time spent in suggest calls might exceed the time spent\n         * in query execution.\n         *\n         * @param {number} limit The maximum percentage of the terms that are at most considered\n         * to be misspellings in order to form a correction.\n         */\n        maxErrors(limit: number): this;\n\n        /**\n         * Sets the separator that is used to separate terms in the bigram field.\n         * If not set the whitespace character is used as a separator.\n         *\n         * @param {string} sep The separator that is used to separate terms in the\n         * bigram field.\n         */\n        separator(sep: string): this;\n\n        /**\n         * Sets up suggestion highlighting. If not provided then no `highlighted` field\n         * is returned. If provided must contain exactly `pre_tag` and `post_tag` which\n         * are wrapped around the changed tokens. 
If multiple tokens in a row are changed\n         * the entire phrase of changed tokens is wrapped rather than each token.\n         *\n         * @param {string} preTag Pre-tag to wrap token\n         * @param {string} postTag Post-tag to wrap token\n         */\n        highlight(preTag: string, postTag: string): this;\n\n        /**\n         * Checks each suggestion against the specified `query` to prune suggestions\n         * for which no matching docs exist in the index. The collate query for\n         * a suggestion is run only on the local shard from which the suggestion\n         * has been generated from. The `query` must be specified, and it is run\n         * as a `template` query.\n         *\n         * The current suggestion is automatically made available as the\n         * `{{suggestion}}` variable, which should be used in your query.\n         * Additionally, you can specify a `prune` to control if all phrase\n         * suggestions will be returned, when set to `true` the suggestions will\n         * have an additional option `collate_match`, which will be true if matching\n         * documents for the phrase was found, `false` otherwise. The default value\n         * for prune is `false`.\n         *\n         * @param {object} opts The options for `collate`. Can include the following:\n         *   - `query`: The `query` to prune suggestions for which\n         *      no matching docs exist in the index. It is run as a `template` query.\n         *   - `params`: The parameters to be passed to the template. The suggestion\n         *      value will be added to the variables you specify.\n         *   - `prune`: When set to `true`, the suggestions will\n         *      have an additional option `collate_match`, which will be true if matching\n         *      documents for the phrase was found, `false` otherwise. 
The default value\n         *      for prune is `false`.\n         */\n        collate(opts: object): this;\n\n        /**\n         * Sets the smoothing model to balance weight between infrequent grams\n         * (grams (shingles) are not existing in the index) and frequent grams\n         * (appear at least once in the index).\n         *\n         * Three possible values can be specified:\n         *   - `stupid_backoff`: a simple backoff model that backs off to lower order\n         *     n-gram models if the higher order count is 0 and discounts the lower order\n         *     n-gram model by a constant factor. The default `discount` is `0.4`.\n         *     Stupid Backoff is the default model\n         *   - `laplace`: a smoothing model that uses an additive smoothing where a\n         *     constant (typically `1.0` or smaller) is added to all counts to balance weights,\n         *     The default `alpha` is `0.5`.\n         *   - `linear_interpolation`: a smoothing model that takes the weighted mean of the\n         *     unigrams, bigrams and trigrams based on user supplied weights (lambdas).\n         *     Linear Interpolation doesn’t have any default values.\n         *     All parameters (`trigram_lambda`, `bigram_lambda`, `unigram_lambda`)\n         *     must be supplied.\n         *\n         * @param {string} model One of `stupid_backoff`, `laplace`, `linear_interpolation`\n         */\n        smoothing(\n            model: 'stupid_backoff' | 'laplace' | 'linear_interpolation'\n        ): this;\n\n        /**\n         * Sets the given list of candidate generators which produce a list of possible terms\n         * per term in the given text. 
Each of the generators in the list are\n         * called per term in the original text.\n         * The output of the generators is subsequently scored in combination with the\n         * candidates from the other terms to form suggestion candidates.\n         *\n         * @param {Array<DirectGenerator>|DirectGenerator} dirGen Array of `DirectGenerator`\n         * instances or a single instance of `DirectGenerator`\n         */\n        directGenerator(dirGen: DirectGenerator[] | DirectGenerator): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `phrase suggester`\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * The phrase suggester adds additional logic on top of the `term` suggester\n     * to select entire corrected phrases instead of individual tokens weighted\n     * based on `ngram-language` models. In practice this suggester will be able\n     * to make better decisions about which tokens to pick based on co-occurrence\n     * and frequencies.\n     *\n     * @param {string} name The name of the Suggester, an arbitrary identifier\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     * @param {string=} txt A string to get suggestions for.\n     *\n     * @throws {Error} if `name` is empty\n     */\n    export function phraseSuggester(\n        name: string,\n        field?: string,\n        txt?: string\n    ): PhraseSuggester;\n\n    /**\n     * The completion suggester provides auto-complete/search-as-you-type\n     * functionality. This is a navigational feature to guide users to relevant\n     * results as they are typing, improving search precision. 
It is not meant\n     * for spell correction or did-you-mean functionality like the term or\n     * phrase suggesters.\n     *\n     * Ideally, auto-complete functionality should be as fast as a user types to\n     * provide instant feedback relevant to what a user has already typed in.\n     * Hence, completion suggester is optimized for speed. The suggester uses\n     * data structures that enable fast lookups, but are costly to build\n     * and are stored in-memory.\n     *\n     * @param {string} name The name of the Suggester, an arbitrary identifier\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     *\n     * @throws {Error} if `name` is empty\n     *\n     * @extends Suggester\n     */\n    export class CompletionSuggester extends Suggester {\n        constructor(name: string, field?: string);\n\n        /**\n         * Sets the `prefix` for the `CompletionSuggester` query.\n         *\n         * @param {string} prefix\n         */\n        prefix(prefix: string): this;\n\n        /**\n         * Sets whether duplicate suggestions should be filtered out (defaults to false).\n         *\n         * NOTE: This option was added in elasticsearch v6.1.\n         *\n         * @param {boolean} skip Enable/disable skipping duplicates\n         */\n        skipDuplicates(skip?: boolean): this;\n\n        /**\n         * Sets the `fuzzy` parameter. 
Can be customised with specific fuzzy parameters.\n         *\n         * @param {boolean|Object=} fuzzy Enable/disable `fuzzy` using boolean or\n         * object(with params)\n         */\n        fuzzy(fuzzy?: boolean | object): this;\n\n        /**\n         * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n         * the number of one character changes that need to be made to one string to make it\n         * the same as another string.\n         *\n         * @param {number|string} factor Can be specified either as a number, or the maximum\n         * number of edits, or as `AUTO` which generates an edit distance based on the length\n         * of the term.\n         */\n        fuzziness(factor: number | string): this;\n\n        /**\n         * Transpositions (`ab` → `ba`) are allowed by default but can be disabled\n         * by setting `transpositions` to false.\n         *\n         * @param {boolean} enable\n         */\n        transpositions(enable: boolean): this;\n\n        /**\n         * Sets the minimum length of the input before fuzzy suggestions are returned,\n         * defaults 3\n         *\n         * @param {number} len Minimum length of the input before fuzzy suggestions\n         * are returned, defaults 3\n         */\n        minLength(len: number): this;\n\n        /**\n         * The number of initial characters which will not be \"fuzzified\".\n         * This helps to reduce the number of terms which must be examined. Defaults to `1`.\n         *\n         * @param {number} len Characters to skip fuzzy for. 
Defaults to `1`.\n         */\n        prefixLength(len: number): this;\n\n        /**\n         * If `true`, all measurements (like fuzzy edit distance, transpositions,\n         * and lengths) are measured in Unicode code points instead of in bytes.\n         * This is slightly slower than raw bytes, so it is set to `false` by default.\n         *\n         * @param {boolean} enable Measure in Unicode code points instead of in bytes.\n         * `false` by default.\n         */\n        unicodeAware(enable: boolean): this;\n\n        /**\n         * Sets the regular expression for completion suggester which supports regex queries.\n         *\n         * @param {string} expr Regular expression\n         */\n        regex(expr: string): this;\n\n        /**\n         * Set special flags. Possible flags are `ALL` (default),\n         * `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`.\n         *\n         * @param {string} flags `|` separated flags. Possible flags are `ALL` (default),\n         * `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`.\n         */\n        flags(flags: string): this;\n\n        /**\n         * Limit on how many automaton states regexp queries are allowed to create.\n         * This protects against too-difficult (e.g. exponentially hard) regexps.\n         * Defaults to 10000. You can raise this limit to allow more complex regular\n         * expressions to execute.\n         *\n         * @param {number} limit\n         */\n        maxDeterminizedStates(limit: number): this;\n\n        /**\n         * The completion suggester considers all documents in the index, but it is often\n         * desirable to serve suggestions filtered and/or boosted by some criteria.\n         * To achieve suggestion filtering and/or boosting, you can add context mappings\n         * while configuring a completion field. You can define multiple context mappings\n         * for a completion field. 
Every context mapping has a unique name and a type.\n         *\n         * @param {string} name\n         * @param {Array|Object} ctx\n         */\n        contexts(name: string, ctx: object[] | string[] | object): this;\n    }\n\n    /**\n     * The completion suggester provides auto-complete/search-as-you-type\n     * functionality. This is a navigational feature to guide users to relevant\n     * results as they are typing, improving search precision. It is not meant\n     * for spell correction or did-you-mean functionality like the term or\n     * phrase suggesters.\n     *\n     * Ideally, auto-complete functionality should be as fast as a user types to\n     * provide instant feedback relevant to what a user has already typed in.\n     * Hence, completion suggester is optimized for speed. The suggester uses\n     * data structures that enable fast lookups, but are costly to build\n     * and are stored in-memory.\n     *\n     * @param {string} name The name of the Suggester, an arbitrary identifier\n     * @param {string=} field The field to fetch the candidate suggestions from.\n     *\n     * @throws {Error} if `name` is empty\n     *\n     * @extends Suggester\n     */\n    export function completionSuggester(\n        name: string,\n        field?: string\n    ): CompletionSuggester;\n\n    /**\n     * Allows to highlight search results on one or more fields. In order to\n     * perform highlighting, the actual content of the field is required. If the\n     * field in question is stored (has store set to yes in the mapping), it will\n     * be used, otherwise, the actual _source will be loaded and the relevant\n     * field will be extracted from it.\n     *\n     * If no term_vector information is provided (by setting it to\n     * `with_positions_offsets` in the mapping), then the plain highlighter will be\n     * used. 
If it is provided, then the fast vector highlighter will be used.\n     * When term vectors are available, highlighting will be performed faster at\n     * the cost of bigger index size.\n     *\n     *\n     * @param {string|Array=} fields An optional field or array of fields to highlight.\n     */\n    export class Highlight {\n        constructor(fields?: string | string[]);\n\n        /**\n         * Allows you to set a field that will be highlighted. The field is\n         * added to the current list of fields.\n         *\n         * @param {string} field A field name.\n         */\n        field(field: string): this;\n\n        /**\n         * Allows you to set the fields that will be highlighted. All fields are\n         * added to the current list of fields.\n         *\n         * @param {Array<string>} fields Array of field names.\n         * @throws {TypeError} If `fields` is not an instance of Array\n         */\n        fields(fields: string[]): this;\n\n        /**\n         * Sets the pre tags for highlighted fragments. You can apply the\n         * tags to a specific field by passing the optional field name parameter.\n         *\n         * @param {string|Array} tags\n         * @param {string=} field\n         */\n        preTags(tags: string | any, field?: string): this;\n\n        /**\n         * Sets the post tags for highlighted fragments. You can apply the\n         * tags to a specific field by passing the optional field name parameter.\n         *\n         * @param {string|Array} tags\n         * @param {string=} field\n         */\n        postTags(tags: string | any, field?: string): this;\n\n        /**\n         * Sets the styled schema to be used for the tags.\n         * styled - 10 `<em>` pre tags with css class of hltN, where N is 1-10\n         */\n        styledTagsSchema(): this;\n\n        /**\n         * Sets the order of highlight fragments to be sorted by score. 
You can apply the\n         * score order to a specific field by passing the optional field name parameter.\n         *\n         * @param {string=} field An optional field name\n         */\n        scoreOrder(field?: string): this;\n\n        /**\n         * Sets the size of each highlight fragment in characters. You can apply the\n         * option to a specific field by passing the optional field name parameter.\n         *\n         * @param {number} size The fragment size in characters. Defaults to 100.\n         * @param {string=} field An optional field name\n         */\n        fragmentSize(size: number, field?: string): this;\n\n        /**\n         * Sets the maximum number of fragments to return. You can apply the\n         * option to a specific field by passing the optional field name parameter.\n         *\n         * @param {number} count The maximum number of fragments to return\n         * @param {string=} field An optional field name\n         */\n        numberOfFragments(count: number, field?: string): this;\n\n        /**\n         * If `no_match_size` is set, in the case where there is no matching fragment\n         * to highlight, a snippet of text, with the specified length, from the beginning\n         * of the field will be returned.\n         * The actual length may be shorter than specified as it tries to break on a word boundary.\n         * Default is `0`.\n         *\n         * @param {number} size\n         * @param {string} field\n         */\n        noMatchSize(size: number, field: string): this;\n\n        /**\n         * Highlight against a query other than the search query.\n         * Useful if you use a rescore query because those\n         * are not taken into account by highlighting by default.\n         *\n         * @param {Query} query\n         * @param {string=} field An optional field name\n         * @throws {TypeError} If `query` is not an instance of `Query`\n         */\n        highlightQuery(query: Query, 
field?: string): this;\n\n        /**\n         * Combine matches on multiple fields to highlight a single field.\n         * Useful for multifields that analyze the same string in different ways.\n         * Sets the highlight type to Fast Vector Highlighter(`fvh`).\n         *\n         * @param {Array<string>} fields\n         * @param {string} field Field name\n         * @throws {Error} field parameter should be valid field name\n         * @throws {TypeError} If `fields` is not an instance of Array\n         */\n        matchedFields(fields: string[], field: string): this;\n\n        /**\n         * The fast vector highlighter has a phrase_limit parameter that prevents\n         * it from analyzing too many phrases and eating tons of memory. It defaults\n         * to 256 so only the first 256 matching phrases in the document scored\n         * considered. You can raise the limit with the phrase_limit parameter.\n         * If using `matched_fields`, `phrase_limit` phrases per matched field\n         * are considered.\n         *\n         * @param {number} limit Defaults to 256.\n         */\n        phraseLimit(limit: number): this;\n\n        /**\n         * Can be used to define how highlighted text will be encoded.\n         *\n         * @param {string} encoder It can be either `default` (no encoding)\n         * or `html` (will escape html, if you use html highlighting tags)\n         * @throws {Error} Encoder can be either `default` or `html`\n         */\n        encoder(encoder: 'default' | 'html'): this;\n\n        /**\n         * By default only fields that hold a query match will be highlighted.\n         * This can be set to false to highlight the field regardless of whether\n         * the query matched specifically on them. 
You can apply the\n         * option to a specific field by passing the optional field name parameter.\n         *\n         * @param {boolean} requireFieldMatch\n         * @param {string=} field An optional field name\n         */\n        requireFieldMatch(requireFieldMatch: boolean, field?: string): this;\n\n        /**\n         * Allows to control how far to look for boundary characters, and defaults to 20.\n         * You can apply the option to a specific field by passing the optional field name parameter.\n         *\n         * @param {number} count The max characters to scan.\n         * @param {string=} field An optional field name\n         */\n        boundaryMaxScan(count: number, field?: string): this;\n\n        /**\n         * Defines what constitutes a boundary for highlighting.\n         * It is a single string with each boundary character defined in it.\n         * It defaults to `.,!? \\t\\n`. You can apply the\n         * option to a specific field by passing the optional field name parameter.\n         *\n         * @param {string} charStr\n         * @param {string=} field An optional field name\n         */\n        boundaryChars(charStr: string, field?: string): this;\n\n        /**\n         * Allows to force a specific highlighter type.\n         * This is useful for instance when needing to use\n         * the plain highlighter on a field that has term_vectors enabled.\n         * You can apply the option to a specific field by passing the optional field name parameter.\n         *\n         * Note: The `postings` highlighter has been removed in elasticsearch 6.0. 
The `unified`\n         * highlighter outputs the same highlighting when `index_options` is set to `offsets`.\n         *\n         * @param {string} type The allowed values are: `plain`, `postings`, `unified` and `fvh`.\n         * @param {string=} field An optional field name\n         * @throws {Error} Type can be one of `plain`, `postings`, `unified` or `fvh`.\n         */\n        type(type: 'plain' | 'postings' | 'unified' | 'fvh', field?: string): this;\n\n        /**\n         * Forces the highlighting to highlight fields based on the source\n         * even if fields are stored separately. Defaults to false.\n         *\n         * @param {boolean} forceSource\n         * @param {string=} field An optional field name\n         */\n        forceSource(forceSource: boolean, field?: string): this;\n\n        /**\n         * Sets the fragmenter type. You can apply the\n         * option to a specific field by passing the optional field name parameter.\n         * Valid values for order are:\n         *  - `simple` - breaks text up into same-size fragments with no concerns\n         *      over spotting sentence boundaries.\n         *  - `span` - breaks text up into same-size fragments but does not split\n         *      up Spans.\n         *\n         * @param {string} fragmenter The fragmenter.\n         * @param {string=} field An optional field name\n         * @throws {Error} Fragmenter can be either `simple` or `span`\n         */\n        fragmenter(fragmenter: 'simple' | 'span', field?: string): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `highlight` request\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * Allows to highlight search results on one or more fields. In order to\n     * perform highlighting, the actual content of the field is required. 
If the\n     * field in question is stored (has store set to yes in the mapping), it will\n     * be used, otherwise, the actual _source will be loaded and the relevant\n     * field will be extracted from it.\n     *\n     * If no term_vector information is provided (by setting it to\n     * `with_positions_offsets` in the mapping), then the plain highlighter will be\n     * used. If it is provided, then the fast vector highlighter will be used.\n     * When term vectors are available, highlighting will be performed faster at\n     * the cost of bigger index size.\n     *\n     *\n     * @param {string|Array=} fields An optional field or array of fields to highlight.\n     */\n    export function highlight(fields?: string | string[]): Highlight;\n\n    /**\n    * Class supporting the Elasticsearch runtime field.\n    *\n    * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/runtime.html)\n    *\n    * Added in Elasticsearch v7.11.0\n    * [Release note](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/release-notes-7.11.0.html)\n    *\n    * @param {string=} type One of `boolean`, `composite`, `date`, `double`, `geo_point`, `ip`, `keyword`, `long`, `lookup`.\n    * @param {string=} script Source of the script.\n    *\n    * @example\n    * const field = esb.runtimeField('keyword', `emit(doc['sessionId'].value + '::' + doc['name'].value)`);\n    */\n    export class RuntimeField {\n        constructor(type?: string, script?: string);\n\n        /**\n         * Sets the type of the runtime field.\n         *\n         * @param {string} type One of `boolean`, `composite`, `date`, `double`, `geo_point`, `ip`, `keyword`, `long`, `lookup`.\n         * @returns {RuntimeField} returns `this` so that calls can be chained.\n         */\n        type(type: 'boolean' | 'composite' | 'date' | 'double' | 'geo_point' | 'ip' | 'keyword' | 'long' | 'lookup'): this;\n\n        /**\n         * Sets the source of the script.\n      
   *\n         * @param {string} script\n         * @returns {RuntimeField} returns `this` so that calls can be chained.\n         */\n        script(script: string): this;\n\n        /**\n         * Specifies the language the script is written in. Defaults to `painless` but\n         * may be set to any of languages listed in [Scripting](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html).\n         *\n         * @param {string} lang The language for the script.\n         * @returns {RuntimeField} returns `this` so that calls can be chained.\n         */\n        lang(lang: string): this;\n    \n        /**\n         * Specifies any named parameters that are passed into the script as variables.\n         *\n         * @param {object} params Named parameters to be passed to script.\n         * @returns {RuntimeField} returns `this` so that calls can be chained.\n         */\n        params(params: object): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `script`.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n    * Class supporting the Elasticsearch runtime field.\n    *\n    * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/runtime.html)\n    *\n    * Added in Elasticsearch v7.11.0\n    * [Release note](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/release-notes-7.11.0.html)\n    *\n    * @param {string=} type One of `boolean`, `composite`, `date`, `double`, `geo_point`, `ip`, `keyword`, `long`, `lookup`.\n    * @param {string=} script Source of the script.\n    *\n    * @example\n    * const field = esb.runtimeField('keyword', `emit(doc['sessionId'].value + '::' + doc['name'].value)`);\n    */\n    export function runtimeField(type?: 'boolean' | 'composite' | 'date' | 'double' | 'geo_point' | 'ip' | 'keyword' | 'long' | 'lookup', script?: string): RuntimeField;\n\n    /**\n     * 
Class supporting the Elasticsearch scripting API.\n     *\n     * Note: `inline` script type was deprecated in [elasticsearch v5.0](https://www.elastic.co/guide/en/elasticsearch/reference/5.6/breaking_50_scripting.html).\n     * `source` should be used instead. And similarly for `stored` scripts, type\n     * `id` must be used instead. `file` scripts were removed as part of the\n     * breaking changes in [elasticsearch v6.0](https://www.elastic.co/guide/en/elasticsearch/reference/6.0/breaking_60_scripting_changes.html#_file_scripts_removed)\n     *\n     * @param {string=} type One of `inline`, `stored`, `file`, `source`, `id`.\n     * @param {string=} source Source of the script.\n     * This needs to be specified if optional argument `type` is passed.\n     */\n    export class Script {\n        constructor(type?: string, source?: string);\n\n        /**\n         * Sets the type of script to be `inline` and specifies the source of the script.\n         *\n         * Note: This type was deprecated in elasticsearch v5.0. Use `source`\n         * instead if you are using elasticsearch `>= 5.0`.\n         *\n         * @param {string} scriptCode\n         */\n        inline(scriptCode: string): this;\n\n        /**\n         * Sets the type of script to be `source` and specifies the source of the script.\n         *\n         * Note: `source` is an alias for the `inline` type which was deprecated\n         * in elasticsearch v5.0. So this type is supported only in versions\n         * `>= 5.0`.\n         *\n         * @param {string} scriptCode\n         */\n        source(scriptCode: string): this;\n\n        /**\n         * Specify the `stored` script by `id` which will be retrieved from cluster state.\n         *\n         * Note: This type was deprecated in elasticsearch v5.0. 
Use `id`\n         * instead if you are using elasticsearch `>= 5.0`.\n         *\n         * @param {string} scriptId The unique identifier for the stored script.\n         */\n        stored(scriptId: string): this;\n\n        /**\n         * Specify the stored script to be used by it's `id` which will be retrieved\n         * from cluster state.\n         *\n         * Note: `id` is an alias for the `stored` type which was deprecated in\n         * elasticsearch v5.0. So this type is supported only in versions `>= 5.0`.\n         *\n         * @param {string} scriptId The unique identifier for the stored script.\n         */\n        id(scriptId: string): this;\n\n        /**\n         * Specify the `stored` script by `id` which will be retrieved from cluster state.\n         *\n         * For script file `config/scripts/calculate-score.groovy`,\n         * `fileName` should be `calculate-score`.\n         *\n         * Note: File scripts have been removed in elasticsearch 6.0. Instead, use stored scripts.\n         *\n         * @param {string} fileName The name of the script stored as a file in the scripts folder.\n         */\n        file(fileName: string): this;\n\n        /**\n         * Specifies the language the script is written in. Defaults to `painless` but\n         * may be set to any of languages listed in Scripting elasticsearch documentation.\n         * The default language may be changed in the `elasticsearch.yml` config file by setting\n         * `script.default_lang` to the appropriate language.\n         *\n         * For a `file` script,  it should correspond with the script file suffix.\n         * `groovy` for `config/scripts/calculate-score.groovy`.\n         *\n         * Note: The Groovy, JavaScript, and Python scripting languages were deprecated in\n         * elasticsearch 5.0 and removed in 6.0. 
Use painless instead.\n         *\n         * @param {string} lang The language for the script.\n         */\n        lang(lang: string): this;\n\n        /**\n         * Specifies any named parameters that are passed into the script as variables.\n         *\n         * @param {object} params Named parameters to be passed to script.\n         */\n        params(params: object): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `script`.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * Class supporting the Elasticsearch scripting API.\n     *\n     * @param {string=} type One of `inline`, `stored`, `file`\n     * @param {string=} source Source of the script.\n     * This needs to be specified if optional argument `type` is passed.\n     */\n    export function script(type?: string, source?: string): Script;\n\n    /**\n     * A `GeoPoint` object that can be used in queries and filters that\n     * take a `GeoPoint`.  
`GeoPoint` supports various input formats.\n     */\n    export class GeoPoint {\n        /**\n         * Sets the latitude for the object representation.\n         *\n         * @param {number} lat Latitude\n         */\n        lat(lat: number): this;\n\n        /**\n         * Sets the longitude for the object representation.\n         *\n         * @param {number} lon Longitude\n         */\n        lon(lon: number): this;\n\n        /**\n         * Sets the Geo Point value expressed as an object,\n         * with `lat` and `lon` keys.\n         *\n         * @param {object} point\n         * @throws {TypeError} If `point` is not an instance of object\n         */\n        object(point: object): this;\n\n        /**\n         * Sets the Geo Point value expressed as an array\n         * with the format: `[ lon, lat ]`.\n         *\n         * @param {Array<number>} point Array in format `[ lon, lat ]`(`GeoJson` standard)\n         * @throws {TypeError} If `point` is not an instance of Array\n         */\n        array(point: number[]): this;\n\n        /**\n         * Sets Geo-point expressed as a string with the format: `\"lat,lon\"`\n         * or as a geo hash\n         *\n         * @param {string} point\n         */\n        string(point: string): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the `GeoPoint`\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * A `GeoPoint` object that can be used in queries and filters that\n     * take a `GeoPoint`.  `GeoPoint` supports various input formats.\n     */\n    export function geoPoint(): GeoPoint;\n\n    /**\n     * Shape object that can be used in queries and filters that\n     * take a Shape. 
Shape uses the GeoJSON format.\n     *\n     * @param {string=} type A valid shape type.\n     * Can be one of `point`, `linestring`, `polygon`, `multipoint`, `multilinestring`,\n     * `multipolygon`, `geometrycollection`, `envelope` and `circle`\n     * @param {Array=} coords A valid coordinate definition for the given shape.\n     */\n    export class GeoShape {\n        constructor(\n            type?:\n                | 'point'\n                | 'linestring'\n                | 'polygon'\n                | 'multipoint'\n                | 'multilinestring'\n                | 'multipolygon'\n                | 'geometrycollection'\n                | 'envelope'\n                | 'circle',\n            coords?: object\n        );\n\n        /**\n         * Sets the GeoJSON format type used to represent shape.\n         *\n         * @param {string} type A valid shape type.\n         * Can be one of `point`, `linestring`, `polygon`, `multipoint`, `multilinestring`,\n         * `multipolygon`, `geometrycollection`, `envelope`, `circle`\n         */\n        type(\n            type:\n                | 'point'\n                | 'linestring'\n                | 'polygon'\n                | 'multipoint'\n                | 'multilinestring'\n                | 'multipolygon'\n                | 'geometrycollection'\n                | 'envelope'\n                | 'circle'\n        ): this;\n\n        /**\n         * Sets the coordinates for the shape definition. Note, the coordinates\n         * are not validated in this api. 
Please see `GeoJSON`\n         * and ElasticSearch documentation for correct coordinate definitions.\n         *\n         * @param {Array<Array<number>>|Array<number>} coords\n         */\n        coordinates(coords: number[][] | number[]): this;\n\n        /**\n         * Sets the radius for parsing a circle `GeoShape`.\n         *\n         * @param {string|number} radius The radius for shape circle.\n         */\n        radius(radius: string | number): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation of the geo shape\n         * class instance.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * Shape object that can be used in queries and filters that\n     * take a Shape. Shape uses the GeoJSON format.\n     *\n     * @param {string=} type A valid shape type.\n     * Can be one of `point`, `linestring`, `polygon`, `multipoint`, `multilinestring`,\n     * `multipolygon`, `geometrycollection`, `envelope` and `circle`\n     * @param {Array=} coords A valid coordinate definition for the given shape.\n     */\n    export function geoShape(\n        type?:\n            | 'point'\n            | 'linestring'\n            | 'polygon'\n            | 'multipoint'\n            | 'multilinestring'\n            | 'multipolygon'\n            | 'geometrycollection'\n            | 'envelope'\n            | 'circle',\n        coords?: object\n    ): GeoShape;\n\n    /**\n     * A shape which has already been indexed in another index and/or index\n     * type. 
This is particularly useful for when you have a pre-defined list of\n     * shapes which are useful to your application and you want to reference this\n     * using a logical name (for example 'New Zealand') rather than having to\n     * provide their coordinates each time.\n     *\n     * @param {string=} id The document id of the shape.\n     * @param {string=} type The name of the type where the shape is indexed.\n     */\n    export class IndexedShape {\n        constructor(id?: string, type?: string);\n\n        /**\n         * Sets the ID of the document that contains the pre-indexed shape.\n         *\n         * @param {string} id The document id of the shape.\n         */\n        id(id: string): this;\n\n        /**\n         * Sets the index type where the pre-indexed shape is.\n         *\n         * @param {string} type The name of the type where the shape is indexed.\n         */\n        type(type: string): this;\n\n        /**\n         * Sets the name of the index where the pre-indexed shape is. Defaults to `shapes`.\n         *\n         * @param {string} index A valid index name\n         */\n        index(index: string): this;\n\n        /**\n         * Sets the field specified as path containing the pre-indexed shape.\n         * Defaults to `shape`.\n         *\n         * @param {string} path field name.\n         */\n        path(path: string): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation of the geo shape\n         * class instance.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * A shape which has already been indexed in another index and/or index\n     * type. 
This is particularly useful for when you have a pre-defined list of\n     * shapes which are useful to your application and you want to reference this\n     * using a logical name (for example 'New Zealand') rather than having to\n     * provide their coordinates each time.\n     *\n     * @param {string=} id The document id of the shape.\n     * @param {string=} type The name of the type where the shape is indexed.\n     */\n    export function indexedShape(id?: string, type?: string): IndexedShape;\n\n    /**\n     * Allows creating and configuring sort on specified field.\n     *\n     * @param {string=} field The field to sort on.\n     * If a script is used to specify the sort order, `field` should be omitted.\n     * @param {string=} order The `order` option can have the following values.\n     * `asc`, `desc` to sort in ascending, descending order respectively.\n     */\n    export class Sort {\n        constructor(field?: string, order?: string);\n\n        /**\n         * Set order for sorting. 
The order defaults to `desc` when sorting on the `_score`,\n         * and defaults to `asc` when sorting on anything else.\n         *\n         * @param {string} order The `order` option can have the following values.\n         * `asc`, `desc` to sort in ascending, descending order respectively.\n         */\n        order(order: 'asc' | 'desc'): this;\n\n        /**\n         * Elasticsearch supports sorting by array or multi-valued fields.\n         * The `mode` option controls what array value is picked for sorting the\n         * document it belongs to.\n         * The `mode` option can have the following values:\n         * - `min` - Pick the lowest value.\n         * - `max` - Pick the highest value.\n         * - `sum` - Use the sum of all values as sort value.\n         *   Only applicable for number based array fields.\n         * - `avg` - Use the average of all values as sort value.\n         *   Only applicable for number based array fields.\n         * - `median` - Use the median of all values as sort value.\n         *   Only applicable for number based array fields.\n         *\n         * @param {string} mode One of `avg`, `min`, `max`, `sum` and `median`.\n         */\n        mode(mode: 'min' | 'max' | 'sum' | 'avg' | 'median'): this;\n\n        /**\n         * Defines on which nested object to sort. The actual sort field must be a direct\n         * field inside this nested object. When sorting by nested field, this field\n         * is mandatory.\n         *\n         * Note: This method has been deprecated in elasticsearch 6.1. From 6.1 and\n         * later, use `nested` method instead.\n         *\n         * @param {string} path Nested object to sort on\n         */\n        nestedPath(path: string): this;\n\n        /**\n         * A filter that the inner objects inside the nested path should match with in order\n         * for its field values to be taken into account by sorting. 
By default no\n         * `nested_filter` is active.\n         *\n         * Note: This method has been deprecated in elasticsearch 6.1. From 6.1 and\n         * later, use `nested` method instead.\n         *\n         * @param {Query} filterQuery\n         * @throws {TypeError} If filter query is not an instance of `Query`\n         */\n        nestedFilter(filterQuery: Query): this;\n\n        /**\n         * Defines on which nested object to sort and the filter that the inner objects inside\n         * the nested path should match with in order for its field values to be taken into\n         * account by sorting\n         *\n         * Note: This method is incompatible with elasticsearch 6.0 and older.\n         * Use it only with elasticsearch 6.1 and later.\n         *\n         * @param {Object} nested Nested config that contains path and filter\n         * @param {string} nested.path Nested object to sort on\n         * @param {Query} nested.filter Filter query\n         */\n        nested(nested: { path: string; filter: Query }): this;\n\n        /**\n         * The missing parameter specifies how docs which are missing the field should\n         * be treated: The missing value can be set to `_last`, `_first`, or a custom value\n         * (that will be used for missing docs as the sort value). The default is `_last`.\n         *\n         * @param {string|number} value\n         */\n        missing(value: string | number): this;\n\n        /**\n         * By default, the search request will fail if there is no mapping associated with\n         * a field. The `unmapped_type` option allows to ignore fields that have no mapping\n         * and not sort by them. 
The value of this parameter is used to determine what sort\n         * values to emit.\n         *\n         * @param {string} type\n         */\n        unmappedType(type: string): this;\n\n        /**\n         * Sorts documents by distance of the geo point field from reference point.\n         * If multiple reference points are specified, the final distance for a\n         * document will then be `min`/`max`/`avg` (defined via `mode`) distance of all\n         * points contained in the document to all points given in the sort request.\n         *\n         * @param {GeoPoint|Object|Array|string} geoPoint Reference point or array of\n         * points to calculate distance from. Can be expressed using the `GeoPoint` class,\n         * `Object` with `lat`, `lon` keys, as a string either `lat,lon` or geohash\n         * or as Array with GeoJSON format `[lon, lat]`\n         */\n        geoDistance(geoPoint: GeoPoint | object | number[] | string): this;\n\n        /**\n         * Sets the distance calculation mode, `arc` or `plane`.\n         * The `arc` calculation is the more accurate.\n         * The `plane` is the faster but least accurate.\n         *\n         * @param {string} type\n         * @throws {Error} If `type` is neither `plane` nor `arc`.\n         */\n        distanceType(type: 'arc' | 'plane'): this;\n\n        /**\n         * Sets the distance unit.  Valid values are:\n         * mi (miles), in (inches), yd (yards),\n         * km (kilometers), cm (centimeters), mm (millimeters),\n         * ft(feet), NM(nauticalmiles)\n         *\n         * @param {string} unit Distance unit, default is `m`(meters).\n         * @throws {Error} If Unit is outside the accepted set.\n         */\n        unit(unit: string): this;\n\n        /**\n         * Sorts based on custom script. 
When sorting on a field, scores are not computed.\n         *\n         * @param {Script} script\n         * @throws {TypeError} If `script` is not an instance of `Script`\n         */\n        script(script: Script): this;\n\n        /**\n         * Sets the data type for field generated by script.\n         *\n         * @param {string} type\n         */\n        type(type: string): this;\n\n        /**\n         * Sets the format of the date when sorting a date field.\n         *\n         * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/7.17/mapping-date-format.html#built-in-date-formats)\n         *\n         * Note: The format argument is [supported since version 7.13](https://www.elastic.co/guide/en/elasticsearch/reference/7.13/release-notes-7.13.0.html) of ElasticSearch.\n         *\n         * @param {string} fmt\n         */\n        format(\n            fmt:\n                | 'epoch_millis'\n                | 'epoch_second'\n                | 'date_optional_time'\n                | 'strict_date_optional_time'\n                | 'strict_date_optional_time_nanos'\n                | 'basic_date'\n                | 'basic_date_time'\n                | 'basic_date_time_no_millis'\n                | 'basic_ordinal_date'\n                | 'basic_ordinal_date_time'\n                | 'basic_ordinal_date_time_no_millis'\n                | 'basic_time'\n                | 'basic_time_no_millis'\n                | 'basic_t_time'\n                | 'basic_t_time_no_millis'\n                | 'basic_week_date'\n                | 'strict_basic_week_date'\n                | 'basic_week_date_time'\n                | 'strict_basic_week_date_time'\n                | 'basic_week_date_time_no_millis'\n                | 'strict_basic_week_date_time_no_millis'\n                | 'date'\n                | 'strict_date'\n                | 'date_hour'\n                | 'strict_date_hour'\n                | 'date_hour_minute'\n                
| 'strict_date_hour_minute'\n                | 'date_hour_minute_second'\n                | 'strict_date_hour_minute_second'\n                | 'date_hour_minute_second_fraction'\n                | 'strict_date_hour_minute_second_fraction'\n                | 'date_hour_minute_second_millis'\n                | 'strict_date_hour_minute_second_millis'\n                | 'date_time'\n                | 'strict_date_time'\n                | 'date_time_no_millis'\n                | 'strict_date_time_no_millis'\n                | 'hour'\n                | 'strict_hour'\n                | 'hour_minute'\n                | 'strict_hour_minute'\n                | 'hour_minute_second'\n                | 'strict_hour_minute_second'\n                | 'hour_minute_second_fraction'\n                | 'strict_hour_minute_second_fraction'\n                | 'hour_minute_second_millis'\n                | 'strict_hour_minute_second_millis'\n                | 'ordinal_date'\n                | 'strict_ordinal_date'\n                | 'ordinal_date_time'\n                | 'strict_ordinal_date_time'\n                | 'ordinal_date_time_no_millis'\n                | 'strict_ordinal_date_time_no_millis'\n                | 'time'\n                | 'strict_time'\n                | 'time_no_millis'\n                | 'strict_time_no_millis'\n                | 't_time'\n                | 'strict_t_time'\n                | 't_time_no_millis'\n                | 'strict_t_time_no_millis'\n                | 'week_date'\n                | 'strict_week_date'\n                | 'week_date_time'\n                | 'strict_week_date_time'\n                | 'week_date_time_no_millis'\n                | 'strict_week_date_time_no_millis'\n                | 'weekyear'\n                | 'strict_weekyear'\n                | 'weekyear_week'\n                | 'strict_weekyear_week'\n                | 'weekyear_week_day'\n                | 'strict_weekyear_week_day'\n                | 'year'\n              
  | 'strict_year'\n                | 'year_month'\n                | 'strict_year_month'\n                | 'year_month_day'\n                | 'strict_year_month_day'\n        ): this;\n\n        /**\n         * Reverse the sort order. Valid during sort types: field, geo distance, and script.\n         *\n         * @param {boolean} reverse If sort should be in reverse order.\n         */\n        reverse(reverse: boolean): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for `sort` parameter.\n         *\n         * @override\n         */\n        toJSON(): object | string;\n    }\n\n    /**\n     * Allows creating and configuring sort on specified field.\n     *\n     * @param {string=} field The field to sort on.\n     * If a script is used to specify the sort order, `field` should be omitted.\n     * @param {string=} order The `order` option can have the following values.\n     * `asc`, `desc` to sort in ascending, descending order respectively.\n     */\n    export function sort(field?: string, order?: string): Sort;\n\n    /**\n     * A `rescore` request can help to improve precision by reordering just\n     * the top (eg 100 - 500) documents returned by the `query` and `post_filter`\n     * phases, using a secondary (usually more costly) algorithm, instead of\n     * applying the costly algorithm to all documents in the index.\n     * The rescore phase is not executed when sort is used.\n     *\n     * @param {number=} windowSize\n     * @param {Query=} rescoreQuery\n     */\n    export class Rescore {\n        constructor(windowSize?: number, rescoreQuery?: Query);\n\n        /**\n         * The number of docs which will be examined on each shard can be controlled\n         * by the window_size parameter, which defaults to `from` and `size`.\n         *\n         * @param {number} windowSize\n         */\n        windowSize(windowSize: number): this;\n\n        /**\n         * The query to execute on the Top-K results 
by the `query` and `post_filter` phases.\n         *\n         * @param {Query} rescoreQuery\n         * @throws {TypeError} If `rescoreQuery` is not an instance of `Query`\n         */\n        rescoreQuery(rescoreQuery: Query): this;\n\n        /**\n         * Control the relative importance of the original query.\n         *\n         * @param {number} weight Defaults to 1\n         */\n        queryWeight(weight: number): this;\n\n        /**\n         * Control the relative importance of the rescore query.\n         *\n         * @param {number} weight Defaults to 1\n         */\n        rescoreQueryWeight(weight: number): this;\n\n        /**\n         * Controls the way the scores are combined.\n         *\n         * @param {string} mode Can be one of `total`, `multiply`, `min`, `max`, `avg`.\n         * Defaults to `total`.\n         */\n        scoreMode(mode: 'total' | 'multiply' | 'min' | 'max' | 'avg'): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for `rescore` request\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * A `rescore` request can help to improve precision by reordering just\n     * the top (eg 100 - 500) documents returned by the `query` and `post_filter`\n     * phases, using a secondary (usually more costly) algorithm, instead of\n     * applying the costly algorithm to all documents in the index.\n     * The rescore phase is not executed when sort is used.\n     *\n     * @param {number=} windowSize\n     * @param {Query=} rescoreQuery\n     */\n    export function rescore(windowSize?: number, rescoreQuery?: Query): Rescore;\n\n    /**\n     * Inner hits returns per search hit in the search response additional\n     * nested hits that caused a search hit to match in a different scope.\n     * Inner hits can be used by defining an `inner_hits` definition on a\n     * `nested`, `has_child` or `has_parent` query and filter.\n     *\n     * @param 
{string=} name The name to be used for the particular inner hit definition\n     * in the response. Useful when multiple inner hits have been defined in a single\n     * search request. The default depends in which query the inner hit is defined.\n     */\n    export class InnerHits {\n        constructor(name?: string);\n\n        /**\n         * The name to be used for the particular inner hit definition\n         * in the response. Useful when multiple inner hits have been defined in a single\n         * search request. The default depends in which query the inner hit is defined.\n         *\n         * @param {number} name\n         */\n        name(name: number): this;\n\n        /**\n         * The offset from where the first hit to fetch for each `inner_hits` in the returned\n         * regular search hits.\n         *\n         * @param {number} from\n         */\n        from(from: number): this;\n\n        /**\n         * The maximum number of hits to return per inner_hits.\n         * By default the top three matching hits are returned.\n         *\n         * @param {number} size Defaults to 10.\n         */\n        size(size: number): this;\n\n        /**\n         * How the inner hits should be sorted per inner_hits.\n         * By default the hits are sorted by the score.\n         *\n         * @param {Sort} sort\n         * @throws {TypeError} If parameter `sort` is not an instance of `Sort`.\n         */\n        sort(sort: Sort): this;\n\n        /**\n         * Allows to add multiple sort on specific fields. 
Each sort can be reversed as well.\n         * The sort is defined on a per field level, with special field name for _score to\n         * sort by score, and _doc to sort by index order.\n         *\n         * @param {Array<Sort>} sorts Array of sort\n         * @throws {TypeError} If any item in parameter `sorts` is not an instance of `Sort`.\n         */\n        sorts(sorts: Sort[]): this;\n\n        /**\n         * Allows to highlight search results on one or more fields. The implementation\n         * uses either the lucene `plain` highlighter, the fast vector highlighter (`fvh`)\n         * or `postings` highlighter.\n         *\n         * Note: The `postings` highlighter has been removed in elasticsearch 6.0. The `unified`\n         * highlighter outputs the same highlighting when `index_options` is set to `offsets`.\n         *\n         * @param {Highlight} highlight\n         */\n        highlight(highlight: Highlight): this;\n\n        /**\n         * Enables explanation for each hit on how its score was computed.\n         *\n         * @param {boolean} enable\n         */\n        explain(enable: boolean): this;\n\n        /**\n         * Allows to control how the `_source` field is returned with every hit.\n         * You can turn off `_source` retrieval by passing `false`.\n         * It also accepts one(string) or more wildcard(array) patterns to control\n         * what parts of the `_source` should be returned\n         * An object can also be used to specify the wildcard patterns for `includes` and `excludes`.\n         *\n         * @param {boolean|string|Array|Object} source\n         */\n        source(source: boolean | string | string[] | object): this;\n\n        /**\n         * Include specific stored fields\n         *\n         * @param {Array|string} fields\n         */\n        storedFields(fields: object | string): this;\n\n        /**\n         * Computes a document property dynamically based on the supplied `Script`.\n         *\n  
       * @param {string} scriptFieldName\n         * @param {string|Script} script string or instance of `Script`\n         */\n        scriptField(scriptFieldName: string, script: string | Script): this;\n\n        /**\n         * Sets given dynamic document properties to be computed using supplied `Script`s.\n         * Object should have `scriptFieldName` as key and `script` as the value.\n         *\n         * @param {object} scriptFields Object with `scriptFieldName` as key and `script` as the value.\n         */\n        scriptFields(scriptFields: object): this;\n\n        /**\n         * Allows to return the doc value representation of a field for each hit.\n         * Doc value fields can work on fields that are not stored.\n         *\n         * @param {Array<string>} fields\n         */\n        docvalueFields(fields: string[]): this;\n\n        /**\n         * Returns a version for each search hit.\n         * @param {boolean} enable\n         */\n        version(enable: boolean): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the inner hits request\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * Inner hits returns per search hit in the search response additional\n     * nested hits that caused a search hit to match in a different scope.\n     * Inner hits can be used by defining an `inner_hits` definition on a\n     * `nested`, `has_child` or `has_parent` query and filter.\n     *\n     * @param {string=} name The name to be used for the particular inner hit definition\n     * in the response. Useful when multiple inner hits have been defined in a single\n     * search request. 
The default depends in which query the inner hit is defined.\n     */\n    export function innerHits(name?: string): InnerHits;\n\n    /**\n     * Class supporting the Elasticsearch search template API.\n     *\n     * The `/_search/template` endpoint allows to use the mustache language to\n     * pre render search requests, before they are executed and fill existing\n     * templates with template parameters.\n     *\n     * @param {string=} type One of `inline`, `id`, `file`. `id` is also\n     * aliased as `indexed`\n     * @param {string|Object=} source Source of the search template.\n     * This needs to be specified if optional argument `type` is passed.\n     */\n    export class SearchTemplate {\n        constructor(type?: string, source?: string | object);\n\n        /**\n         * Sets the type of search template to be `inline` and specifies the query.\n         *\n         * @param {string|Query} query Either a `Query` object or a string.\n         */\n        inline(query: string | object): this;\n\n        /**\n         * Specify the indexed search template by `templateName` which will be\n         * retrieved from cluster state.\n         *\n         * @param {string} templId The unique identifier for the indexed template\n         */\n        id(templId: string): this;\n\n        /**\n         * Specify the indexed search template by `templateName` which will be\n         * retrieved from cluster state.\n         *\n         * Alias for `SearchTemplate.id`\n         *\n         * @param {string} templId The unique identifier for the indexed template\n         */\n        indexed(templId: string): this;\n\n        /**\n         * Specify the search template by filename stored in the scripts folder,\n         * with `mustache` extension.\n         *\n         * @param {string} fileName The name of the search template stored as a file\n         * in the scripts folder.\n         * For file `config/scripts/storedTemplate.mustache`,\n         * 
`fileName` should be `storedTemplate`\n         */\n        file(fileName: string): this;\n\n        /**\n         * Specifies any named parameters that are used to render the search template.\n         *\n         * @param {Object} params Named parameters to be used for rendering.\n         */\n        params(params: object): this;\n\n        /**\n         * Override default `toJSON` to return DSL representation for the Search Template.\n         *\n         * @override\n         */\n        toJSON(): object;\n    }\n\n    /**\n     * Class supporting the Elasticsearch search template API.\n     *\n     * The `/_search/template` endpoint allows to use the mustache language to\n     * pre render search requests, before they are executed and fill existing\n     * templates with template parameters.\n     *\n     * @param {string=} type One of `inline`, `id`, `file`. `id` is also\n     * aliased as `indexed`\n     * @param {string|Object=} source Source of the search template.\n     * This needs to be specified if optional argument `type` is passed.\n     */\n    export function searchTemplate(\n        type?: string,\n        source?: string | object\n    ): SearchTemplate;\n\n    export namespace recipes {\n        /**\n         * Recipe for the now removed `missing` query.\n         * Can be accessed using `esb.recipes.missingQuery` OR `esb.cookMissingQuery`.\n         *\n         * @param {string} field The field which should be missing the value.\n         */\n        export function missingQuery(field: string): BoolQuery;\n\n        /**\n         * Recipe for random sort query. Takes a query and returns the same\n         * wrapped in a random scoring query.\n         * Can be accessed using `esb.recipes.randomSortQuery` OR `esb.cookRandomSortQuery`.\n         *\n         * @param {Query=} query The query to fetch documents for. 
Defaults to `match_all` query.\n         * @param {number=} seed A seed value for the random score function.\n         * @throws {TypeError} If `query` is not an instance of `Query`.\n         */\n        export function randomSortQuery(\n            query?: Query,\n            seed?: number\n        ): FunctionScoreQuery;\n\n        /**\n         * Recipe for constructing a filter query using `bool` query.\n         * Optionally, scoring can be enabled.\n         * Can be accessed using `esb.recipes.filterQuery` OR `esb.cookFilterQuery`.\n         *\n         * @param {Query} query The query to fetch documents for.\n         * @param {boolean=} scoring Optional flag for enabling/disabling scoring. Disabled by default.\n         * If enabled, a score of `1.0` will be assigned to all documents.\n         * @throws {TypeError} If `query` is not an instance of `Query`.\n         */\n        export function filterQuery(query: Query, scoring?: boolean): BoolQuery;\n    }\n\n    // Dunno how to define alias for namespaced functions\n\n    /**\n     * Recipe for the now removed `missing` query.\n     * Can be accessed using `esb.recipes.missingQuery` OR `esb.cookMissingQuery`.\n     *\n     * @param {string} field The field which should be missing the value.\n     */\n    export function cookMissingQuery(field: string): BoolQuery;\n\n    /**\n     * Recipe for random sort query. Takes a query and returns the same\n     * wrapped in a random scoring query.\n     * Can be accessed using `esb.recipes.randomSortQuery` OR `esb.cookRandomSortQuery`.\n     *\n     * @param {Query=} query The query to fetch documents for. 
Defaults to `match_all` query.\n     * @param {number=} seed A seed value for the random score function.\n     * @throws {TypeError} If `query` is not an instance of `Query`.\n     */\n    export function cookRandomSortQuery(\n        query?: Query,\n        seed?: number\n    ): FunctionScoreQuery;\n\n    /**\n     * Recipe for constructing a filter query using `bool` query.\n     * Optionally, scoring can be enabled.\n     * Can be accessed using `esb.recipes.filterQuery` OR `esb.cookFilterQuery`.\n     *\n     * @param {Query} query The query to fetch documents for.\n     * @param {boolean=} scoring Optional flag for enabling/disabling scoring. Disabled by default.\n     * If enabled, a score of `1.0` will be assigned to all documents.\n     * @throws {TypeError} If `query` is not an instance of `Query`.\n     */\n    export function cookFilterQuery(query: Query, scoring?: boolean): BoolQuery;\n\n    /**\n     * Utility function to pretty print objects to console.\n     * To be used in development.\n     *\n     * @param {Object} obj\n     */\n    export function prettyPrint(obj: any): void;\n}\n"
  },
  {
    "path": "src/index.js",
    "content": "/* eslint-disable max-lines */\n\n'use strict';\n\nconst {\n    RequestBodySearch,\n    Highlight,\n    Script,\n    GeoPoint,\n    GeoShape,\n    IndexedShape,\n    Sort,\n    Rescore,\n    InnerHits,\n    RuntimeField,\n    SearchTemplate,\n    Query,\n    KNN,\n    util: { constructorWrapper }\n} = require('./core');\n\nconst {\n    MatchAllQuery,\n    MatchNoneQuery,\n    fullTextQueries: {\n        MatchQuery,\n        MatchPhraseQuery,\n        MatchPhrasePrefixQuery,\n        MultiMatchQuery,\n        CommonTermsQuery,\n        QueryStringQuery,\n        SimpleQueryStringQuery,\n        CombinedFieldsQuery\n    },\n    termLevelQueries: {\n        TermQuery,\n        TermsQuery,\n        TermsSetQuery,\n        RangeQuery,\n        ExistsQuery,\n        PrefixQuery,\n        WildcardQuery,\n        RegexpQuery,\n        FuzzyQuery,\n        TypeQuery,\n        IdsQuery\n    },\n    compoundQueries: {\n        ConstantScoreQuery,\n        BoolQuery,\n        DisMaxQuery,\n        FunctionScoreQuery,\n        BoostingQuery,\n        scoreFunctions: {\n            ScriptScoreFunction,\n            WeightScoreFunction,\n            RandomScoreFunction,\n            FieldValueFactorFunction,\n            DecayScoreFunction\n        }\n    },\n    joiningQueries: {\n        NestedQuery,\n        HasChildQuery,\n        HasParentQuery,\n        ParentIdQuery\n    },\n    geoQueries: {\n        GeoShapeQuery,\n        GeoBoundingBoxQuery,\n        GeoDistanceQuery,\n        GeoPolygonQuery\n    },\n    specializedQueries: {\n        MoreLikeThisQuery,\n        ScriptQuery,\n        ScriptScoreQuery,\n        PercolateQuery,\n        DistanceFeatureQuery,\n        RankFeatureQuery\n    },\n    spanQueries: {\n        SpanTermQuery,\n        SpanMultiTermQuery,\n        SpanFirstQuery,\n        SpanNearQuery,\n        SpanOrQuery,\n        SpanNotQuery,\n        SpanContainingQuery,\n        SpanWithinQuery,\n        SpanFieldMaskingQuery\n    },\n    
vectorQueries: { SparseVectorQuery, SemanticQuery }\n} = require('./queries');\n\nconst {\n    metricsAggregations: {\n        AvgAggregation,\n        CardinalityAggregation,\n        ExtendedStatsAggregation,\n        GeoBoundsAggregation,\n        GeoCentroidAggregation,\n        MaxAggregation,\n        MinAggregation,\n        PercentilesAggregation,\n        PercentileRanksAggregation,\n        ScriptedMetricAggregation,\n        StatsAggregation,\n        SumAggregation,\n        TopHitsAggregation,\n        ValueCountAggregation,\n        WeightedAverageAggregation\n    },\n    bucketAggregations: {\n        AdjacencyMatrixAggregation,\n        ChildrenAggregation,\n        CompositeAggregation,\n        DateHistogramAggregation,\n        AutoDateHistogramAggregation,\n        VariableWidthHistogramAggregation,\n        DateRangeAggregation,\n        DiversifiedSamplerAggregation,\n        FilterAggregation,\n        FiltersAggregation,\n        GeoDistanceAggregation,\n        GeoHashGridAggregation,\n        GeoHexGridAggregation,\n        GeoTileGridAggregation,\n        GlobalAggregation,\n        HistogramAggregation,\n        IpRangeAggregation,\n        MissingAggregation,\n        NestedAggregation,\n        ParentAggregation,\n        RangeAggregation,\n        RareTermsAggregation,\n        ReverseNestedAggregation,\n        SamplerAggregation,\n        SignificantTermsAggregation,\n        SignificantTextAggregation,\n        TermsAggregation\n    },\n    pipelineAggregations: {\n        AvgBucketAggregation,\n        DerivativeAggregation,\n        MaxBucketAggregation,\n        MinBucketAggregation,\n        SumBucketAggregation,\n        StatsBucketAggregation,\n        ExtendedStatsBucketAggregation,\n        PercentilesBucketAggregation,\n        MovingAverageAggregation,\n        MovingFunctionAggregation,\n        CumulativeSumAggregation,\n        BucketScriptAggregation,\n        BucketSelectorAggregation,\n        
SerialDifferencingAggregation,\n        BucketSortAggregation\n    },\n    matrixAggregations: { MatrixStatsAggregation }\n} = require('./aggregations');\n\nconst {\n    TermSuggester,\n    DirectGenerator,\n    PhraseSuggester,\n    CompletionSuggester\n} = require('./suggesters');\n\nconst recipes = require('./recipes');\n\nexports.RequestBodySearch = RequestBodySearch;\nexports.requestBodySearch = constructorWrapper(RequestBodySearch);\n\n/* ============ ============ ============ */\n/* ============== Queries =============== */\n/* ============ ============ ============ */\nexports.Query = Query;\nexports.query = constructorWrapper(Query);\n\nexports.MatchAllQuery = MatchAllQuery;\nexports.matchAllQuery = constructorWrapper(MatchAllQuery);\n\nexports.MatchNoneQuery = MatchNoneQuery;\nexports.matchNoneQuery = constructorWrapper(MatchNoneQuery);\n\n/* ============ ============ ============ */\n/* ========== Full Text Queries ========= */\n/* ============ ============ ============ */\nexports.MatchQuery = MatchQuery;\nexports.matchQuery = constructorWrapper(MatchQuery);\n\nexports.MatchPhraseQuery = MatchPhraseQuery;\nexports.matchPhraseQuery = constructorWrapper(MatchPhraseQuery);\n\nexports.MatchPhrasePrefixQuery = MatchPhrasePrefixQuery;\nexports.matchPhrasePrefixQuery = constructorWrapper(MatchPhrasePrefixQuery);\n\nexports.MultiMatchQuery = MultiMatchQuery;\nexports.multiMatchQuery = constructorWrapper(MultiMatchQuery);\n\nexports.CommonTermsQuery = CommonTermsQuery;\nexports.commonTermsQuery = constructorWrapper(CommonTermsQuery);\n\nexports.QueryStringQuery = QueryStringQuery;\nexports.queryStringQuery = constructorWrapper(QueryStringQuery);\n\nexports.SimpleQueryStringQuery = SimpleQueryStringQuery;\nexports.simpleQueryStringQuery = constructorWrapper(SimpleQueryStringQuery);\n\nexports.CombinedFieldsQuery = CombinedFieldsQuery;\nexports.combinedFieldsQuery = constructorWrapper(CombinedFieldsQuery);\n\n/* ============ ============ ============ */\n/* 
========= Term Level Queries ========= */\n/* ============ ============ ============ */\nexports.TermQuery = TermQuery;\nexports.termQuery = constructorWrapper(TermQuery);\n\nexports.TermsQuery = TermsQuery;\nexports.termsQuery = constructorWrapper(TermsQuery);\n\nexports.TermsSetQuery = TermsSetQuery;\nexports.termsSetQuery = constructorWrapper(TermsSetQuery);\n\nexports.RangeQuery = RangeQuery;\nexports.rangeQuery = constructorWrapper(RangeQuery);\n\nexports.ExistsQuery = ExistsQuery;\nexports.existsQuery = constructorWrapper(ExistsQuery);\n\nexports.PrefixQuery = PrefixQuery;\nexports.prefixQuery = constructorWrapper(PrefixQuery);\n\nexports.WildcardQuery = WildcardQuery;\nexports.wildcardQuery = constructorWrapper(WildcardQuery);\n\nexports.RegexpQuery = RegexpQuery;\nexports.regexpQuery = constructorWrapper(RegexpQuery);\n\nexports.FuzzyQuery = FuzzyQuery;\nexports.fuzzyQuery = constructorWrapper(FuzzyQuery);\n\nexports.TypeQuery = TypeQuery;\nexports.typeQuery = constructorWrapper(TypeQuery);\n\nexports.IdsQuery = IdsQuery;\nexports.idsQuery = constructorWrapper(IdsQuery);\n\n/* ============ ============ ============ */\n/* ========== Compound Queries ========== */\n/* ============ ============ ============ */\nexports.ConstantScoreQuery = ConstantScoreQuery;\nexports.constantScoreQuery = constructorWrapper(ConstantScoreQuery);\n\nexports.BoolQuery = BoolQuery;\nexports.boolQuery = constructorWrapper(BoolQuery);\n\nexports.DisMaxQuery = DisMaxQuery;\nexports.disMaxQuery = constructorWrapper(DisMaxQuery);\n\nexports.FunctionScoreQuery = FunctionScoreQuery;\nexports.functionScoreQuery = constructorWrapper(FunctionScoreQuery);\n\nexports.BoostingQuery = BoostingQuery;\nexports.boostingQuery = constructorWrapper(BoostingQuery);\n\n/* ============ ============ ============ */\n/* =========== Joining Queries ========== */\n/* ============ ============ ============ */\nexports.NestedQuery = NestedQuery;\nexports.nestedQuery = 
constructorWrapper(NestedQuery);\n\nexports.HasChildQuery = HasChildQuery;\nexports.hasChildQuery = constructorWrapper(HasChildQuery);\n\nexports.HasParentQuery = HasParentQuery;\nexports.hasParentQuery = constructorWrapper(HasParentQuery);\n\nexports.ParentIdQuery = ParentIdQuery;\nexports.parentIdQuery = constructorWrapper(ParentIdQuery);\n\n/* ============ ============ ============ */\n/* ============ Geo Queries ============= */\n/* ============ ============ ============ */\nexports.GeoShapeQuery = GeoShapeQuery;\nexports.geoShapeQuery = constructorWrapper(GeoShapeQuery);\n\nexports.GeoBoundingBoxQuery = GeoBoundingBoxQuery;\nexports.geoBoundingBoxQuery = constructorWrapper(GeoBoundingBoxQuery);\n\nexports.GeoDistanceQuery = GeoDistanceQuery;\nexports.geoDistanceQuery = constructorWrapper(GeoDistanceQuery);\n\nexports.GeoPolygonQuery = GeoPolygonQuery;\nexports.geoPolygonQuery = constructorWrapper(GeoPolygonQuery);\n\n/* ============ ============ ============ */\n/* ======== Specialized Queries ========= */\n/* ============ ============ ============ */\nexports.MoreLikeThisQuery = MoreLikeThisQuery;\nexports.moreLikeThisQuery = constructorWrapper(MoreLikeThisQuery);\n\nexports.ScriptQuery = ScriptQuery;\nexports.scriptQuery = constructorWrapper(ScriptQuery);\n\nexports.ScriptScoreQuery = ScriptScoreQuery;\nexports.scriptScoreQuery = constructorWrapper(ScriptScoreQuery);\n\nexports.PercolateQuery = PercolateQuery;\nexports.percolateQuery = constructorWrapper(PercolateQuery);\n\nexports.DistanceFeatureQuery = DistanceFeatureQuery;\nexports.distanceFeatureQuery = constructorWrapper(DistanceFeatureQuery);\n\nexports.RankFeatureQuery = RankFeatureQuery;\nexports.rankFeatureQuery = constructorWrapper(RankFeatureQuery);\n\n/* ============ ============ ============ */\n/* ============ Span Queries ============ */\n/* ============ ============ ============ */\nexports.SpanTermQuery = SpanTermQuery;\nexports.spanTermQuery = 
constructorWrapper(SpanTermQuery);\n\nexports.SpanMultiTermQuery = SpanMultiTermQuery;\nexports.spanMultiTermQuery = constructorWrapper(SpanMultiTermQuery);\n\nexports.SpanFirstQuery = SpanFirstQuery;\nexports.spanFirstQuery = constructorWrapper(SpanFirstQuery);\n\nexports.SpanNearQuery = SpanNearQuery;\nexports.spanNearQuery = constructorWrapper(SpanNearQuery);\n\nexports.SpanOrQuery = SpanOrQuery;\nexports.spanOrQuery = constructorWrapper(SpanOrQuery);\n\nexports.SpanNotQuery = SpanNotQuery;\nexports.spanNotQuery = constructorWrapper(SpanNotQuery);\n\nexports.SpanContainingQuery = SpanContainingQuery;\nexports.spanContainingQuery = constructorWrapper(SpanContainingQuery);\n\nexports.SpanWithinQuery = SpanWithinQuery;\nexports.spanWithinQuery = constructorWrapper(SpanWithinQuery);\n\nexports.SpanFieldMaskingQuery = SpanFieldMaskingQuery;\nexports.spanFieldMaskingQuery = constructorWrapper(SpanFieldMaskingQuery);\n\nexports.SparseVectorQuery = SparseVectorQuery;\nexports.sparseVectorQuery = constructorWrapper(SparseVectorQuery);\n\nexports.SemanticQuery = SemanticQuery;\nexports.semanticQuery = constructorWrapper(SemanticQuery);\n\n/* ============ ============ ============ */\n/* ======== KNN ======== */\n/* ============ ============ ============ */\nexports.KNN = KNN;\nexports.kNN = constructorWrapper(KNN);\n\n/* ============ ============ ============ */\n/* ======== Metrics Aggregations ======== */\n/* ============ ============ ============ */\nexports.AvgAggregation = AvgAggregation;\nexports.avgAggregation = constructorWrapper(AvgAggregation);\n\nexports.WeightedAverageAggregation = WeightedAverageAggregation;\nexports.weightedAverageAggregation = constructorWrapper(\n    WeightedAverageAggregation\n);\n\nexports.CardinalityAggregation = CardinalityAggregation;\nexports.cardinalityAggregation = constructorWrapper(CardinalityAggregation);\n\nexports.ExtendedStatsAggregation = ExtendedStatsAggregation;\nexports.extendedStatsAggregation = 
constructorWrapper(ExtendedStatsAggregation);\n\nexports.GeoBoundsAggregation = GeoBoundsAggregation;\nexports.geoBoundsAggregation = constructorWrapper(GeoBoundsAggregation);\n\nexports.GeoCentroidAggregation = GeoCentroidAggregation;\nexports.geoCentroidAggregation = constructorWrapper(GeoCentroidAggregation);\n\nexports.MaxAggregation = MaxAggregation;\nexports.maxAggregation = constructorWrapper(MaxAggregation);\n\nexports.MinAggregation = MinAggregation;\nexports.minAggregation = constructorWrapper(MinAggregation);\n\nexports.PercentilesAggregation = PercentilesAggregation;\nexports.percentilesAggregation = constructorWrapper(PercentilesAggregation);\n\nexports.PercentileRanksAggregation = PercentileRanksAggregation;\nexports.percentileRanksAggregation = constructorWrapper(\n    PercentileRanksAggregation\n);\n\nexports.ScriptedMetricAggregation = ScriptedMetricAggregation;\nexports.scriptedMetricAggregation = constructorWrapper(\n    ScriptedMetricAggregation\n);\n\nexports.StatsAggregation = StatsAggregation;\nexports.statsAggregation = constructorWrapper(StatsAggregation);\n\nexports.SumAggregation = SumAggregation;\nexports.sumAggregation = constructorWrapper(SumAggregation);\n\nexports.TopHitsAggregation = TopHitsAggregation;\nexports.topHitsAggregation = constructorWrapper(TopHitsAggregation);\n\nexports.ValueCountAggregation = ValueCountAggregation;\nexports.valueCountAggregation = constructorWrapper(ValueCountAggregation);\n\n/* ============ ============ ============ */\n/* ========= Bucket Aggregations ======== */\n/* ============ ============ ============ */\nexports.AdjacencyMatrixAggregation = AdjacencyMatrixAggregation;\nexports.adjacencyMatrixAggregation = constructorWrapper(\n    AdjacencyMatrixAggregation\n);\n\nexports.ChildrenAggregation = ChildrenAggregation;\nexports.childrenAggregation = constructorWrapper(ChildrenAggregation);\n\nexports.CompositeAggregation = CompositeAggregation;\nexports.compositeAggregation = 
constructorWrapper(CompositeAggregation);\n\nexports.DateHistogramAggregation = DateHistogramAggregation;\nexports.dateHistogramAggregation = constructorWrapper(DateHistogramAggregation);\n\nexports.AutoDateHistogramAggregation = AutoDateHistogramAggregation;\nexports.autoDateHistogramAggregation = constructorWrapper(\n    AutoDateHistogramAggregation\n);\n\nexports.VariableWidthHistogramAggregation = VariableWidthHistogramAggregation;\nexports.variableWidthHistogramAggregation = constructorWrapper(\n    VariableWidthHistogramAggregation\n);\n\nexports.DateRangeAggregation = DateRangeAggregation;\nexports.dateRangeAggregation = constructorWrapper(DateRangeAggregation);\n\nexports.DiversifiedSamplerAggregation = DiversifiedSamplerAggregation;\nexports.diversifiedSamplerAggregation = constructorWrapper(\n    DiversifiedSamplerAggregation\n);\n\nexports.FilterAggregation = FilterAggregation;\nexports.filterAggregation = constructorWrapper(FilterAggregation);\n\nexports.FiltersAggregation = FiltersAggregation;\nexports.filtersAggregation = constructorWrapper(FiltersAggregation);\n\nexports.GeoDistanceAggregation = GeoDistanceAggregation;\nexports.geoDistanceAggregation = constructorWrapper(GeoDistanceAggregation);\n\nexports.GeoHashGridAggregation = GeoHashGridAggregation;\nexports.geoHashGridAggregation = constructorWrapper(GeoHashGridAggregation);\n\nexports.GeoHexGridAggregation = GeoHexGridAggregation;\nexports.geoHexGridAggregation = constructorWrapper(GeoHexGridAggregation);\n\nexports.GeoTileGridAggregation = GeoTileGridAggregation;\nexports.geoTileGridAggregation = constructorWrapper(GeoTileGridAggregation);\n\nexports.GlobalAggregation = GlobalAggregation;\nexports.globalAggregation = constructorWrapper(GlobalAggregation);\n\nexports.HistogramAggregation = HistogramAggregation;\nexports.histogramAggregation = constructorWrapper(HistogramAggregation);\n\nexports.IpRangeAggregation = IpRangeAggregation;\nexports.ipRangeAggregation = 
constructorWrapper(IpRangeAggregation);\n\nexports.MissingAggregation = MissingAggregation;\nexports.missingAggregation = constructorWrapper(MissingAggregation);\n\nexports.NestedAggregation = NestedAggregation;\nexports.nestedAggregation = constructorWrapper(NestedAggregation);\n\nexports.ParentAggregation = ParentAggregation;\nexports.parentAggregation = constructorWrapper(ParentAggregation);\n\nexports.RangeAggregation = RangeAggregation;\nexports.rangeAggregation = constructorWrapper(RangeAggregation);\n\nexports.RareTermsAggregation = RareTermsAggregation;\nexports.rareTermsAggregation = constructorWrapper(RareTermsAggregation);\n\nexports.ReverseNestedAggregation = ReverseNestedAggregation;\nexports.reverseNestedAggregation = constructorWrapper(ReverseNestedAggregation);\n\nexports.SamplerAggregation = SamplerAggregation;\nexports.samplerAggregation = constructorWrapper(SamplerAggregation);\n\nexports.SignificantTermsAggregation = SignificantTermsAggregation;\nexports.significantTermsAggregation = constructorWrapper(\n    SignificantTermsAggregation\n);\n\nexports.SignificantTextAggregation = SignificantTextAggregation;\nexports.significantTextAggregation = constructorWrapper(\n    SignificantTextAggregation\n);\n\nexports.TermsAggregation = TermsAggregation;\nexports.termsAggregation = constructorWrapper(TermsAggregation);\n\n/* ============ ============ ============ */\n/* ======== Pipeline Aggregations ======= */\n/* ============ ============ ============ */\nexports.AvgBucketAggregation = AvgBucketAggregation;\nexports.avgBucketAggregation = constructorWrapper(AvgBucketAggregation);\n\nexports.DerivativeAggregation = DerivativeAggregation;\nexports.derivativeAggregation = constructorWrapper(DerivativeAggregation);\n\nexports.MaxBucketAggregation = MaxBucketAggregation;\nexports.maxBucketAggregation = constructorWrapper(MaxBucketAggregation);\n\nexports.MinBucketAggregation = MinBucketAggregation;\nexports.minBucketAggregation = 
constructorWrapper(MinBucketAggregation);\n\nexports.BucketSortAggregation = BucketSortAggregation;\nexports.bucketSortAggregation = constructorWrapper(BucketSortAggregation);\n\nexports.SumBucketAggregation = SumBucketAggregation;\nexports.sumBucketAggregation = constructorWrapper(SumBucketAggregation);\n\nexports.StatsBucketAggregation = StatsBucketAggregation;\nexports.statsBucketAggregation = constructorWrapper(StatsBucketAggregation);\n\nexports.ExtendedStatsBucketAggregation = ExtendedStatsBucketAggregation;\nexports.extendedStatsBucketAggregation = constructorWrapper(\n    ExtendedStatsBucketAggregation\n);\n\nexports.PercentilesBucketAggregation = PercentilesBucketAggregation;\nexports.percentilesBucketAggregation = constructorWrapper(\n    PercentilesBucketAggregation\n);\n\nexports.MovingAverageAggregation = MovingAverageAggregation;\nexports.movingAverageAggregation = constructorWrapper(MovingAverageAggregation);\n\nexports.MovingFunctionAggregation = MovingFunctionAggregation;\nexports.movingFunctionAggregation = constructorWrapper(\n    MovingFunctionAggregation\n);\n\nexports.CumulativeSumAggregation = CumulativeSumAggregation;\nexports.cumulativeSumAggregation = constructorWrapper(CumulativeSumAggregation);\n\nexports.BucketScriptAggregation = BucketScriptAggregation;\nexports.bucketScriptAggregation = constructorWrapper(BucketScriptAggregation);\n\nexports.BucketSelectorAggregation = BucketSelectorAggregation;\nexports.bucketSelectorAggregation = constructorWrapper(\n    BucketSelectorAggregation\n);\n\nexports.SerialDifferencingAggregation = SerialDifferencingAggregation;\nexports.serialDifferencingAggregation = constructorWrapper(\n    SerialDifferencingAggregation\n);\n\n/* ============ ============ ============ */\n/* ========= Matrix Aggregations ======== */\n/* ============ ============ ============ */\nexports.MatrixStatsAggregation = MatrixStatsAggregation;\nexports.matrixStatsAggregation = constructorWrapper(MatrixStatsAggregation);\n\n/* 
============ ============ ============ */\n/* ========== Score Functions =========== */\n/* ============ ============ ============ */\nexports.ScriptScoreFunction = ScriptScoreFunction;\nexports.scriptScoreFunction = constructorWrapper(ScriptScoreFunction);\n\nexports.WeightScoreFunction = WeightScoreFunction;\nexports.weightScoreFunction = constructorWrapper(WeightScoreFunction);\n\nexports.RandomScoreFunction = RandomScoreFunction;\nexports.randomScoreFunction = constructorWrapper(RandomScoreFunction);\n\nexports.FieldValueFactorFunction = FieldValueFactorFunction;\nexports.fieldValueFactorFunction = constructorWrapper(FieldValueFactorFunction);\n\nexports.DecayScoreFunction = DecayScoreFunction;\nexports.decayScoreFunction = constructorWrapper(DecayScoreFunction);\n\n/* ============ ============ ============ */\n/* ============= Suggesters ============= */\n/* ============ ============ ============ */\n\nexports.TermSuggester = TermSuggester;\nexports.termSuggester = constructorWrapper(TermSuggester);\n\nexports.DirectGenerator = DirectGenerator;\nexports.directGenerator = constructorWrapper(DirectGenerator);\n\nexports.PhraseSuggester = PhraseSuggester;\nexports.phraseSuggester = constructorWrapper(PhraseSuggester);\n\nexports.CompletionSuggester = CompletionSuggester;\nexports.completionSuggester = constructorWrapper(CompletionSuggester);\n\n/* ============ ============ ============ */\n/* ============== Recipes =============== */\n/* ============ ============ ============ */\n\n/**\n * Helper recipes for common query use cases.\n *\n * If you have any recipes, please do share or better yet, create a [pull request](https://help.github.com/articles/creating-a-pull-request-from-a-fork/).\n *\n * Recipes:\n * - [`missingQuery`](/#missingquery)\n * - [`randomSortQuery`](/#randomsortquery)\n * - [`filterQuery`](/#filterquery)\n *\n * These can be accessed under the `recipes` namespace or\n * using the `cook[Recipe Name]` alias for ease of use.\n *\n * @example\n * 
// `recipes` namespace\n * const qry = esb.recipes.missingQuery('user');\n *\n * @example\n * // `cookMissingQuery` alias\n * const qry = esb.cookMissingQuery('user');\n */\nexports.recipes = recipes;\nexports.cookMissingQuery = recipes.missingQuery;\nexports.cookRandomSortQuery = recipes.randomSortQuery;\nexports.cookFilterQuery = recipes.filterQuery;\n\n/* ============ ============ ============ */\n/* ============ Miscellaneous =========== */\n/* ============ ============ ============ */\nexports.Highlight = Highlight;\nexports.highlight = constructorWrapper(Highlight);\n\nexports.Script = Script;\nexports.script = constructorWrapper(Script);\n\nexports.GeoPoint = GeoPoint;\nexports.geoPoint = constructorWrapper(GeoPoint);\n\nexports.GeoShape = GeoShape;\nexports.geoShape = constructorWrapper(GeoShape);\n\nexports.IndexedShape = IndexedShape;\nexports.indexedShape = constructorWrapper(IndexedShape);\n\nexports.Sort = Sort;\nexports.sort = constructorWrapper(Sort);\n\nexports.Rescore = Rescore;\nexports.rescore = constructorWrapper(Rescore);\n\nexports.InnerHits = InnerHits;\nexports.innerHits = constructorWrapper(InnerHits);\n\nexports.SearchTemplate = SearchTemplate;\nexports.searchTemplate = constructorWrapper(SearchTemplate);\n\nexports.RuntimeField = RuntimeField;\nexports.runtimeField = constructorWrapper(RuntimeField);\n\nexports.prettyPrint = function prettyPrint(obj) {\n    console.log(JSON.stringify(obj, null, 2));\n};\n\n/* eslint-enable */\n"
  },
  {
    "path": "src/queries/compound-queries/bool-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType, setDefault, recursiveToJSON }\n} = require('../../core');\n\n/**\n * A query that matches documents matching boolean combinations of other queries.\n * The bool query maps to Lucene `BooleanQuery`. It is built using one or more\n * boolean clauses, each clause with a typed occurrence.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-bool-query.html)\n *\n * @example\n * const qry = esb.boolQuery()\n *     .must(esb.termQuery('user', 'kimchy'))\n *     .filter(esb.termQuery('tag', 'tech'))\n *     .mustNot(esb.rangeQuery('age').gte(10).lte(20))\n *     .should([\n *         esb.termQuery('tag', 'wow'),\n *         esb.termQuery('tag', 'elasticsearch')\n *     ])\n *     .minimumShouldMatch(1)\n *     .boost(1.0);\n *\n * @extends Query\n */\nclass BoolQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('bool');\n    }\n\n    /**\n     * Add given query to list of queries under given clause.\n     *\n     * @private\n     * @param {string} clause\n     * @param {Query} query\n     * @throws {TypeError} If query is not an instance of `Query`\n     */\n    _addQuery(clause, query) {\n        checkType(query, Query);\n\n        this._queryOpts[clause].push(query);\n    }\n\n    /**\n     * Add given query array or query to list of queries under given clause.\n     *\n     * @private\n     * @param {string} clause\n     * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n     * @throws {TypeError} If Array item or query is not an instance of `Query`\n     */\n    _addQueries(clause, queries) {\n        setDefault(this._queryOpts, clause, []);\n\n        if (Array.isArray(queries))\n            queries.forEach(qry => this._addQuery(clause, qry));\n        else this._addQuery(clause, queries);\n    }\n\n    /**\n     * 
Adds `must` query to boolean container.\n     * The clause (query) **must** appear in matching documents and will contribute to the score.\n     *\n     * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n     * @returns {BoolQuery} returns `this` so that calls can be chained.\n     * @throws {TypeError} If Array item or query is not an instance of `Query`\n     */\n    must(queries) {\n        this._addQueries('must', queries);\n        return this;\n    }\n\n    /**\n     * Adds `filter` query to boolean container.\n     * The clause (query) **must** appear in matching documents. However unlike `must` the score\n     * of the query will be ignored. Filter clauses are executed in filter context, meaning that\n     * scoring is ignored and clauses are considered for caching.\n     *\n     * @example\n     * // Assign score of `0` to all documents\n     * const qry = esb.boolQuery().filter(esb.termQuery('status', 'active'));\n     *\n     * // Assign a score of `1.0` to all documents\n     * const qry = esb.boolQuery()\n     *     .must(esb.matchAllQuery())\n     *     .filter(esb.termQuery('status', 'active'));\n     *\n     * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n     * @returns {BoolQuery} returns `this` so that calls can be chained.\n     * @throws {TypeError} If Array item or query is not an instance of `Query`\n     */\n    filter(queries) {\n        this._addQueries('filter', queries);\n        return this;\n    }\n\n    /**\n     * Adds `must_not` query to boolean container.\n     * The clause (query) **must not** appear in the matching documents.\n     * Clauses are executed in filter context meaning that scoring is ignored\n     * and clauses are considered for caching. 
Because scoring is ignored,\n     * a score of 0 for all documents is returned.\n     *\n     * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n     * @returns {BoolQuery} returns `this` so that calls can be chained.\n     * @throws {TypeError} If Array item or query is not an instance of `Query`\n     */\n    mustNot(queries) {\n        this._addQueries('must_not', queries);\n        return this;\n    }\n\n    /**\n     * Adds `should` query to boolean container.\n     * The clause (query) **should** appear in the matching document. In a boolean query with\n     * no must or filter clauses, one or more should clauses must match a document.\n     * The minimum number of should clauses to match can be set using the\n     * `minimum_should_match` parameter.\n     *\n     * @param {Array<Query>|Query} queries List of valid `Query` objects or a `Query` object\n     * @returns {BoolQuery} returns `this` so that calls can be chained.\n     * @throws {TypeError} If Array item or query is not an instance of `Query`\n     */\n    should(queries) {\n        this._addQueries('should', queries);\n        return this;\n    }\n\n    /**\n     * Enables or disables similarity coordinate scoring of documents\n     * commoning the `CommonTermsQuery`. Default: `false`.\n     *\n     * **NOTE**: This has been removed in elasticsearch 6.0. If provided,\n     * it will be ignored and a deprecation warning will be issued.\n     *\n     * @param {boolean} enable\n     * @returns {BoolQuery} returns `this` so that calls can be chained.\n     */\n    disableCoord(enable) {\n        this._queryOpts.disable_coord = enable;\n        return this;\n    }\n\n    /**\n     * Sets the value controlling how many `should` clauses in the boolean\n     * query should match. It can be an absolute value (2), a percentage (30%)\n     * or a combination of both. 
By default no optional clauses are necessary for a match.\n     * However, if the bool query is used in a filter context and it has `should` clauses then,\n     * at least one `should` clause is required to match.\n     *\n     * @param {string|number} minimumShouldMatch An absolute value (2), a percentage (30%)\n     * or a combination of both.\n     * @returns {BoolQuery} returns `this` so that calls can be chained.\n     */\n    minimumShouldMatch(minimumShouldMatch) {\n        this._queryOpts.minimum_should_match = minimumShouldMatch;\n        return this;\n    }\n\n    /**\n     * Sets if the `Query` should be enhanced with a `MatchAllQuery` in order\n     * to act as a pure exclude when only negative (mustNot) clauses exist. Default: true.\n     *\n     * @param {boolean} enable\n     * @returns {BoolQuery} returns `this` so that calls can be chained.\n     */\n    adjustPureNegative(enable) {\n        this._queryOpts.adjust_pure_negative = enable;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the `bool` compound query\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        const clauseKeys = ['must', 'filter', 'must_not', 'should'];\n\n        // Pick the clauses which have some queries\n        const cleanQryOpts = clauseKeys\n            .filter(clause => _.has(this._queryOpts, clause))\n            .reduce(\n                // Unwrap array and put into qryOpts if required\n                (qryOpts, clause) => {\n                    const clauseQueries = this._queryOpts[clause];\n                    qryOpts[clause] = recursiveToJSON(\n                        clauseQueries.length === 1\n                            ? 
_.head(clauseQueries)\n                            : clauseQueries\n                    );\n                    return qryOpts;\n                },\n                // initial value - all key-value except clauses\n                _.omit(this._queryOpts, clauseKeys)\n            );\n\n        return {\n            [this.queryType]: cleanQryOpts\n        };\n    }\n}\n\nmodule.exports = BoolQuery;\n"
  },
  {
    "path": "src/queries/compound-queries/boosting-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * The boosting query can be used to effectively demote results that match\n * a given query. Unlike the \"NOT\" clause in bool query, this still selects\n * documents that contain undesirable terms, but reduces their overall\n * score.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-boosting-query.html)\n *\n * @example\n * const qry = esb.boostingQuery(\n *     esb.termQuery('field1', 'value1'), // positiveQry\n *     esb.termQuery('field2', 'value2'), // negativeQry\n *     0.2 // negativeBoost\n * );\n *\n * @param {Query=} positiveQry A valid `Query` object.\n * @param {Query=} negativeQry A valid `Query` object.\n * @param {number=} negativeBoost A positive `double` value where `0 < n < 1`.\n *\n * @extends Query\n */\nclass BoostingQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(positiveQry, negativeQry, negativeBoost) {\n        super('boosting');\n\n        if (!_.isNil(positiveQry)) this.positive(positiveQry);\n        if (!_.isNil(negativeQry)) this.negative(negativeQry);\n        if (!_.isNil(negativeBoost))\n            this._queryOpts.negative_boost = negativeBoost;\n    }\n\n    /**\n     * Sets the \"master\" query that determines which results are returned.\n     *\n     * @param {Query} query A valid `Query` object.\n     * @returns {BoostingQuery} returns `this` so that calls can be chained.\n     */\n    positive(query) {\n        checkType(query, Query);\n\n        this._queryOpts.positive = query;\n        return this;\n    }\n\n    /**\n     * Sets the query used to match documents in the `positive`\n     * query that will be negatively boosted.\n     *\n     * @param {Query} query A valid `Query` object.\n     * @returns {BoostingQuery} returns `this` so that calls can be chained.\n     */\n    
negative(query) {\n        checkType(query, Query);\n\n        this._queryOpts.negative = query;\n        return this;\n    }\n\n    /**\n     * Sets the negative boost value.\n     *\n     * @param {number} factor A positive `double` value where `0 < n < 1`.\n     * @returns {BoostingQuery} returns `this` so that calls can be chained.\n     */\n    negativeBoost(factor) {\n        this._queryOpts.negative_boost = factor;\n        return this;\n    }\n}\n\nmodule.exports = BoostingQuery;\n"
  },
  {
    "path": "src/queries/compound-queries/constant-score-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * A query that wraps another query and simply returns a constant score\n * equal to the query boost for every document in the filter.\n * Maps to Lucene `ConstantScoreQuery`.\n *\n * Constructs a query where each documents returned by the internal\n * query or filter have a constant score equal to the boost factor.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-constant-score-query.html)\n *\n * @example\n * const qry = esb.constantScoreQuery(esb.termQuery('user', 'kimchy')).boost(1.2);\n *\n * @param {Query=} filterQuery Query to filter on.\n *\n * @extends Query\n */\nclass ConstantScoreQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(filterQuery) {\n        super('constant_score');\n\n        if (!_.isNil(filterQuery)) this.filter(filterQuery);\n    }\n\n    /**\n     * Adds the query to apply a constant score to.\n     *\n     * @param {Query} filterQuery  Query to filter on.\n     * @returns {ConstantScoreQuery} returns `this` so that calls can be chained.\n     */\n    filter(filterQuery) {\n        checkType(filterQuery, Query);\n\n        this._queryOpts.filter = filterQuery;\n        return this;\n    }\n\n    /**\n     * Adds the query to apply a constant score to.\n     * Alias for method `filter`.\n     *\n     * Note: This parameter has been removed in elasticsearch 6.0. Use `filter` instead.\n     *\n     * @param {Query} filterQuery  Query to filter on.\n     * @returns {ConstantScoreQuery} returns `this` so that calls can be chained.\n     */\n    query(filterQuery) {\n        return this.filter(filterQuery);\n    }\n}\n\nmodule.exports = ConstantScoreQuery;\n"
  },
  {
    "path": "src/queries/compound-queries/dis-max-query.js",
    "content": "'use strict';\n\nconst {\n    Query,\n    util: { checkType, setDefault }\n} = require('../../core');\n\n/**\n * A query that generates the union of documents produced by its subqueries,\n * and that scores each document with the maximum score for that document\n * as produced by any subquery, plus a tie breaking increment for\n * any additional matching subqueries.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-dis-max-query.html)\n *\n * @example\n * const qry = esb.disMaxQuery()\n *     .queries([esb.termQuery('age', 34), esb.termQuery('age', 35)])\n *     .tieBreaker(0.7)\n *     .boost(1.2);\n *\n * @example\n * const qry = esb.disMaxQuery()\n *     .queries([\n *         esb.matchQuery('subject', 'brown fox'),\n *         esb.matchQuery('message', 'brown fox')\n *     ])\n *     .tieBreaker(0.3);\n *\n * @extends Query\n */\nclass DisMaxQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('dis_max');\n    }\n\n    /**\n     * Add given query to list of queries under given clause.\n     *\n     * @private\n     * @param {Query} query\n     * @throws {TypeError} If query is not an instance of `Query`\n     */\n    _addQuery(query) {\n        checkType(query, Query);\n\n        this._queryOpts.queries.push(query);\n    }\n\n    /**\n     * The tie breaker value. The tie breaker capability allows results\n     * that include the same term in multiple fields to be judged better than\n     * results that include this term in only the best of those multiple\n     * fields, without confusing this with the better case of two different\n     * terms in the multiple fields. 
Default: `0.0`.\n     *\n     * @param {number} factor\n     * @returns {DisMaxQuery} returns `this` so that calls can be chained.\n     */\n    tieBreaker(factor) {\n        this._queryOpts.tie_breaker = factor;\n        return this;\n    }\n\n    /**\n     * Add given query array or query to list of queries\n     *\n     * @param {Array<Query>|Query} queries Array of valid `Query` objects or a `Query` object\n     * @returns {DisMaxQuery} returns `this` so that calls can be chained.\n     */\n    queries(queries) {\n        setDefault(this._queryOpts, 'queries', []);\n\n        if (Array.isArray(queries)) queries.forEach(qry => this._addQuery(qry));\n        else this._addQuery(queries);\n\n        return this;\n    }\n}\n\nmodule.exports = DisMaxQuery;\n"
  },
  {
    "path": "src/queries/compound-queries/function-score-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType, invalidParam },\n    consts: { SCORE_MODE_SET, BOOST_MODE_SET }\n} = require('../../core');\n\nconst { ScoreFunction } = require('./score-functions');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html';\n\nconst invalidScoreModeParam = invalidParam(\n    ES_REF_URL,\n    'score_mode',\n    SCORE_MODE_SET\n);\nconst invalidBoostModeParam = invalidParam(\n    ES_REF_URL,\n    'boost_mode',\n    BOOST_MODE_SET\n);\n\n/**\n * The `function_score` allows you to modify the score of documents that are\n * retrieved by a query. This can be useful if, for example, a score function\n * is computationally expensive and it is sufficient to compute the score on\n * a filtered set of documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html)\n *\n * @example\n * // `function_score` with only one function\n * const qry = esb.functionScoreQuery()\n *     .query(esb.matchAllQuery())\n *     .function(esb.randomScoreFunction())\n *     .boostMode('multiply')\n *     .boost('5');\n *\n * @example\n * // Several functions combined\n * const qry = esb.functionScoreQuery()\n *     .query(esb.matchAllQuery())\n *     .functions([\n *         esb.randomScoreFunction()\n *             .filter(esb.matchQuery('test', 'bar'))\n *             .weight(23),\n *         esb.weightScoreFunction()\n *             .filter(esb.matchQuery('test', 'cat'))\n *             .weight(42)\n *     ])\n *     .maxBoost(42)\n *     .scoreMode('max')\n *     .boostMode('multiply')\n *     .minScore(42)\n *     .boost('5');\n *\n * @example\n * // Combine decay functions\n * const qry = esb.functionScoreQuery()\n *     .functions([\n *         esb.decayScoreFunction('gauss', 'price').origin('0').scale('20'),\n *         
esb.decayScoreFunction('gauss', 'location')\n *             .origin('11, 12')\n *             .scale('2km')\n *     ])\n *     .query(esb.matchQuery('properties', 'balcony'))\n *     .scoreMode('multiply');\n *\n * @extends Query\n */\nclass FunctionScoreQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('function_score');\n\n        this._queryOpts.functions = [];\n    }\n\n    /**\n     * Sets the source query.\n     *\n     * @param {Query} query A valid `Query` object\n     * @returns {FunctionScoreQuery} returns `this` so that calls can be chained.\n     */\n    query(query) {\n        checkType(query, Query);\n\n        this._queryOpts.query = query;\n        return this;\n    }\n\n    /**\n     * Controls the way the scores are combined.\n     *\n     * @param {string} mode Can be one of `multiply`, `sum`, `first`, `min`, `max`, `avg`.\n     * Defaults to `multiply`.\n     * @returns {FunctionScoreQuery} returns `this` so that calls can be chained.\n     */\n    scoreMode(mode) {\n        if (_.isNil(mode)) invalidScoreModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (!SCORE_MODE_SET.has(modeLower)) {\n            invalidScoreModeParam(mode);\n        }\n\n        this._queryOpts.score_mode = mode;\n        return this;\n    }\n\n    /**\n     * Controls the way the query and function scores are combined.\n     *\n     * @param {string} mode Can be one of `multiply`, `replace`, `sum`, `avg`, `max`, `min`.\n     * Defaults to `multiply`.\n     * @returns {FunctionScoreQuery} returns `this` so that calls can be chained.\n     */\n    boostMode(mode) {\n        if (_.isNil(mode)) invalidBoostModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (!BOOST_MODE_SET.has(modeLower)) {\n            invalidBoostModeParam(mode);\n        }\n\n        this._queryOpts.boost_mode = modeLower;\n        return this;\n    }\n\n    /**\n     * Restricts new score to not 
exceed given limit. The default for `max_boost` is `FLT_MAX`.\n     *\n     * @param {number} limit\n     * @returns {FunctionScoreQuery} returns `this` so that calls can be chained.\n     */\n    maxBoost(limit) {\n        this._queryOpts.max_boost = limit;\n        return this;\n    }\n\n    /**\n     * Sets the minimum score limit for documents to be included in search result.\n     *\n     * @param {number} limit Minimum score threshold\n     * @returns {FunctionScoreQuery} returns `this` so that calls can be chained.\n     */\n    minScore(limit) {\n        this._queryOpts.min_score = limit;\n        return this;\n    }\n\n    /**\n     * Add a single score function to the list of existing functions.\n     *\n     * @param {ScoreFunction} func A valid `ScoreFunction` object.\n     * @returns {FunctionScoreQuery} returns `this` so that calls can be chained.\n     */\n    function(func) {\n        checkType(func, ScoreFunction);\n\n        this._queryOpts.functions.push(func);\n        return this;\n    }\n\n    /**\n     * Adds array of score functions to the list of existing functions.\n     *\n     * @param {Array<ScoreFunction>} funcs An array of valid `ScoreFunction` objects\n     * @returns {FunctionScoreQuery} returns `this` so that calls can be chained.\n     */\n    functions(funcs) {\n        checkType(funcs, Array);\n\n        funcs.forEach(func => this.function(func));\n        return this;\n    }\n}\n\nmodule.exports = FunctionScoreQuery;\n"
  },
  {
    "path": "src/queries/compound-queries/index.js",
    "content": "'use strict';\n\nexports.scoreFunctions = require('./score-functions');\n\nexports.ConstantScoreQuery = require('./constant-score-query');\nexports.BoolQuery = require('./bool-query');\nexports.DisMaxQuery = require('./dis-max-query');\nexports.FunctionScoreQuery = require('./function-score-query');\nexports.BoostingQuery = require('./boosting-query');\n\n// This was deprecated in 5.0, not implementing\n// exports.IndicesQuery = require('./indices-query');\n"
  },
  {
    "path": "src/queries/compound-queries/score-functions/decay-score-function.js",
    "content": "'use strict';\n\nconst _ = require('../../../_');\n\nconst {\n    util: { invalidParam, recursiveToJSON }\n} = require('../../../core');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-decay';\n\nconst ScoreFunction = require('./score-function');\n\nconst invalidModeParam = invalidParam(\n    ES_REF_URL,\n    'mode',\n    \"'linear', 'exp' or 'gauss'\"\n);\n\n/**\n * Decay functions score a document with a function that decays depending on\n * the distance of a numeric field value of the document from a user given\n * origin. This is similar to a range query, but with smooth edges instead of\n * boxes.\n *\n * Supported decay functions are: `linear`, `exp`, and `gauss`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-decay)\n *\n * If no `mode` is supplied, `gauss` will be used.\n *\n * @example\n * // Defaults to decay function `gauss`\n * const decayFunc = esb.decayScoreFunction()\n *     .field('location') // field is a geo_point\n *     .origin('11, 12') // geo format\n *     .scale('2km')\n *     .offset('0km')\n *     .decay(0.33);\n *\n * @example\n * const decayFunc = esb.decayScoreFunction('gauss', 'date')\n *     .origin('2013-09-17')\n *     .scale('10d')\n *     .offset('5d')\n *     .decay(0.5);\n *\n * @param {string=} mode Can be one of `linear`, `exp`, and `gauss`.\n * Defaults to `gauss`.\n * @param {string=} field the document field to run decay function against.\n *\n * @extends ScoreFunction\n */\nclass DecayScoreFunction extends ScoreFunction {\n    // eslint-disable-next-line require-jsdoc\n    constructor(mode = 'gauss', field) {\n        super(mode);\n\n        if (!_.isNil(field)) this._field = field;\n    }\n\n    /**\n     * Set the decay mode.\n     *\n     * @param {string} mode  Can be one of `linear`, `exp`, and `gauss`.\n     * Defaults 
to `gauss`.\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    mode(mode) {\n        if (_.isNil(mode)) invalidModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (\n            modeLower !== 'linear' &&\n            modeLower !== 'exp' &&\n            modeLower !== 'gauss'\n        ) {\n            invalidModeParam(mode);\n        }\n\n        this._name = mode;\n        return this;\n    }\n\n    /**\n     * Sets the decay mode to linear.\n     * Alias for `mode('linear')`\n     *\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    linear() {\n        this._name = 'linear';\n        return this;\n    }\n\n    /**\n     * Sets the decay mode to exp.\n     * Alias for `mode('exp')`\n     *\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    exp() {\n        this._name = 'exp';\n        return this;\n    }\n\n    /**\n     * Sets the decay mode to gauss.\n     * Alias for `mode('gauss')`\n     *\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    gauss() {\n        this._name = 'gauss';\n        return this;\n    }\n\n    /**\n     * Sets the document field to run decay function against.\n     *\n     * @param {string} field the document field to run decay function against.\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._field = field;\n        return this;\n    }\n\n    /**\n     * The point of origin used for calculating distance. Must be given as a number\n     * for numeric field, date for date fields and geo point for geo fields.\n     * Required for geo and numeric field. 
For date fields the default is `now`.\n     * Date math (for example `now-1h`) is supported for origin.\n     *\n     * @param {number|string|Object} origin A valid origin value for the field type.\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    origin(origin) {\n        this._opts.origin = origin;\n        return this;\n    }\n\n    /**\n     * Required for all types. Defines the distance from origin + offset at which\n     * the computed score will equal decay parameter. For geo fields: Can be defined\n     * as number+unit (`1km`, `12m`,…). Default unit is meters. For date fields: Can be\n     * defined as a number+unit (`1h`, `10d`,…). Default unit is milliseconds.\n     * For numeric field: Any number.\n     *\n     * @param {number|string} scale A valid scale value for the field type.\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    scale(scale) {\n        this._opts.scale = scale;\n        return this;\n    }\n\n    /**\n     * If an `offset` is defined, the decay function will only compute the decay function\n     * for documents with a distance greater that the defined offset. 
The default is `0`.\n     *\n     * @param {number|string} offset A valid offset value for the field type.\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    offset(offset) {\n        this._opts.offset = offset;\n        return this;\n    }\n\n    /**\n     * The `decay` parameter defines how documents are scored at the distance given at `scale`.\n     * If no `decay` is defined, documents at the distance `scale` will be scored `0.5`.\n     *\n     * @param {number} decay A decay value as a double.\n     * @returns {DecayScoreFunction} returns `this` so that calls can be chained.\n     */\n    decay(decay) {\n        this._opts.decay = decay;\n        return this;\n    }\n\n    /**\n     * Overrides default `toJSON` to return DSL representation of the decay score function\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        // TODO: If mode/field is not set throw an error.\n        const repr = Object.assign(\n            { [this._name]: { [this._field]: this._opts } },\n            this._body\n        );\n        return recursiveToJSON(repr);\n    }\n}\n\nmodule.exports = DecayScoreFunction;\n"
  },
  {
    "path": "src/queries/compound-queries/score-functions/field-value-factor-function.js",
    "content": "'use strict';\n\nconst _ = require('../../../_');\n\nconst {\n    util: { invalidParam },\n    consts: { FIELD_MODIFIER_SET }\n} = require('../../../core');\n\nconst ScoreFunction = require('./score-function');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-field-value-factor';\n\nconst invaliModifierdParam = invalidParam(\n    ES_REF_URL,\n    'modifier',\n    FIELD_MODIFIER_SET\n);\n\n/**\n * The `field_value_factor` function allows you to use a field from a document\n * to influence the score. It's similar to using the `script_score` function, however,\n * it avoids the overhead of scripting. If used on a multi-valued field, only the\n * first value of the field is used in calculations.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-field-value-factor)\n *\n * @example\n * // Scoring formula - sqrt(1.2 * doc['popularity'].value)\n * const scoreFunc = esb.fieldValueFactorFunction('popularity')\n *     .factor(1.2)\n *     .modifier('sqrt')\n *     .missing(1);\n *\n * @param {string=} field the field to be extracted from the document.\n *\n * @extends ScoreFunction\n */\nclass FieldValueFactorFunction extends ScoreFunction {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field) {\n        super('field_value_factor');\n\n        if (!_.isNil(field)) this._opts.field = field;\n    }\n\n    /**\n     * Sets the field to be extracted from the document.\n     *\n     * @param {string} field the field to be extracted from the document.\n     * @returns {FieldValueFactorFunction} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._opts.field = field;\n        return this;\n    }\n\n    /**\n     * Optional factor to multiply the field value with, defaults to `1`.\n     *\n     * @param {number} factor Factor to 
multiply the field with.\n     * @returns {FieldValueFactorFunction} returns `this` so that calls can be chained.\n     */\n    factor(factor) {\n        this._opts.factor = factor;\n        return this;\n    }\n\n    /**\n     * Modifier to apply to the field value, can be one of: `none`, `log`,\n     * `log1p`, `log2p`, `ln`, `ln1p`, `ln2p`, `square`, `sqrt`, or `reciprocal`.\n     * Defaults to `none`.\n     *\n     * @param {string} mod Modified to apply on field. Can be one of: `none`, `log`,\n     * `log1p`, `log2p`, `ln`, `ln1p`, `ln2p`, `square`, `sqrt`, or `reciprocal`.\n     * Defaults to `none`.\n     * @returns {FieldValueFactorFunction} returns `this` so that calls can be chained.\n     */\n    modifier(mod) {\n        if (_.isNil(mod)) invaliModifierdParam(mod);\n\n        const modLower = mod.toLowerCase();\n        if (!FIELD_MODIFIER_SET.has(modLower)) {\n            invaliModifierdParam(mod);\n        }\n\n        this._opts.modifier = modLower;\n        return this;\n    }\n\n    /**\n     * Value used if the document doesn’t have that field. The modifier and factor\n     * are still applied to it as though it were read from the document.\n     *\n     * @param {number} val To be used with documents which do not have field value.\n     * @returns {FieldValueFactorFunction} returns `this` so that calls can be chained.\n     */\n    missing(val) {\n        this._opts.missing = val;\n        return this;\n    }\n}\n\nmodule.exports = FieldValueFactorFunction;\n"
  },
  {
    "path": "src/queries/compound-queries/score-functions/index.js",
    "content": "'use strict';\n\nexports.ScoreFunction = require('./score-function');\nexports.ScriptScoreFunction = require('./script-score-function');\nexports.WeightScoreFunction = require('./weight-score-function');\nexports.RandomScoreFunction = require('./random-score-function');\nexports.FieldValueFactorFunction = require('./field-value-factor-function');\nexports.DecayScoreFunction = require('./decay-score-function');\n"
  },
  {
    "path": "src/queries/compound-queries/score-functions/random-score-function.js",
    "content": "'use strict';\n\nconst ScoreFunction = require('./score-function');\n\n/**\n * The `random_score` generates scores using a hash of the `_uid` field,\n * with a `seed` for variation. If `seed` is not specified, the current time is used.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-random)\n *\n * @example\n * const scoreFunc = esb.randomScoreFunction().seed(299792458);\n *\n * @extends ScoreFunction\n */\nclass RandomScoreFunction extends ScoreFunction {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('random_score');\n    }\n\n    /**\n     * Sets random seed value.\n     *\n     * @param {number} seed A seed value.\n     * @returns {RandomScoreFunction} returns `this` so that calls can be chained.\n     */\n    seed(seed) {\n        this._opts.seed = seed;\n        return this;\n    }\n}\n\nmodule.exports = RandomScoreFunction;\n"
  },
  {
    "path": "src/queries/compound-queries/score-functions/score-function.js",
    "content": "'use strict';\n\nconst {\n    Query,\n    util: { checkType, recursiveToJSON }\n} = require('../../../core');\n\n/**\n * `ScoreFunction` provides support for common options used across\n * various `ScoreFunction` implementations.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#score-functions)\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} name\n */\nclass ScoreFunction {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name) {\n        this._name = name;\n\n        // Filter, weight go here\n        this._body = {};\n        // Score Function specific options go here\n        this._opts = {};\n    }\n\n    /**\n     * Adds a filter query whose matching documents will have the score function applied.\n     *\n     * @param {Query} filterQry A valid `Query` object.\n     * @returns {ScoreFunction} returns `this` so that calls can be chained.\n     */\n    filter(filterQry) {\n        checkType(filterQry, Query);\n\n        this._body.filter = filterQry;\n        return this;\n    }\n\n    /**\n     * Sets the weight of the score function\n     *\n     * @param {number} weight The weight of this score function.\n     * @returns {ScoreFunction} returns `this` so that calls can be chained.\n     */\n    weight(weight) {\n        this._body.weight = weight;\n        return this;\n    }\n\n    /**\n     * Overrides default `toJSON` to return DSL representation of the score function\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        const repr = Object.assign({ [this._name]: this._opts }, this._body);\n        return recursiveToJSON(repr);\n    }\n}\n\nmodule.exports = ScoreFunction;\n"
  },
  {
    "path": "src/queries/compound-queries/score-functions/script-score-function.js",
    "content": "'use strict';\n\nconst _ = require('../../../_');\n\nconst ScoreFunction = require('./score-function');\n\n/**\n * The `script_score` function allows you to wrap another query and customize\n * the scoring of it optionally with a computation derived from other numeric\n * field values in the doc using a script expression.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-script-score)\n *\n * @example\n * const scoreFunc = esb.scriptScoreFunction(\n *     esb.script('inline', \"_score * doc['my_numeric_field'].value\")\n *         .lang('painless')\n * );\n *\n * @example\n * // Script with parameters\n * const scoreFunc = esb.scriptScoreFunction(\n *     esb.script(\n *         'inline',\n *         \"_score * doc['my_numeric_field'].value / Math.pow(params.param1, params.param2)\"\n *     )\n *         .lang('painless')\n *         .params({ param1: 'value1', param2: 'value2' })\n * );\n *\n * @param {Script|string} script\n *\n * @extends ScoreFunction\n */\nclass ScriptScoreFunction extends ScoreFunction {\n    // eslint-disable-next-line require-jsdoc\n    constructor(script) {\n        super('script_score');\n\n        if (!_.isNil(script)) this._opts.script = script;\n    }\n\n    /**\n     *\n     * @param {Script|string} script\n     * @returns {ScriptScoreFunction} returns `this` so that calls can be chained.\n     */\n    script(script) {\n        this._opts.script = script;\n        return this;\n    }\n}\n\nmodule.exports = ScriptScoreFunction;\n"
  },
  {
    "path": "src/queries/compound-queries/score-functions/weight-score-function.js",
    "content": "'use strict';\n\nconst _ = require('../../../_');\n\nconst ScoreFunction = require('./score-function');\n\nconst {\n    util: { recursiveToJSON }\n} = require('../../../core');\n\n/**\n * The `weight` score allows you to multiply the score by the provided `weight`.\n * This can sometimes be desired since boost value set on specific queries gets\n * normalized, while for this score function it does not.\n * The number value is of type float.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-weight)\n *\n * @example\n * const scoreFunc = esb.weightScoreFunction(42);\n *\n * @param {number=} weight The weight of this score function.\n * @extends ScoreFunction\n */\nclass WeightScoreFunction extends ScoreFunction {\n    // eslint-disable-next-line require-jsdoc\n    constructor(weight) {\n        /*\n            null to `super` is intentional.\n            The following is a valid score function\n            It doesn't have a name field\n            {\n                \"filter\": { \"match\": { \"test\": \"cat\" } },\n                \"weight\": 42\n            }\n        */\n        super(null);\n\n        if (!_.isNil(weight)) this._body.weight = weight;\n    }\n\n    /**\n     * Overrides default `toJSON` to return DSL representation of the score function\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return recursiveToJSON(this._body);\n    }\n}\n\nmodule.exports = WeightScoreFunction;\n"
  },
  {
    "path": "src/queries/full-text-queries/combined-fields-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType, invalidParam }\n} = require('../../core');\nconst FullTextQueryBase = require('./full-text-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-combined-fields-query.html';\n\nconst invalidOperatorParam = invalidParam(\n    ES_REF_URL,\n    'operator',\n    \"'and' or 'or'\"\n);\nconst invalidZeroTermsQueryParam = invalidParam(\n    ES_REF_URL,\n    'zero_terms_query',\n    \"'all' or 'none'\"\n);\n\n/**\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-combined-fields-query.html)\n *\n * @example\n * const qry = esb.combinedFieldsQuery(['subject', 'message'], 'this is a test');\n *\n * NOTE: This query was added in elasticsearch v7.13.\n *\n * @param {Array<string>|string=} fields The fields to be queried\n * @param {string=} queryString The query string\n *\n * @extends FullTextQueryBase\n */\nclass CombinedFieldsQuery extends FullTextQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(fields, queryString) {\n        super('combined_fields', queryString);\n\n        // This field is required\n        // Avoid checking for key in `this.field`\n        this._queryOpts.fields = [];\n\n        if (!_.isNil(fields)) {\n            if (Array.isArray(fields)) this.fields(fields);\n            else this.field(fields);\n        }\n    }\n\n    /**\n     * Appends given field to the list of fields to search against.\n     * Fields can be specified with wildcards.\n     * Individual fields can be boosted with the caret (^) notation.\n     * Example - `\"subject^3\"`\n     *\n     * @param {string} field One of the fields to be queried\n     * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._queryOpts.fields.push(field);\n        return this;\n    }\n\n    /**\n     * 
Appends given fields to the list of fields to search against.\n     * Fields can be specified with wildcards.\n     * Individual fields can be boosted with the caret (^) notation.\n     *\n     * @example\n     * // Boost individual fields with caret `^` notation\n     * const qry = esb.combinedFieldsQuery(['subject^3', 'message'], 'this is a test');\n     *\n     * @example\n     * // Specify fields with wildcards\n     * const qry = esb.combinedFieldsQuery(['title', '*_name'], 'Will Smith');\n     *\n     * @param {Array<string>} fields The fields to be queried\n     * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n     */\n    fields(fields) {\n        checkType(fields, Array);\n\n        this._queryOpts.fields = this._queryOpts.fields.concat(fields);\n        return this;\n    }\n\n    /**\n     * If true, match phrase queries are automatically created for multi-term synonyms.\n     *\n     * @param {boolean} enable Defaults to `true`\n     * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n     */\n    autoGenerateSynonymsPhraseQuery(enable) {\n        this._queryOpts.auto_generate_synonyms_phrase_query = enable;\n        return this;\n    }\n\n    /**\n     * The operator to be used in the boolean query which is constructed\n     * by analyzing the text provided. The `operator` flag can be set to `or` or\n     * `and` to control the boolean clauses (defaults to `or`).\n     *\n     * @param {string} operator Can be `and`/`or`. 
Default is `or`.\n     * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n     */\n    operator(operator) {\n        if (_.isNil(operator)) invalidOperatorParam(operator);\n\n        const operatorLower = operator.toLowerCase();\n        if (operatorLower !== 'and' && operatorLower !== 'or') {\n            invalidOperatorParam(operator);\n        }\n\n        this._queryOpts.operator = operatorLower;\n        return this;\n    }\n\n    /**\n     * If the analyzer used removes all tokens in a query like a `stop` filter does,\n     * the default behavior is to match no documents at all. In order to change that\n     * the `zero_terms_query` option can be used, which accepts `none` (default) and `all`\n     * which corresponds to a `match_all` query.\n     *\n     * @example\n     * const qry = esb.combinedFieldsQuery('message', 'to be or not to be')\n     *     .operator('and')\n     *     .zeroTermsQuery('all');\n     *\n     * @param {string} behavior A no match action, `all` or `none`. Default is `none`.\n     * @returns {CombinedFieldsQuery} returns `this` so that calls can be chained.\n     */\n    zeroTermsQuery(behavior) {\n        if (_.isNil(behavior)) invalidZeroTermsQueryParam(behavior);\n\n        const behaviorLower = behavior.toLowerCase();\n        if (behaviorLower !== 'all' && behaviorLower !== 'none') {\n            invalidZeroTermsQueryParam(behavior);\n        }\n\n        this._queryOpts.zero_terms_query = behaviorLower;\n        return this;\n    }\n}\n\nmodule.exports = CombinedFieldsQuery;\n"
  },
  {
    "path": "src/queries/full-text-queries/common-terms-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { invalidParam, setDefault }\n} = require('../../core');\n\nconst MonoFieldQueryBase = require('./mono-field-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-common-terms-query.html';\n\nconst invalidLowFreqOpParam = invalidParam(\n    ES_REF_URL,\n    'low_freq_operator',\n    \"'and' or 'or'\"\n);\nconst invalidHighFreqOpParam = invalidParam(\n    ES_REF_URL,\n    'high_freq_operator',\n    \"'and' or 'or'\"\n);\n\n/**\n * The `common` terms query is a modern alternative to stopwords which\n * improves the precision and recall of search results (by taking\n * stopwords into account), without sacrificing performance.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-common-terms-query.html)\n *\n * @example\n * const qry = esb.commonTermsQuery('body','this is bonsai cool')\n *     .cutoffFrequency(0.001);\n *\n * @param {string=} field The document field to query against\n * @param {string=} queryString The query string\n *\n * @extends MonoFieldQueryBase\n */\nclass CommonTermsQuery extends MonoFieldQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, queryString) {\n        super('common', field, queryString);\n    }\n\n    /**\n     * Print warning message to console namespaced by class name.\n     *\n     * @param {string} msg\n     * @private\n     */\n    _warn(msg) {\n        console.warn(`[CommonTermsQuery] ${msg}`);\n    }\n\n    /**\n     * Print warning messages to not mix Geo Point representations\n     * @private\n     */\n    _warnMixedRepr() {\n        this._warn('Do not mix with other representation!');\n        this._warn('Overwriting.');\n    }\n\n    /**\n     * Check the instance for object representation of Geo Point.\n     * If representation is null, new object is initialised.\n     * If it is not null, 
warning is logged and point is overwritten.\n     * @private\n     */\n    _checkMinMatchRepr() {\n        if (\n            !setDefault(this._queryOpts, 'minimum_should_match', {}) &&\n            !_.isObject(this._queryOpts.minimum_should_match)\n        ) {\n            this._warnMixedRepr();\n            this._queryOpts.minimum_should_match = {};\n        }\n    }\n\n    /**\n     * Allows specifying an absolute or relative document frequency where high frequency\n     * terms are moved into an optional subquery and are only scored if one of the\n     * low frequency (below the cutoff) terms in the case of an `or` operator or\n     * all of the low frequency terms in the case of an `and` operator match.\n     *\n     * @param {number} frequency It can either be relative to the total number of documents\n     * if in the range `[0..1)` or absolute if greater or equal to `1.0`.\n     * @returns {CommonTermsQuery} returns `this` so that calls can be chained.\n     */\n    cutoffFrequency(frequency) {\n        this._queryOpts.cutoff_frequency = frequency;\n        return this;\n    }\n\n    /**\n     * The operator to be used on low frequency terms in the boolean query\n     * which is constructed by analyzing the text provided. The `operator` flag\n     * can be set to `or` or `and` to control the boolean clauses (defaults to `or`).\n     *\n     * @example\n     * const qry = esb.commonTermsQuery('body', 'nelly the elephant as a cartoon')\n     *     .lowFreqOperator('and')\n     *     .cutoffFrequency(0.001);\n     *\n     * @param {string} operator Can be `and`/`or`. 
Default is `or`.\n     * @returns {CommonTermsQuery} returns `this` so that calls can be chained.\n     */\n    lowFreqOperator(operator) {\n        if (_.isNil(operator)) invalidLowFreqOpParam(operator);\n\n        const operatorLower = operator.toLowerCase();\n        if (operatorLower !== 'and' && operatorLower !== 'or') {\n            invalidLowFreqOpParam(operator);\n        }\n\n        this._queryOpts.low_freq_operator = operatorLower;\n        return this;\n    }\n\n    /**\n     * The operator to be used on high frequency terms in the boolean query\n     * which is constructed by analyzing the text provided. The `operator` flag\n     * can be set to `or` or `and` to control the boolean clauses (defaults to `or`).\n     *\n     * @param {string} operator Can be `and`/`or`. Default is `or`.\n     * @returns {CommonTermsQuery} returns `this` so that calls can be chained.\n     */\n    highFreqOperator(operator) {\n        if (_.isNil(operator)) invalidHighFreqOpParam(operator);\n\n        const operatorLower = operator.toLowerCase();\n        if (operatorLower !== 'and' && operatorLower !== 'or') {\n            invalidHighFreqOpParam(operator);\n        }\n\n        this._queryOpts.high_freq_operator = operatorLower;\n        return this;\n    }\n\n    /**\n     * Sets the value controlling how many \"should\" clauses in the resulting boolean\n     * query should match for low frequency terms. 
It can be an absolute value (2),\n     * a percentage (30%) or a combination of both.\n     *\n     * @example\n     * const qry = esb.commonTermsQuery('body', 'nelly the elephant as a cartoon')\n     *     .lowFreq(2)\n     *     .highFreq(3)\n     *     .cutoffFrequency(0.001);\n     *\n     * @param {string|number} lowFreqMinMatch\n     * @returns {CommonTermsQuery} returns `this` so that calls can be chained.\n     */\n    lowFreq(lowFreqMinMatch) {\n        this._checkMinMatchRepr();\n\n        this._queryOpts.minimum_should_match.low_freq = lowFreqMinMatch;\n        return this;\n    }\n\n    /**\n     * Sets the value controlling how many \"should\" clauses in the resulting boolean\n     * query should match for high frequency terms. It can be an absolute value (2),\n     * a percentage (30%) or a combination of both.\n     *\n     * @example\n     * const qry = esb.commonTermsQuery('body', 'nelly the elephant as a cartoon')\n     *     .lowFreq(2)\n     *     .highFreq(3)\n     *     .cutoffFrequency(0.001);\n     *\n     * @param {string|number} highFreqMinMatch\n     * @returns {CommonTermsQuery} returns `this` so that calls can be chained.\n     */\n    highFreq(highFreqMinMatch) {\n        this._checkMinMatchRepr();\n\n        this._queryOpts.minimum_should_match.high_freq = highFreqMinMatch;\n        return this;\n    }\n\n    /**\n     * Enables or disables similarity coordinate scoring of documents\n     * commoning the `CommonTermsQuery`. Default: `false`.\n     *\n     * **NOTE**: This has been removed in elasticsearch 6.0. If provided,\n     * it will be ignored and a deprecation warning will be issued.\n     *\n     * @param {boolean} enable\n     * @returns {CommonTermsQuery} returns `this` so that calls can be chained.\n     */\n    disableCoord(enable) {\n        this._queryOpts.disable_coord = enable;\n        return this;\n    }\n}\n\nmodule.exports = CommonTermsQuery;\n"
  },
  {
    "path": "src/queries/full-text-queries/full-text-query-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst { Query } = require('../../core');\n\n/**\n * The `FullTextQueryBase` provides support for common options used across\n * various full text query implementations.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} queryType\n * @param {string=} queryString The query string\n *\n * @extends Query\n */\nclass FullTextQueryBase extends Query {\n    /*\n        Common options:\n        analyzer - applicable on all\n        minimum_should_match - applicable on all except Match Phrase and Match Phrase Prefix\n        query - applicable on all\n    */\n\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryType, queryString) {\n        super(queryType);\n\n        if (!_.isNil(queryString)) this._queryOpts.query = queryString;\n    }\n\n    /**\n     * Set the analyzer to control which analyzer will perform the analysis process on the text\n     *\n     * @example\n     * const qry = esb.matchPhraseQuery('message', 'this is a test')\n     *     .analyzer('my_analyzer');\n     *\n     * @example\n     * const qry = esb.multiMatchQuery(['first', 'last', '*.edge'], 'Jon')\n     *     .type('cross_fields')\n     *     .analyzer('standard');\n     *\n     * @param {string} analyzer\n     * @returns {FullTextQueryBase} returns `this` so that calls can be chained.\n     */\n    analyzer(analyzer) {\n        this._queryOpts.analyzer = analyzer;\n        return this;\n    }\n\n    /**\n     * Sets the value controlling how many \"should\" clauses in the resulting boolean\n     * query should match. It can be an absolute value (2), a percentage (30%)\n     * or a combination of both. 
For Common Terms Query when specifying different\n     * `minimum_should_match` for low and high frequency terms, an object with the\n     * keys `low_freq` and `high_freq` can be used.\n     *\n     * @example\n     * const qry = esb.commonTermsQuery('body', 'nelly the elephant as a cartoon')\n     *     .minimumShouldMatch(2)\n     *     .cutoffFrequency(0.001);\n     *\n     * @param {string|number|Object} minimumShouldMatch\n     * Note: Object notation can only be used with Common Terms Query.\n     * @returns {FullTextQueryBase} returns `this` so that calls can be chained.\n     */\n    minimumShouldMatch(minimumShouldMatch) {\n        this._queryOpts.minimum_should_match = minimumShouldMatch;\n        return this;\n    }\n\n    /**\n     * Sets the query string.\n     *\n     * @example\n     * const qry = esb.queryStringQuery()\n     *     .query('city.\\\\*:(this AND that OR thus)')\n     *     .useDisMax(true);\n     *\n     * @param {string} queryString\n     * @returns {FullTextQueryBase} returns `this` so that calls can be chained.\n     */\n    query(queryString) {\n        this._queryOpts.query = queryString;\n        return this;\n    }\n}\n\nmodule.exports = FullTextQueryBase;\n"
  },
  {
    "path": "src/queries/full-text-queries/index.js",
    "content": "'use strict';\n\nexports.FullTextQueryBase = require('./full-text-query-base');\nexports.MatchPhraseQueryBase = require('./match-phrase-query-base');\nexports.MonoFieldQueryBase = require('./mono-field-query-base');\nexports.QueryStringQueryBase = require('./query-string-query-base');\n\nexports.MatchQuery = require('./match-query');\nexports.MatchPhraseQuery = require('./match-phrase-query');\nexports.MatchPhrasePrefixQuery = require('./match-phrase-prefix-query');\nexports.MultiMatchQuery = require('./multi-match-query');\nexports.CommonTermsQuery = require('./common-terms-query');\nexports.QueryStringQuery = require('./query-string-query');\nexports.SimpleQueryStringQuery = require('./simple-query-string-query');\nexports.CombinedFieldsQuery = require('./combined-fields-query');\n"
  },
  {
    "path": "src/queries/full-text-queries/match-phrase-prefix-query.js",
    "content": "'use strict';\n\nconst MatchPhraseQueryBase = require('./match-phrase-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-query-phrase-prefix.html';\n\n/**\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-query-phrase-prefix.html)\n *\n * @example\n * const qry = esb.matchPhrasePrefixQuery('message', 'quick brown f');\n *\n * @param {string=} field The document field to query against\n * @param {string=} queryString The query string\n *\n * @extends MatchPhraseQueryBase\n */\nclass MatchPhrasePrefixQuery extends MatchPhraseQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, queryString) {\n        super('match_phrase_prefix', ES_REF_URL, field, queryString);\n    }\n\n    /**\n     * Control to how many prefixes the last term will be expanded.\n     *\n     * @example\n     * const qry = esb.matchPhrasePrefixQuery('message', 'quick brown f')\n     *     .maxExpansions(10);\n     *\n     * @param {number} limit Defaults to 50.\n     * @returns {MatchPhrasePrefixQuery} returns `this` so that calls can be chained.\n     */\n    maxExpansions(limit) {\n        this._queryOpts.max_expansions = limit;\n        return this;\n    }\n}\n\nmodule.exports = MatchPhrasePrefixQuery;\n"
  },
  {
    "path": "src/queries/full-text-queries/match-phrase-query-base.js",
    "content": "'use strict';\n\nconst MonoFieldQueryBase = require('./mono-field-query-base');\n\n/**\n * The `MatchPhraseQueryBase` provides support for common options used across\n * various bucket match phrase query implementations.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} queryType\n * @param {string} refUrl\n * @param {string=} field The document field to query against\n * @param {string=} queryString The query string\n *\n * @extends MonoFieldQueryBase\n */\nclass MatchPhraseQueryBase extends MonoFieldQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryType, refUrl, field, queryString) {\n        super(queryType, field, queryString);\n\n        this._refUrl = refUrl;\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on `MatchPhraseQueryBase`\n     */\n    minimumShouldMatch() {\n        console.log(`Please refer ${this._refUrl}`);\n        throw new Error(\n            `minimumShouldMatch is not supported in ${this.constructor.name}`\n        );\n    }\n\n    /**\n     * Configures the `slop`(default is 0) for matching terms in any order.\n     * Transposed terms have a slop of 2.\n     *\n     * @param {number} slop A positive integer value, defaults is 0.\n     * @returns {MatchPhraseQueryBase} returns `this` so that calls can be chained.\n     */\n    slop(slop) {\n        this._queryOpts.slop = slop;\n        return this;\n    }\n}\n\nmodule.exports = MatchPhraseQueryBase;\n"
  },
  {
    "path": "src/queries/full-text-queries/match-phrase-query.js",
    "content": "'use strict';\n\nconst MatchPhraseQueryBase = require('./match-phrase-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-query-phrase.html';\n\n/**\n * The `match_phrase` query analyzes the text and creates a `phrase` query out of\n * the analyzed text.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-query-phrase.html)\n *\n * @example\n * const qry = esb.matchPhraseQuery('message', 'to be or not to be');\n *\n * @param {string=} field The document field to query against\n * @param {string=} queryString The query string\n *\n * @extends MatchPhraseQueryBase\n */\nclass MatchPhraseQuery extends MatchPhraseQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, queryString) {\n        super('match_phrase', ES_REF_URL, field, queryString);\n    }\n}\n\nmodule.exports = MatchPhraseQuery;\n"
  },
  {
    "path": "src/queries/full-text-queries/match-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { invalidParam }\n} = require('../../core');\nconst MonoFieldQueryBase = require('./mono-field-query-base');\nconst { validateRewiteMethod } = require('../helper');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-query.html';\n\nconst invalidOperatorParam = invalidParam(\n    ES_REF_URL,\n    'operator',\n    \"'and' or 'or'\"\n);\nconst invalidZeroTermsQueryParam = invalidParam(\n    ES_REF_URL,\n    'zero_terms_query',\n    \"'all' or 'none'\"\n);\n\n/**\n * `match` query accepts text/numerics/dates, analyzes them, and constructs a query.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-query.html)\n *\n * @param {string=} field The document field to query against\n * @param {string=} queryString The query string\n *\n * @example\n * const matchQry = esb.matchQuery('message', 'to be or not to be');\n *\n * @example\n * // Providing additional parameters:\n * const qry = esb.matchQuery('message', 'this is a test').operator('and');\n *\n * @extends MonoFieldQueryBase\n */\nclass MatchQuery extends MonoFieldQueryBase {\n    // NOTE: Did not add methods for `slop`, `phrase_slop` and `type`.\n    // These are deprecated.\n\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, queryString) {\n        super('match', field, queryString);\n    }\n\n    /**\n     * The operator to be used in the boolean query which is constructed\n     * by analyzing the text provided. The `operator` flag can be set to `or` or\n     * `and` to control the boolean clauses (defaults to `or`).\n     *\n     * @param {string} operator Can be `and`/`or`. 
Default is `or`.\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     */\n    operator(operator) {\n        if (_.isNil(operator)) invalidOperatorParam(operator);\n\n        const operatorLower = operator.toLowerCase();\n        if (operatorLower !== 'and' && operatorLower !== 'or') {\n            invalidOperatorParam(operator);\n        }\n\n        this._queryOpts.operator = operatorLower;\n        return this;\n    }\n\n    /**\n     * Sets the `lenient` parameter which allows to ignore exceptions caused\n     * by data-type mismatches such as trying to query a numeric field with a\n     * text query string when set to `true`.\n     *\n     * @param {boolean} enable Defaules to `false`\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     */\n    lenient(enable) {\n        this._queryOpts.lenient = enable;\n        return this;\n    }\n\n    /**\n     * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n     * the number of one character changes that need to be made to one string to make it\n     * the same as another string.\n     *\n     * @param {number|string} factor Can be specified either as a number, or the maximum\n     * number of edits, or as `AUTO` which generates an edit distance based on the length\n     * of the term.\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     */\n    fuzziness(factor) {\n        this._queryOpts.fuzziness = factor;\n        return this;\n    }\n\n    /**\n     * Sets the prefix length for a fuzzy prefix `MatchQuery`\n     *\n     * @param {number} len\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     */\n    prefixLength(len) {\n        this._queryOpts.prefix_length = len;\n        return this;\n    }\n\n    /**\n     * Sets the max expansions for a fuzzy prefix `MatchQuery`\n     *\n     * @param {number} limit\n     * @returns {MatchQuery} returns `this` so that calls can 
be chained.\n     */\n    maxExpansions(limit) {\n        this._queryOpts.max_expansions = limit;\n        return this;\n    }\n\n    /**\n     * Sets the rewrite method. Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * This is an advanced option, use with care.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `rewrite` method is not valid.\n     */\n    rewrite(method) {\n        validateRewiteMethod(method, 'rewrite', ES_REF_URL);\n\n        this._queryOpts.rewrite = method;\n        return this;\n    }\n\n    /**\n     * Sets the fuzzy rewrite method. 
Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. 
Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * This is an advanced option, use with care.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `fuzzy_rewrite` method is not valid.\n     */\n    fuzzyRewrite(method) {\n        validateRewiteMethod(method, 'fuzzy_rewrite', ES_REF_URL);\n\n        this._queryOpts.fuzzy_rewrite = method;\n        return this;\n    }\n\n    /**\n     * Fuzzy transpositions (`ab` → `ba`) are allowed by default but can be disabled\n     * by setting `fuzzy_transpositions` to false.\n     * @param {boolean} enable\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     */\n    fuzzyTranspositions(enable) {\n        this._queryOpts.fuzzy_transpositions = enable;\n        return this;\n    }\n\n    /**\n     * If the analyzer used removes all tokens in a query like a `stop` filter does,\n     * the default behavior is to match no documents at all. In order to change that\n     * the `zero_terms_query` option can be used, which accepts `none` (default) and `all`\n     * which corresponds to a `match_all` query.\n     *\n     * @example\n     * const qry = esb.matchQuery('message', 'to be or not to be')\n     *     .operator('and')\n     *     .zeroTermsQuery('all');\n     *\n     * @param {string} behavior A no match action, `all` or `none`. 
Default is `none`.\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     */\n    zeroTermsQuery(behavior) {\n        if (_.isNil(behavior)) invalidZeroTermsQueryParam(behavior);\n\n        const behaviorLower = behavior.toLowerCase();\n        if (behaviorLower !== 'all' && behaviorLower !== 'none') {\n            invalidZeroTermsQueryParam(behavior);\n        }\n\n        this._queryOpts.zero_terms_query = behaviorLower;\n        return this;\n    }\n\n    /**\n     * Allows specifying an absolute or relative document frequency where high frequency\n     * terms are moved into an optional subquery and are only scored if one of the\n     * low frequency (below the cutoff) terms in the case of an `or` operator or\n     * all of the low frequency terms in the case of an `and` operator match.\n     *\n     * @example\n     * const qry = esb.matchQuery('message', 'to be or not to be')\n     *     .cutoffFrequency(0.001);\n     *\n     * @param {number} frequency It can either be relative to the total number of documents\n     * if in the range `[0..1)` or absolute if greater or equal to `1.0`.\n     * @returns {MatchQuery} returns `this` so that calls can be chained.\n     */\n    cutoffFrequency(frequency) {\n        this._queryOpts.cutoff_frequency = frequency;\n        return this;\n    }\n}\n\nmodule.exports = MatchQuery;\n"
  },
  {
    "path": "src/queries/full-text-queries/mono-field-query-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst FullTextQueryBase = require('./full-text-query-base');\n\n/**\n * The `MonoFieldQueryBase` provides support for common options used across\n * various full text query implementations with single search field.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} queryType\n * @param {string=} field The document field to query against\n * @param {string=} queryString The query string\n *\n * @extends FullTextQueryBase\n */\nclass MonoFieldQueryBase extends FullTextQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryType, field, queryString) {\n        super(queryType, queryString);\n\n        if (!_.isNil(field)) this._field = field;\n    }\n\n    /**\n     * Sets the field to search on.\n     *\n     * @param {string} field\n     * @returns {MonoFieldQueryBase} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._field = field;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the Full text query\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        // recursiveToJSON doesn't seem to be required here.\n\n        // Revisit this.. Smells a little bit\n        if (!_.has(this._queryOpts, 'query')) {\n            throw new Error('Query string is required for full text query!');\n        }\n\n        const queryOptKeys = Object.keys(this._queryOpts);\n        const qryOpts =\n            queryOptKeys.length === 1 ? 
this._queryOpts.query : this._queryOpts;\n\n        const repr = {\n            [this.queryType]: {\n                [this._field]: qryOpts\n            }\n        };\n        return repr;\n    }\n}\n\nmodule.exports = MonoFieldQueryBase;\n"
  },
  {
    "path": "src/queries/full-text-queries/multi-match-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType, invalidParam },\n    consts: { MULTI_MATCH_TYPE }\n} = require('../../core');\nconst FullTextQueryBase = require('./full-text-query-base');\nconst { validateRewiteMethod } = require('../helper');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-multi-match-query.html';\n\nconst invalidTypeParam = invalidParam(ES_REF_URL, 'type', MULTI_MATCH_TYPE);\nconst invalidOperatorParam = invalidParam(\n    ES_REF_URL,\n    'operator',\n    \"'and' or 'or'\"\n);\nconst invalidBehaviorParam = invalidParam(\n    ES_REF_URL,\n    'behavior',\n    \"'all' or 'none'\"\n);\n\n/**\n * A `MultiMatchQuery` query builds further on top of the\n * `MultiMatchQuery` by allowing multiple fields to be specified.\n * The idea here is to allow to more easily build a concise match type query\n * over multiple fields instead of using a relatively more expressive query\n * by using multiple match queries within a bool query.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-multi-match-query.html)\n *\n * @example\n * const qry = esb.multiMatchQuery(['subject', 'message'], 'this is a test');\n *\n * @param {Array<string>|string=} fields The fields to be queried\n * @param {string=} queryString The query string\n *\n * @extends FullTextQueryBase\n */\nclass MultiMatchQuery extends FullTextQueryBase {\n    // Extremely similar to match query.\n    // mixins are one way to go about it.\n    // repeating code for now\n\n    // eslint-disable-next-line require-jsdoc\n    constructor(fields, queryString) {\n        super('multi_match', queryString);\n\n        // This field is required\n        // Avoid checking for key in `this.field`\n        this._queryOpts.fields = [];\n\n        if (!_.isNil(fields)) {\n            if (Array.isArray(fields)) this.fields(fields);\n            else 
this.field(fields);\n        }\n    }\n\n    /**\n     * Appends given field to the list of fields to search against.\n     * Fields can be specified with wildcards.\n     * Individual fields can be boosted with the caret (^) notation.\n     * Example - `\"subject^3\"`\n     *\n     * @param {string} field One of the fields to be queried\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._queryOpts.fields.push(field);\n        return this;\n    }\n\n    /**\n     * Appends given fields to the list of fields to search against.\n     * Fields can be specified with wildcards.\n     * Individual fields can be boosted with the caret (^) notation.\n     *\n     * @example\n     * // Boost individual fields with caret `^` notation\n     * const qry = esb.multiMatchQuery(['subject^3', 'message'], 'this is a test');\n     *\n     * @example\n     * // Specify fields with wildcards\n     * const qry = esb.multiMatchQuery(['title', '*_name'], 'Will Smith');\n     *\n     * @param {Array<string>} fields The fields to be queried\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    fields(fields) {\n        checkType(fields, Array);\n\n        this._queryOpts.fields = this._queryOpts.fields.concat(fields);\n        return this;\n    }\n\n    /**\n     * Sets the type of multi match query. Valid values are:\n     * - `best_fields` - (default) Finds documents which match any field,\n     * but uses the `_score` from the best field.\n     *\n     * - `most_fields` - Finds documents which match any field and combines\n     * the `_score` from each field.\n     *\n     * - `cross_fields` - Treats fields with the same `analyzer` as though\n     * they were one big field. 
Looks for each word in *any* field\n     *\n     * - `phrase` - Runs a `match_phrase` query on each field and combines\n     * the `_score` from each field.\n     *\n     * - `phrase_prefix` - Runs a `match_phrase_prefix` query on each field\n     * and combines the `_score` from each field.\n     *\n     * - `bool_prefix` - (added in v7.2) Creates a match_bool_prefix query on each field and\n     * combines the _score from each field.\n     *\n     * @example\n     * // Find the single best matching field\n     * const qry = esb.multiMatchQuery(['subject', 'message'], 'brown fox')\n     *     .type('best_fields')\n     *     .tieBreaker(0.3);\n     *\n     * @example\n     * // Query multiple fields analyzed differently for the same text\n     * const qry = esb.multiMatchQuery(\n     *     ['title', 'title.original', 'title.shingles'],\n     *     'quick brown fox'\n     * ).type('most_fields');\n     *\n     * @example\n     * // Run a `match_phrase_prefix` query on multiple fields\n     * const qry = esb.multiMatchQuery(\n     *     ['subject', 'message'],\n     *     'quick brown f'\n     * ).type('phrase_prefix');\n     *\n     * @example\n     * // All terms must be present in at least one field for document to match\n     * const qry = esb.multiMatchQuery(['first_name', 'last_name'], 'Will Smith')\n     *     .type('cross_fields')\n     *     .operator('and');\n     *\n     * @param {string} type Can be one of `best_fields`, `most_fields`,\n     * `cross_fields`, `phrase`, `phrase_prefix` and `bool_prefix`. Default is\n     * `best_fields`.\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        if (_.isNil(type)) invalidTypeParam(type);\n\n        const typeLower = type.toLowerCase();\n        if (!MULTI_MATCH_TYPE.has(typeLower)) invalidTypeParam(type);\n\n        this._queryOpts.type = typeLower;\n        return this;\n    }\n\n    /**\n     * The tie breaker value. 
The tie breaker capability allows results\n     * that include the same term in multiple fields to be judged better than\n     * results that include this term in only the best of those multiple\n     * fields, without confusing this with the better case of two different\n     * terms in the multiple fields. Default: `0.0`.\n     *\n     * @param {number} factor\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    tieBreaker(factor) {\n        this._queryOpts.tie_breaker = factor;\n        return this;\n    }\n\n    /**\n     * The operator to be used in the boolean query which is constructed\n     * by analyzing the text provided. The `operator` flag can be set to `or` or\n     * `and` to control the boolean clauses (defaults to `or`).\n     *\n     * @param {string} operator Can be `and`/`or`. Default is `or`.\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    operator(operator) {\n        if (_.isNil(operator)) invalidOperatorParam(operator);\n\n        const operatorLower = operator.toLowerCase();\n        if (operatorLower !== 'and' && operatorLower !== 'or') {\n            invalidOperatorParam(operator);\n        }\n\n        this._queryOpts.operator = operatorLower;\n        return this;\n    }\n\n    /**\n     * Sets the `lenient` parameter which allows to ignore exceptions caused\n     * by data-type mismatches such as trying to query a numeric field with a\n     * text query string when set to `true`.\n     *\n     * @param {boolean} enable Defaules to `false`\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    lenient(enable) {\n        this._queryOpts.lenient = enable;\n        return this;\n    }\n\n    // phrase_slop is a synonym for slop.\n    // haven't added method for it..\n\n    /**\n     * Configures the `slop`(default is 0) for matching terms in any order.\n     * Transposed terms have a slop of 2.\n     *\n     * @param 
{number} slop A positive integer value, defaults is 0.\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    slop(slop) {\n        this._queryOpts.slop = slop;\n        return this;\n    }\n\n    /**\n     * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n     * the number of one character changes that need to be made to one string to make it\n     * the same as another string.\n     *\n     * The `fuzziness` parameter cannot be used with the `phrase`, `phrase_prefix`\n     * or `cross_fields` type.\n     *\n     * @param {number|string} factor Can be specified either as a number, or the maximum\n     * number of edits, or as `AUTO` which generates an edit distance based on the length\n     * of the term.\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    fuzziness(factor) {\n        this._queryOpts.fuzziness = factor;\n        return this;\n    }\n\n    /**\n     * Sets the prefix length for a fuzzy prefix `MultiMatchQuery`\n     *\n     * @param {number} len\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    prefixLength(len) {\n        this._queryOpts.prefix_length = len;\n        return this;\n    }\n\n    /**\n     * Sets the max expansions for a fuzzy prefix `MultiMatchQuery`\n     *\n     * @param {number} limit\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    maxExpansions(limit) {\n        this._queryOpts.max_expansions = limit;\n        return this;\n    }\n\n    /**\n     * Sets the rewrite method. 
Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, except no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * This is an advanced option, use with care.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `rewrite` method is not valid.\n     */\n    rewrite(method) {\n        validateRewiteMethod(method, 'rewrite', ES_REF_URL);\n\n        this._queryOpts.rewrite = method;\n        return this;\n    }\n\n    /**\n     * Sets the fuzzy rewrite method. 
Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, except no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * This is an advanced option, use with care.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `fuzzy_rewrite` method is not valid.\n     */\n    fuzzyRewrite(method) {\n        validateRewiteMethod(method, 'fuzzy_rewrite', ES_REF_URL);\n\n        this._queryOpts.fuzzy_rewrite = method;\n        return this;\n    }\n\n    /**\n     * If the analyzer used removes all tokens in a query like a `stop` filter does,\n     * the default behavior is to match no documents at all. 
In order to change that\n     * the `zero_terms_query` option can be used, which accepts `none` (default) and `all`\n     * which corresponds to a `match_all` query.\n     *\n     * @param {string} behavior A no match action, `all` or `none`. Default is `none`.\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    zeroTermsQuery(behavior) {\n        if (_.isNil(behavior)) invalidBehaviorParam(behavior);\n\n        const behaviorLower = behavior.toLowerCase();\n        if (behaviorLower !== 'all' && behaviorLower !== 'none') {\n            invalidBehaviorParam(behavior);\n        }\n\n        this._queryOpts.zero_terms_query = behavior;\n        return this;\n    }\n\n    /**\n     * Allows specifying an absolute or relative document frequency where high frequency\n     * terms are moved into an optional subquery and are only scored if one of the\n     * low frequency (below the cutoff) terms in the case of an `or` operator or\n     * all of the low frequency terms in the case of an `and` operator match.\n     *\n     * @param {number} frequency It can either be relative to the total number of documents\n     * if in the range `[0..1)` or absolute if greater or equal to `1.0`.\n     * @returns {MultiMatchQuery} returns `this` so that calls can be chained.\n     */\n    cutoffFrequency(frequency) {\n        this._queryOpts.cutoff_frequency = frequency;\n        return this;\n    }\n}\n\nmodule.exports = MultiMatchQuery;\n"
  },
  {
    "path": "src/queries/full-text-queries/query-string-query-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType, setDefault, invalidParam }\n} = require('../../core');\nconst FullTextQueryBase = require('./full-text-query-base');\n\nconst invalidOperatorParam = invalidParam('', 'operator', \"'AND' or 'OR'\");\n\n/**\n * The `QueryStringQueryBase` provides support for common options used across\n * full text query implementations `QueryStringQuery` and `SimpleQueryStringQuery`.\n * A query that uses a query parser in order to parse its content.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html)\n *\n * @param {string} queryType\n * @param {string} refUrl\n * @param {string=} queryString The actual query to be parsed.\n *\n * @extends FullTextQueryBase\n */\nclass QueryStringQueryBase extends FullTextQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryType, refUrl, queryString) {\n        super(queryType, queryString);\n\n        this._refUrl = refUrl;\n    }\n\n    /**\n     * Appends given field to the list of fields to search against.\n     * Fields can be specified with wildcards.\n     *\n     * Individual fields can be boosted with the caret (^) notation.\n     * Example - `\"subject^3\"`\n     *\n     * @example\n     * const qry = esb.queryStringQuery('this AND that OR thus')\n     *     .field('city.*')\n     *     .useDisMax(true);\n     *\n     * @example\n     * const qry = esb.simpleQueryStringQuery('foo bar -baz').field('content');\n     *\n     * @param {string} field One of the fields to be queried\n     * @returns {QueryStringQueryBase} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        setDefault(this._queryOpts, 'fields', []);\n\n        
this._queryOpts.fields.push(field);\n        return this;\n    }\n\n    /**\n     * Appends given fields to the list of fields to search against.\n     * Fields can be specified with wildcards.\n     *\n     * Individual fields can be boosted with the caret (^) notation.\n     * Example - `[ \"subject^3\", \"message\" ]`\n     *\n     * @example\n     * const qry = esb.queryStringQuery('this AND that')\n     *     .fields(['content', 'name'])\n     *\n     * @example\n     * const qry = esb.simpleQueryStringQuery('foo bar baz')\n     *     .fields(['content', 'name.*^5']);\n     *\n     * @param {Array<string>} fields The fields to be queried\n     * @returns {QueryStringQueryBase} returns `this` so that calls can be chained.\n     */\n    fields(fields) {\n        checkType(fields, Array);\n        setDefault(this._queryOpts, 'fields', []);\n\n        this._queryOpts.fields = this._queryOpts.fields.concat(fields);\n        return this;\n    }\n\n    /**\n     * The default operator used if no explicit operator is specified.\n     * For example, with a default operator of `OR`, the query `capital of Hungary`\n     * is translated to `capital OR of OR Hungary`, and with default operator of AND,\n     * the same query is translated to `capital AND of AND Hungary`.\n     * The default value is OR.\n     *\n     * @param {string} operator Can be `AND`/`OR`. 
Default is `OR`.\n     * @returns {QueryStringQueryBase} returns `this` so that calls can be chained.\n     */\n    defaultOperator(operator) {\n        if (_.isNil(operator)) invalidOperatorParam(operator, this._refUrl);\n\n        const operatorUpper = operator.toUpperCase();\n        if (operatorUpper !== 'AND' && operatorUpper !== 'OR') {\n            invalidOperatorParam(operator, this._refUrl);\n        }\n\n        this._queryOpts.default_operator = operatorUpper;\n        return this;\n    }\n\n    /**\n     * By default, wildcards terms in a query string are not analyzed.\n     * By setting this value to `true`, a best effort will be made to analyze those as well.\n     *\n     * @param {boolean} enable\n     * @returns {QueryStringQueryBase} returns `this` so that calls can be chained.\n     */\n    analyzeWildcard(enable) {\n        this._queryOpts.analyze_wildcard = enable;\n        return this;\n    }\n\n    /**\n     * Sets the `lenient` parameter which allows to ignore exceptions caused\n     * by data-type mismatches such as trying to query a numeric field with a\n     * text query string when set to `true`.\n     *\n     * @param {boolean} enable Defaults to `false`\n     * @returns {QueryStringQueryBase} returns `this` so that calls can be chained.\n     */\n    lenient(enable) {\n        this._queryOpts.lenient = enable;\n        return this;\n    }\n\n    /**\n     * A suffix to append to fields for quoted parts of the query string.\n     * This allows to use a field that has a different analysis chain for exact matching.\n     *\n     * @param {string} suffix\n     * @returns {QueryStringQueryBase} returns `this` so that calls can be chained.\n     */\n    quoteFieldSuffix(suffix) {\n        this._queryOpts.quote_field_suffix = suffix;\n        return this;\n    }\n\n    /**\n     * Perform the query on all fields detected in the mapping that can be queried.\n     * Will be used by default when the `_all` field is disabled and\n     * no 
`default_field` is specified (either in the index settings or\n     * in the request body) and no `fields` are specified.\n     * @param {boolean} enable\n     * @returns {QueryStringQueryBase} returns `this` so that calls can be chained.\n     */\n    allFields(enable) {\n        this._queryOpts.all_fields = enable;\n        return this;\n    }\n}\n\nmodule.exports = QueryStringQueryBase;\n"
  },
  {
    "path": "src/queries/full-text-queries/query-string-query.js",
    "content": "'use strict';\n\nconst QueryStringQueryBase = require('./query-string-query-base');\nconst { validateRewiteMethod } = require('../helper');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html';\n\n/**\n * A query that uses a query parser in order to parse its content.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html)\n *\n * @example\n * const qry = esb.queryStringQuery('this AND that OR thus')\n *     .defaultField('content');\n *\n * @param {string=} queryString The actual query to be parsed.\n *\n * @extends QueryStringQueryBase\n */\nclass QueryStringQuery extends QueryStringQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryString) {\n        super('query_string', ES_REF_URL, queryString);\n    }\n\n    /**\n     * The default field for query terms if no prefix field is specified.\n     * Defaults to the `index.query.default_field` index settings, which\n     * in turn defaults to `_all`.\n     *\n     * @param {string} field\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    defaultField(field) {\n        this._queryOpts.default_field = field;\n        return this;\n    }\n\n    /**\n     * When set, `*` or `?` are allowed as the first character. Defaults to `true`.\n     *\n     * @param {boolean} enable\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    allowLeadingWildcard(enable) {\n        this._queryOpts.allow_leading_wildcard = enable;\n        return this;\n    }\n\n    /**\n     * Set to true to enable position increments in result queries. 
Defaults to true.\n     *\n     * @param {boolean} enable\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    enablePositionIncrements(enable) {\n        this._queryOpts.enable_position_increments = enable;\n        return this;\n    }\n\n    /**\n     * Controls the number of terms fuzzy queries will expand to. Defaults to `50`.\n     *\n     * @param {number} limit\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    fuzzyMaxExpansions(limit) {\n        this._queryOpts.fuzzy_max_expansions = limit;\n        return this;\n    }\n\n    /**\n     * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n     * the number of one character changes that need to be made to one string to make it\n     * the same as another string. Defaults to `AUTO`.\n     *\n     * @param {number|string} factor Can be specified either as a number, or the maximum\n     * number of edits, or as `AUTO` which generates an edit distance based on the length\n     * of the term. Defaults to `AUTO`.\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    fuzziness(factor) {\n        this._queryOpts.fuzziness = factor;\n        return this;\n    }\n\n    /**\n     * Set the prefix length for fuzzy queries. Default is `0`.\n     *\n     * @param {number} len\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    fuzzyPrefixLength(len) {\n        this._queryOpts.fuzzy_prefix_length = len;\n        return this;\n    }\n\n    /**\n     * Sets the rewrite method. 
Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, except no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * This is an advanced option, use with care.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `rewrite` method is not valid.\n     */\n    rewrite(method) {\n        validateRewiteMethod(method, 'rewrite', ES_REF_URL);\n\n        this._queryOpts.rewrite = method;\n        return this;\n    }\n\n    /**\n     * Sets the fuzzy rewrite method. 
Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, except no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * This is an advanced option, use with care.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `fuzzy_rewrite` method is not valid.\n     */\n    fuzzyRewrite(method) {\n        validateRewiteMethod(method, 'fuzzy_rewrite', ES_REF_URL);\n\n        this._queryOpts.fuzzy_rewrite = method;\n        return this;\n    }\n\n    /**\n     * Sets the default slop for phrases. 
If zero, then exact phrase matches are required.\n     * Default value is 0.\n     *\n     * @param {number} slop A positive integer value, defaults is 0.\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    phraseSlop(slop) {\n        this._queryOpts.phrase_slop = slop;\n        return this;\n    }\n\n    /**\n     * Auto generate phrase queries. Defaults to `false`.\n     *\n     * Note: This parameter has been removed in elasticsearch 6.0. If provided,\n     * it will be ignored and issue a deprecation warning.\n     *\n     * @param {boolean} enable\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    autoGeneratePhraseQueries(enable) {\n        this._queryOpts.auto_generate_phrase_queries = enable;\n        return this;\n    }\n\n    /**\n     * Limit on how many automaton states regexp queries are allowed to create.\n     * This protects against too-difficult (e.g. exponentially hard) regexps.\n     * Defaults to 10000.\n     *\n     * @param {number} limit\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    maxDeterminizedStates(limit) {\n        this._queryOpts.max_determinized_states = limit;\n        return this;\n    }\n\n    /**\n     * Time Zone to be applied to any range query related to dates.\n     *\n     * @param {string} zone\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    timeZone(zone) {\n        this._queryOpts.time_zone = zone;\n        return this;\n    }\n\n    /**\n     * Whether query text should be split on whitespace prior to analysis.\n     * Instead the queryparser would parse around only real operators.\n     * Default is `false`. It is not allowed to set this option to `false`\n     * if `auto_generate_phrase_queries` is already set to `true`.\n     *\n     * Note: This parameter has been removed in elasticsearch 6.0. 
If provided,\n     * it will be ignored and issue a deprecation warning. The `query_string`\n     * query now splits on operator only.\n     *\n     * @param {string} enable\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    splitOnWhitespace(enable) {\n        this._queryOpts.split_on_whitespace = enable;\n        return this;\n    }\n\n    /**\n     * Should the queries be combined using `dis_max` (set it to `true`),\n     * or a bool query (set it to `false`). Defaults to `true`.\n     *\n     * Note: This parameter has been removed in elasticsearch 6.0. If provided,\n     * it will be ignored and issue a deprecation warning. The `tie_breaker`\n     * parameter must be used instead.\n     *\n     * @example\n     * const qry = esb.queryStringQuery('this AND that OR thus')\n     *     .fields(['content', 'name^5'])\n     *     .useDisMax(true);\n     *\n     * @param {boolean} enable\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    useDisMax(enable) {\n        this._queryOpts.use_dis_max = enable;\n        return this;\n    }\n\n    /**\n     * When using `dis_max`, the disjunction max tie breaker. 
Defaults to `0`.\n     *\n     * @param {number} factor\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    tieBreaker(factor) {\n        this._queryOpts.tie_breaker = factor;\n        return this;\n    }\n\n    /**\n     * Sets the quote analyzer name used to analyze the `query`\n     * when in quoted text.\n     *\n     * @param {string} analyzer A valid analyzer name.\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    quoteAnalyzer(analyzer) {\n        this._queryOpts.quote_analyzer = analyzer;\n        return this;\n    }\n\n    /**\n     * If the query string should be escaped or not.\n     *\n     * @param {boolean} enable\n     * @returns {QueryStringQuery} returns `this` so that calls can be chained.\n     */\n    escape(enable) {\n        this._queryOpts.escape = enable;\n        return this;\n    }\n}\n\nmodule.exports = QueryStringQuery;\n"
  },
  {
    "path": "src/queries/full-text-queries/simple-query-string-query.js",
    "content": "'use strict';\n\nconst QueryStringQueryBase = require('./query-string-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html';\n\n/**\n * A query that uses the `SimpleQueryParser` to parse its context.\n * Unlike the regular `query_string` query, the `simple_query_string` query\n * will never throw an exception, and discards invalid parts of the query.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html)\n *\n * @example\n * const qry = esb.simpleQueryStringQuery(\n *     '\"fried eggs\" +(eggplant | potato) -frittata'\n * )\n *     .analyzer('snowball')\n *     .fields(['body^5', '_all'])\n *     .defaultOperator('and');\n *\n * @param {string=} queryString The query string\n *\n * @extends QueryStringQueryBase\n */\nclass SimpleQueryStringQuery extends QueryStringQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryString) {\n        super('simple_query_string', ES_REF_URL, queryString);\n    }\n\n    /**\n     * `simple_query_string` support multiple flags to specify which parsing features\n     * should be enabled. It is specified as a `|`-delimited string.\n     *\n     * @example\n     * const qry = esb.simpleQueryStringQuery('foo | bar + baz*')\n     *     .flags('OR|AND|PREFIX');\n     *\n     * @param {string} flags `|` delimited string. The available flags are: `ALL`, `NONE`,\n     * `AND`, `OR`, `NOT`, `PREFIX`, `PHRASE`, `PRECEDENCE`, `ESCAPE`, `WHITESPACE`,\n     * `FUZZY`, `NEAR`, and `SLOP`.\n     * @returns {SimpleQueryStringQuery} returns `this` so that calls can be chained.\n     */\n    flags(flags) {\n        this._queryOpts.flags = flags;\n        return this;\n    }\n}\n\nmodule.exports = SimpleQueryStringQuery;\n"
  },
  {
    "path": "src/queries/geo-queries/geo-bounding-box-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    GeoPoint,\n    util: { checkType, invalidParam }\n} = require('../../core');\n\nconst GeoQueryBase = require('./geo-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-geo-bounding-box-query.html';\n\nconst invalidTypeParam = invalidParam(\n    ES_REF_URL,\n    'type',\n    \"'memory' or 'indexed'\"\n);\n\n/**\n * A query allowing to filter hits based on a point location using a bounding box.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-geo-bounding-box-query.html)\n *\n * @example\n * // Format of point in Geohash\n * const qry = esb.geoBoundingBoxQuery('pin.location')\n *     .topLeft(esb.geoPoint().string('dr5r9ydj2y73'))\n *     .bottomRight(esb.geoPoint().string('drj7teegpus6'));\n *\n * @example\n * // Format of point with lat lon as properties\n * const qry = esb.geoBoundingBoxQuery()\n *     .field('pin.location')\n *     .topLeft(esb.geoPoint()\n *         .lat(40.73)\n *         .lon(-74.1))\n *     .bottomRight(esb.geoPoint()\n *         .lat(40.10)\n *         .lon(-71.12));\n *\n * @example\n * // Set bounding box values separately\n * const qry = esb.geoBoundingBoxQuery('pin.location')\n *     .top(40.73)\n *     .left(-74.1)\n *     .bottom(40.01)\n *     .right(-71.12);\n *\n * @param {string=} field\n *\n * @extends GeoQueryBase\n */\nclass GeoBoundingBoxQuery extends GeoQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field) {\n        super('geo_bounding_box', field);\n    }\n\n    /**\n     * Sets the top left coordinate for the Geo bounding box filter for\n     * querying documents\n     *\n     * @param {GeoPoint} point A valid `GeoPoint`\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n     */\n    topLeft(point) {\n        checkType(point, GeoPoint);\n\n        
this._fieldOpts.top_left = point;\n        return this;\n    }\n\n    /**\n     * Sets the bottom right coordinate for the Geo bounding box filter for\n     * querying documents\n     *\n     * @param {GeoPoint} point A valid `GeoPoint`\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n     */\n    bottomRight(point) {\n        checkType(point, GeoPoint);\n\n        this._fieldOpts.bottom_right = point;\n        return this;\n    }\n\n    /**\n     * Sets the top right coordinate for the Geo bounding box filter for\n     * querying documents\n     *\n     * @param {GeoPoint} point A valid `GeoPoint`\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n     */\n    topRight(point) {\n        checkType(point, GeoPoint);\n\n        this._fieldOpts.top_right = point;\n        return this;\n    }\n\n    /**\n     * Sets the bottom left coordinate for the Geo bounding box filter for\n     * querying documents\n     *\n     * @param {GeoPoint} point A valid `GeoPoint`\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n     */\n    bottomLeft(point) {\n        checkType(point, GeoPoint);\n\n        this._fieldOpts.bottom_left = point;\n        return this;\n    }\n\n    /**\n     * Sets value for top of the bounding box.\n     *\n     * @param {number} val\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n     */\n    top(val) {\n        this._fieldOpts.top = val;\n        return this;\n    }\n\n    /**\n     * Sets value for left of the bounding box.\n     *\n     * @param {number} val\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n     */\n    left(val) {\n        this._fieldOpts.left = val;\n        return this;\n    }\n\n    /**\n     * Sets value for bottom of the bounding box.\n     *\n     * @param {number} val\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n 
    */\n    bottom(val) {\n        this._fieldOpts.bottom = val;\n        return this;\n    }\n\n    /**\n     * Sets value for right of the bounding box.\n     *\n     * @param {number} val\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n     */\n    right(val) {\n        this._fieldOpts.right = val;\n        return this;\n    }\n\n    /**\n     * Sets the type of execution for the bounding box query.\n     * The type of the bounding box execution by default is set to memory,\n     * which means in memory checks if the doc falls within the bounding\n     * box range. In some cases, an indexed option will perform faster\n     * (but note that the geo_point type must have lat and lon indexed in this case)\n     *\n     * @example\n     *\n     * const geoQry = esb.geoBoundingBoxQuery()\n     *     .field('pin.location')\n     *     .topLeft(esb.geoPoint()\n     *         .lat(40.73)\n     *         .lon(-74.1))\n     *     .bottomRight(esb.geoPoint()\n     *         .lat(40.10)\n     *         .lon(-71.12))\n     *     .type('indexed');\n     *\n     * @param {string} type Can either `memory` or `indexed`\n     * @returns {GeoBoundingBoxQuery} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        if (_.isNil(type)) invalidTypeParam(type);\n\n        const typeLower = type.toLowerCase();\n        if (typeLower !== 'memory' && typeLower !== 'indexed') {\n            invalidTypeParam(type);\n        }\n\n        this._queryOpts.type = typeLower;\n        return this;\n    }\n}\n\nmodule.exports = GeoBoundingBoxQuery;\n"
  },
  {
    "path": "src/queries/geo-queries/geo-distance-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    GeoPoint,\n    util: { checkType, invalidParam }\n} = require('../../core');\n\nconst GeoQueryBase = require('./geo-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-geo-distance-query.html';\n\nconst invalidDistanceTypeParam = invalidParam(\n    ES_REF_URL,\n    'distance_type',\n    \"'plane' or 'arc'\"\n);\n\n/**\n * Filters documents that include only hits that exists within a specific distance from a geo point.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-geo-distance-query.html)\n *\n * @example\n * const qry = esb.geoDistanceQuery('pin.location', esb.geoPoint().lat(40).lon(-70))\n *     .distance('12km');\n *\n * const qry = esb.geoDistanceQuery()\n *     .field('pin.location')\n *     .distance('200km')\n *     .geoPoint(esb.geoPoint().lat(40).lon(-70));\n *\n * @param {string=} field\n * @param {GeoPoint=} point Geo point used to measure and filter documents based on distance from it.\n *\n * @extends GeoQueryBase\n */\nclass GeoDistanceQuery extends GeoQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, point) {\n        super('geo_distance', field);\n\n        if (!_.isNil(point)) this.geoPoint(point);\n    }\n\n    /**\n     * Sets the radius of the circle centred on the specified location. Points which\n     * fall into this circle are considered to be matches. 
The distance can be specified\n     * in various units.\n     *\n     * @param {string|number} distance Radius of circle centred on specified location.\n     * @returns {GeoDistanceQuery} returns `this` so that calls can be chained.\n     */\n    distance(distance) {\n        this._queryOpts.distance = distance;\n        return this;\n    }\n\n    /**\n     * Sets the distance calculation mode, `arc` or `plane`.\n     * The `arc` calculation is the more accurate.\n     * The `plane` is the faster but least accurate.\n     *\n     * @param {string} type\n     * @returns {GeoDistanceQuery} returns `this` so that calls can be chained\n     * @throws {Error} If `type` is neither `plane` nor `arc`.\n     */\n    distanceType(type) {\n        if (_.isNil(type)) invalidDistanceTypeParam(type);\n\n        const typeLower = type.toLowerCase();\n        if (typeLower !== 'plane' && typeLower !== 'arc')\n            invalidDistanceTypeParam(type);\n\n        this._queryOpts.distance_type = typeLower;\n        return this;\n    }\n\n    /**\n     * Sets the point to filter documents based on the distance from it.\n     *\n     * @param {GeoPoint} point Geo point used to measure and filter documents based on distance from it.\n     * @returns {GeoDistanceQuery} returns `this` so that calls can be chained\n     * @throws {TypeError} If parameter `point` is not an instance of `GeoPoint`\n     */\n    geoPoint(point) {\n        checkType(point, GeoPoint);\n\n        this._fieldOpts = point;\n        return this;\n    }\n}\n\nmodule.exports = GeoDistanceQuery;\n"
  },
  {
    "path": "src/queries/geo-queries/geo-polygon-query.js",
    "content": "'use strict';\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst GeoQueryBase = require('./geo-query-base');\n\n/**\n * A query allowing to include hits that only fall within a polygon of points.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-geo-polygon-query.html)\n *\n * @example\n * const geoQry = esb.geoPolygonQuery('person.location')\n *     .points([\n *         {\"lat\" : 40, \"lon\" : -70},\n *         {\"lat\" : 30, \"lon\" : -80},\n *         {\"lat\" : 20, \"lon\" : -90}\n *     ]);\n *\n * @param {string=} field\n *\n * @extends GeoQueryBase\n */\nclass GeoPolygonQuery extends GeoQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field) {\n        super('geo_polygon', field);\n    }\n\n    /**\n     * Sets the points which form the polygon.\n     * Points can be instances of `GeoPoint`, Object with `lat`, `lon` keys,\n     * `GeoJSON` array representation or string(`geohash`/`lat, lon`)\n     *\n     * @example\n     * // Format in `[lon, lat]`\n     * const qry = esb.geoPolygonQuery('person.location').points([\n     *     [-70, 40],\n     *     [-80, 30],\n     *     [-90, 20]\n     * ]);\n     *\n     * @example\n     * // Format in lat,lon\n     * const qry = esb.geoPolygonQuery('person.location').points([\n     *     '40, -70',\n     *     '30, -80',\n     *     '20, -90'\n     * ]);\n     *\n     * @example\n     * // Geohash\n     * const qry = esb.geoPolygonQuery('person.location').points([\n     *     'drn5x1g8cu2y',\n     *     '30, -80',\n     *     '20, -90'\n     * ]);\n     *\n     * @param {Array<*>} points\n     * @returns {GeoPolygonQuery} returns `this` so that calls can be chained\n     * @throws {TypeError} If `points` parameter is not an instance of `Array`.\n     */\n    points(points) {\n        checkType(points, Array);\n\n        this._fieldOpts.points = points;\n        return this;\n    }\n}\n\nmodule.exports = GeoPolygonQuery;\n"
  },
  {
    "path": "src/queries/geo-queries/geo-query-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { invalidParam, recursiveToJSON }\n} = require('../../core');\n\nconst invalidValidationMethod = invalidParam(\n    '',\n    'validation_method',\n    \"'IGNORE_MALFORMED', 'COERCE' or 'STRICT'\"\n);\n\n/**\n * The `GeoQueryBase` provides support for common options used across\n * various geo query implementations.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} queryType\n * @param {string=} field\n *\n * @extends Query\n */\nclass GeoQueryBase extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryType, field) {\n        super(queryType);\n\n        this._field = null;\n        this._fieldOpts = {};\n\n        if (!_.isNil(field)) this._field = field;\n    }\n\n    /**\n     * Sets the field to run the geo query on.\n     *\n     * @param {string} field\n     * @returns {GeoQueryBase} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._field = field;\n        return this;\n    }\n\n    /**\n     * Sets the `validation_method` parameter. Can be set to `IGNORE_MALFORMED` to accept\n     * geo points with invalid latitude or longitude, `COERCE` to try and infer correct latitude\n     * or longitude, or `STRICT` (default is `STRICT`).\n     *\n     * Note: The `ignore_malformed` and `coerce` parameters have been removed\n     * from `geo_bounding_box`, `geo_polygon`, and `geo_distance` queries in\n     * elasticsearch 6.0.\n     *\n     * @param {string} method One of `IGNORE_MALFORMED`, `COERCE` or `STRICT`(default)\n     * @returns {GeoQueryBase} returns `this` so that calls can be chained.\n     * @throws {Error} If `method` parameter is not one of `IGNORE_MALFORMED`, `COERCE` or `STRICT`\n     */\n    validationMethod(method) {\n        if (_.isNil(method)) invalidValidationMethod(method);\n\n        const methodUpper = method.toUpperCase();\n        if (\n            methodUpper !== 'IGNORE_MALFORMED' &&\n            methodUpper !== 'COERCE' &&\n            methodUpper !== 'STRICT'\n        ) {\n            invalidValidationMethod(method);\n        }\n\n        this._queryOpts.validation_method = methodUpper;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the geo query\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return recursiveToJSON({\n            [this.queryType]: Object.assign(\n                { [this._field]: this._fieldOpts },\n                this._queryOpts\n            )\n        });\n    }\n}\n\nmodule.exports = GeoQueryBase;\n"
  },
  {
    "path": "src/queries/geo-queries/geo-shape-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    GeoShape,\n    IndexedShape,\n    util: { checkType, invalidParam },\n    consts: { GEO_RELATION_SET }\n} = require('../../core');\n\nconst GeoQueryBase = require('./geo-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-geo-shape-query.html';\n\nconst invalidRelationParam = invalidParam(\n    ES_REF_URL,\n    'relation',\n    GEO_RELATION_SET\n);\n\n/**\n * Filter documents indexed using the `geo_shape` type. Requires\n * the `geo_shape` Mapping.\n *\n * The `geo_shape` query uses the same grid square representation as\n * the `geo_shape` mapping to find documents that have a shape that\n * intersects with the query shape. It will also use the same PrefixTree\n * configuration as defined for the field mapping.\n *\n * The query supports two ways of defining the query shape, either by\n * providing a whole shape definition, or by referencing the name of\n * a shape pre-indexed in another index.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-geo-shape-query.html)\n *\n * @example\n * const geoQry = esb.geoShapeQuery('location')\n *     .shape(esb.geoShape()\n *         .type('envelope')\n *         .coordinates([[13.0, 53.0], [14.0, 52.0]]))\n *     .relation('within');\n *\n * @example\n * // Pre-indexed shape\n * const geoQry = esb.geoShapeQuery()\n *     .field('location')\n *     .indexedShape(esb.indexedShape()\n *         .id('DEU')\n *         .type('countries')\n *         .index('shapes')\n *         .path('location'))\n *\n * @param {string=} field\n *\n * @extends GeoQueryBase\n */\nclass GeoShapeQuery extends GeoQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field) {\n        super('geo_shape', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on GeoShapeQuery\n     */\n    validationMethod() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('validationMethod is not supported in GeoShapeQuery');\n    }\n\n    /**\n     * Sets the shape definition for the geo query.\n     *\n     * @param {GeoShape} shape\n     * @returns {GeoShapeQuery} returns `this` so that calls can be chained.\n     * @throws {TypeError} If given `shape` is not an instance of `GeoShape`\n     */\n    shape(shape) {\n        checkType(shape, GeoShape);\n\n        this._fieldOpts.shape = shape;\n        return this;\n    }\n\n    /**\n     * Sets the reference name of a shape pre-indexed in another index.\n     *\n     * @param {IndexedShape} shape\n     * @returns {GeoShapeQuery} returns `this` so that calls can be chained.\n     * @throws {TypeError} If given `shape` is not an instance of `IndexedShape`\n     */\n    indexedShape(shape) {\n        checkType(shape, IndexedShape);\n\n        this._fieldOpts.indexed_shape = shape;\n        return this;\n    }\n\n    /**\n     * Sets the relationship between Query and indexed data\n     * that will be used to determine if a Document should be matched or not.\n     *\n     * @param {string} relation Can be one of `WITHIN`, `CONTAINS`, `DISJOINT`\n     * or `INTERSECTS`(default)\n     * @returns {GeoShapeQuery} returns `this` so that calls can be chained\n     */\n    relation(relation) {\n        if (_.isNil(relation)) invalidRelationParam(relation);\n\n        const relationUpper = relation.toUpperCase();\n        if (!GEO_RELATION_SET.has(relationUpper)) {\n            invalidRelationParam(relation);\n        }\n\n        this._fieldOpts.relation = relationUpper;\n        return this;\n    }\n\n    /**\n     * When set to `true` will ignore an unmapped `path` and will not match any\n     * documents for this query. When set to `false` (the default value) the query\n     * will throw an exception if the path is not mapped.\n     *\n     * @param {boolean} enable `true` or `false`, `false` by default.\n     * @returns {GeoShapeQuery} returns `this` so that calls can be chained.\n     */\n    ignoreUnmapped(enable) {\n        this._queryOpts.ignore_unmapped = enable;\n        return this;\n    }\n}\n\nmodule.exports = GeoShapeQuery;\n"
  },
  {
    "path": "src/queries/geo-queries/index.js",
    "content": "'use strict';\n\nexports.GeoQueryBase = require('./geo-query-base');\n\nexports.GeoShapeQuery = require('./geo-shape-query');\nexports.GeoBoundingBoxQuery = require('./geo-bounding-box-query');\nexports.GeoDistanceQuery = require('./geo-distance-query');\nexports.GeoPolygonQuery = require('./geo-polygon-query');\n"
  },
  {
    "path": "src/queries/helper.js",
    "content": "'use strict';\n\nconst { inspect } = require('../core/inspect');\n\nconst {\n    util: { firstDigitPos },\n    consts: { REWRITE_METHOD_SET }\n} = require('../core');\n\n/**\n * Validate the rewrite method.\n *\n * @private\n * @param {string} method\n * @param {string} paramName\n * @param {string} refUrl\n * @throws {Error} If the given rewrite method is not valid.\n */\nexports.validateRewiteMethod = function validateRewiteMethod(\n    method,\n    paramName,\n    refUrl\n) {\n    // NOTE: This does not check for lower case comparison.\n    if (!REWRITE_METHOD_SET.has(method)) {\n        const rewriteMethodName = `${method.substring(\n            0,\n            firstDigitPos(method)\n        )}N`;\n        if (!REWRITE_METHOD_SET.has(rewriteMethodName)) {\n            console.log(`See ${refUrl}`);\n            console.warn(`Got '${paramName}' - ${method}`);\n            throw new Error(\n                `The '${paramName}' parameter should belong to ${inspect(\n                    REWRITE_METHOD_SET\n                )}`\n            );\n        }\n    }\n};\n"
  },
  {
    "path": "src/queries/index.js",
    "content": "'use strict';\n\nexports.MatchAllQuery = require('./match-all-query');\nexports.MatchNoneQuery = require('./match-none-query');\n\nexports.fullTextQueries = require('./full-text-queries');\n\nexports.termLevelQueries = require('./term-level-queries');\n\nexports.compoundQueries = require('./compound-queries');\n\nexports.joiningQueries = require('./joining-queries');\n\nexports.geoQueries = require('./geo-queries');\n\nexports.specializedQueries = require('./specialized-queries');\n\nexports.spanQueries = require('./span-queries');\n\nexports.vectorQueries = require('./vector-queries');\n"
  },
  {
    "path": "src/queries/joining-queries/has-child-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst JoiningQueryBase = require('./joining-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-has-child-query.html';\n\n/**\n * The `has_child` filter accepts a query and the child type to run against, and\n * results in parent documents that have child docs matching the query.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-has-child-query.html)\n *\n * @example\n * // Scoring support\n * const qry = esb.hasChildQuery(\n *     esb.termQuery('tag', 'something'),\n *     'blog_tag'\n * ).scoreMode('min');\n *\n * @example\n * // Sort by child documents' `click_count` field\n * const qry = esb.hasChildQuery()\n *     .query(\n *         esb.functionScoreQuery().function(\n *             esb.scriptScoreFunction(\"_score * doc['click_count'].value\")\n *         )\n *     )\n *     .type('blog_tag')\n *     .scoreMode('max');\n *\n * @param {Query=} qry A valid `Query` object\n * @param {string=} type The child type\n *\n * @extends JoiningQueryBase\n */\nclass HasChildQuery extends JoiningQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(qry, type) {\n        super('has_child', ES_REF_URL, qry);\n\n        if (!_.isNil(type)) this._queryOpts.type = type;\n    }\n\n    /**\n     * Sets the child document type to search against.\n     * Alias for method `childType`.\n     *\n     * @param {string} type A valid doc type name\n     * @returns {HasChildQuery} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        this._queryOpts.type = type;\n        return this;\n    }\n\n    /**\n     * Sets the child document type to search against\n     *\n     * @param {string} type A valid doc type name\n     * @returns {HasChildQuery} returns `this` so that calls can be chained.\n     */\n    childType(type) {\n        console.warn(\n            '[HasChildQuery] Field `child_type` is deprecated. Use `type` instead.'\n        );\n        return this.type(type);\n    }\n\n    /**\n     * Specify the minimum number of children are required to match\n     * for the parent doc to be considered a match\n     *\n     * @example\n     * const qry = esb.hasChildQuery(esb.termQuery('tag', 'something'), 'blog_tag')\n     *     .minChildren(2)\n     *     .maxChildren(10)\n     *     .scoreMode('min');\n     *\n     * @param {number} limit A positive `integer` value.\n     * @returns {HasChildQuery} returns `this` so that calls can be chained.\n     */\n    minChildren(limit) {\n        this._queryOpts.min_children = limit;\n        return this;\n    }\n\n    /**\n     * Specify the maximum number of children are required to match\n     * for the parent doc to be considered a match\n     *\n     * @example\n     * const qry = esb.hasChildQuery(esb.termQuery('tag', 'something'), 'blog_tag')\n     *     .minChildren(2)\n     *     .maxChildren(10)\n     *     .scoreMode('min');\n     *\n     * @param {number} limit A positive `integer` value.\n     * @returns {HasChildQuery} returns `this` so that calls can be chained.\n     */\n    maxChildren(limit) {\n        this._queryOpts.max_children = limit;\n        return this;\n    }\n}\n\nmodule.exports = HasChildQuery;\n"
  },
  {
    "path": "src/queries/joining-queries/has-parent-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst JoiningQueryBase = require('./joining-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-has-parent-query.html';\n\n/**\n * The `has_parent` query accepts a query and a parent type. The query is\n * executed in the parent document space, which is specified by the parent\n * type. This query returns child documents which associated parents have\n * matched.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-has-parent-query.html)\n *\n * @example\n * const qry = esb.hasParentQuery(esb.termQuery('tag', 'something'), 'blog');\n *\n * @example\n * // Sorting tags by parent documents' `view_count` field\n * const qry = esb.hasParentQuery()\n *     .parentType('blog')\n *     .score(true)\n *     .query(\n *         esb.functionScoreQuery().function(\n *             esb.scriptScoreFunction(\"_score * doc['view_count'].value\")\n *         )\n *     );\n *\n * @param {Query=} qry A valid `Query` object\n * @param {string=} type The parent type\n *\n * @extends JoiningQueryBase\n */\nclass HasParentQuery extends JoiningQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(qry, type) {\n        super('has_parent', ES_REF_URL, qry);\n\n        if (!_.isNil(type)) this._queryOpts.parent_type = type;\n    }\n\n    /**\n     * @throws {Error} `score_mode` is deprecated. Use `score` instead.\n     * @override\n     */\n    scoreMode() {\n        console.log('`score_mode` is deprecated. Use `score` instead');\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('scoreMode is not supported in HasParentQuery');\n    }\n\n    /**\n     * Sets the child document type to search against\n     * Alias for method `parentType`\n     *\n     * @param {string} type A valid doc type name\n     * @returns {HasParentQuery} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        return this.parentType(type);\n    }\n\n    /**\n     * Sets the child document type to search against\n     *\n     * @param {string} type A valid doc type name\n     * @returns {HasParentQuery} returns `this` so that calls can be chained.\n     */\n    parentType(type) {\n        this._queryOpts.parent_type = type;\n        return this;\n    }\n\n    /**\n     * By default, scoring is `false` which ignores the score from the parent document.\n     * The score is in this case equal to the boost on the `has_parent` query (Defaults to 1).\n     * If the score is set to `true`, then the score of the matching parent document is\n     * aggregated into the child documents belonging to the matching parent document.\n     *\n     * @example\n     * const qry = esb.hasParentQuery(\n     *     esb.termQuery('tag', 'something'),\n     *     'blog'\n     * ).score(true);\n     *\n     * @param {boolean} enable `true` to enable scoring, `false` to disable.\n     * `false` by default.\n     * @returns {HasParentQuery} returns `this` so that calls can be chained.\n     */\n    score(enable) {\n        this._queryOpts.score = enable;\n        return this;\n    }\n}\n\nmodule.exports = HasParentQuery;\n"
  },
  {
    "path": "src/queries/joining-queries/index.js",
    "content": "'use strict';\n\nexports.JoiningQueryBase = require('./joining-query-base');\n\nexports.NestedQuery = require('./nested-query');\nexports.HasChildQuery = require('./has-child-query');\nexports.HasParentQuery = require('./has-parent-query');\nexports.ParentIdQuery = require('./parent-id-query');\n"
  },
  {
    "path": "src/queries/joining-queries/joining-query-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    InnerHits,\n    util: { checkType, invalidParam },\n    consts: { NESTED_SCORE_MODE_SET }\n} = require('../../core');\n\nconst invalidScoreModeParam = invalidParam(\n    '',\n    'score_mode',\n    NESTED_SCORE_MODE_SET\n);\n/**\n * The `JoiningQueryBase` class provides support for common options used across\n * various joining query implementations.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} queryType\n * @param {string} refUrl\n * @param {Query=} qry A valid `Query` object\n *\n * @extends Query\n */\nclass JoiningQueryBase extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryType, refUrl, qry) {\n        super(queryType);\n        this.refUrl = refUrl;\n\n        if (!_.isNil(qry)) this.query(qry);\n    }\n\n    /**\n     * Sets the nested query to be executed.\n     *\n     * @param {Query} qry A valid `Query` object\n     * @returns {JoiningQueryBase} returns `this` so that calls can be chained.\n     */\n    query(qry) {\n        checkType(qry, Query);\n\n        this._queryOpts.query = qry;\n        return this;\n    }\n\n    /**\n     * Sets the scoring method.\n     *\n     * Valid values are:\n     * - `none` - no scoring\n     * - `max` - the highest score of all matched child documents is used\n     * - `min` - the lowest score of all matched child documents is used\n     * - `sum` - the sum the all the matched child documents is used\n     * - `avg` - the default, the average of all matched child documents is used\n     *\n     * @example\n     * const qry = esb.hasChildQuery(\n     *     esb.termQuery('tag', 'something'),\n     *     'blog_tag'\n     * ).scoreMode('min');\n     *\n     * @param {string} mode Can be one of `none`, `sum`, `min`, `max`, `avg`.\n     * Defaults to `avg` for `NestedQuery`, `none` for `HasChildQuery`.\n     * @returns {JoiningQueryBase} returns `this` so that calls can be chained.\n     */\n    scoreMode(mode) {\n        if (_.isNil(mode)) invalidScoreModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (!NESTED_SCORE_MODE_SET.has(modeLower)) {\n            invalidScoreModeParam(mode);\n        }\n\n        this._queryOpts.score_mode = modeLower;\n        return this;\n    }\n\n    /**\n     * When set to `true` will ignore an unmapped `path` and will not match any\n     * documents for this query. When set to `false` (the default value) the query\n     * will throw an exception if the path is not mapped.\n     *\n     * @param {boolean} enable `true` or `false`, `false` by default.\n     * @returns {JoiningQueryBase} returns `this` so that calls can be chained.\n     */\n    ignoreUnmapped(enable) {\n        this._queryOpts.ignore_unmapped = enable;\n        return this;\n    }\n\n    /**\n     * Sets the inner hits options\n     *\n     * @param {InnerHits} innerHits A valid `InnerHits` object\n     * @returns {JoiningQueryBase} returns `this` so that calls can be chained.\n     */\n    innerHits(innerHits) {\n        checkType(innerHits, InnerHits);\n\n        this._queryOpts.inner_hits = innerHits;\n        return this;\n    }\n}\n\nmodule.exports = JoiningQueryBase;\n"
  },
  {
    "path": "src/queries/joining-queries/nested-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst JoiningQueryBase = require('./joining-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-nested-query.html';\n\n/**\n * Nested query allows to query nested objects. The query is executed against\n * the nested objects / docs as if they were indexed as separate docs\n * (they are, internally) and resulting in the root parent doc (or parent nested mapping).\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-nested-query.html)\n *\n * @example\n * const qry = esb.nestedQuery()\n *     .path('obj1')\n *     .scoreMode('avg')\n *     .query(\n *         esb.boolQuery().must([\n *             esb.matchQuery('obj1.name', 'blue'),\n *             esb.rangeQuery('obj1.count').gt(5)\n *         ])\n *     );\n *\n * @param {Query=} qry A valid `Query` object\n * @param {string=} path The nested object path.\n *\n * @extends JoiningQueryBase\n */\nclass NestedQuery extends JoiningQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(qry, path) {\n        super('nested', ES_REF_URL, qry);\n\n        if (!_.isNil(path)) this._queryOpts.path = path;\n    }\n\n    /**\n     * Sets the root context for the nested query.\n     *\n     * @param {string} path\n     * @returns {NestedQuery} returns `this` so that calls can be chained.\n     */\n    path(path) {\n        this._queryOpts.path = path;\n        return this;\n    }\n}\n\nmodule.exports = NestedQuery;\n"
  },
  {
    "path": "src/queries/joining-queries/parent-id-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst { Query } = require('../../core');\n\n/**\n * The `parent_id` query can be used to find child documents which belong to a particular parent.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-parent-id-query.html)\n *\n * @example\n * const qry = esb.parentIdQuery('blog_tag', 1);\n *\n * @param {string=} type The **child** type. This must be a type with `_parent` field.\n * @param {string|number=} id The required parent id select documents must refer to.\n *\n * @extends Query\n */\nclass ParentIdQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(type, id) {\n        super('parent_id');\n\n        if (!_.isNil(type)) this._queryOpts.type = type;\n        if (!_.isNil(id)) this._queryOpts.id = id;\n    }\n\n    /**\n     * Sets the child type.\n     *\n     * @param {string} type The **child** type. This must be a type with `_parent` field.\n     * @returns {ParentIdQuery} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        this._queryOpts.type = type;\n        return this;\n    }\n\n    /**\n     * Sets the id.\n     *\n     * @param {string|number} id The required parent id select documents must refer to.\n     * @returns {ParentIdQuery} returns `this` so that calls can be chained.\n     */\n    id(id) {\n        this._queryOpts.id = id;\n        return this;\n    }\n\n    /**\n     * When set to `true` will ignore an unmapped `path` and will not match any\n     * documents for this query. When set to `false` (the default value) the query\n     * will throw an exception if the path is not mapped.\n     *\n     * @param {boolean} enable `true` or `false`, `false` by default.\n     * @returns {ParentIdQuery} returns `this` so that calls can be chained.\n     */\n    ignoreUnmapped(enable) {\n        this._queryOpts.ignore_unmapped = enable;\n        return this;\n    }\n}\n\nmodule.exports = ParentIdQuery;\n"
  },
  {
    "path": "src/queries/match-all-query.js",
    "content": "'use strict';\n\nconst { Query } = require('../core');\n\n/**\n * The most simple query, which matches all documents, giving them all a `_score` of `1.0`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-all-query.html)\n *\n * @example\n * const qry = esb.matchAllQuery().boost(1.2);\n *\n * @extends Query\n */\nclass MatchAllQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('match_all');\n    }\n}\n\nmodule.exports = MatchAllQuery;\n"
  },
  {
    "path": "src/queries/match-none-query.js",
    "content": "'use strict';\n\nconst { Query } = require('../core');\n\n/**\n * The inverse of the `match_all` query, which matches no documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-all-query.html)\n *\n * @example\n * const qry = esb.matchNoneQuery();\n *\n * @extends Query\n */\nclass MatchNoneQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('match_none');\n    }\n}\n\nmodule.exports = MatchNoneQuery;\n"
  },
  {
    "path": "src/queries/span-queries/index.js",
    "content": "'use strict';\n\nexports.SpanLittleBigQueryBase = require('./span-little-big-query-base');\n\nexports.SpanTermQuery = require('./span-term-query');\nexports.SpanMultiTermQuery = require('./span-multi-term-query');\nexports.SpanFirstQuery = require('./span-first-query');\nexports.SpanNearQuery = require('./span-near-query');\nexports.SpanOrQuery = require('./span-or-query');\nexports.SpanNotQuery = require('./span-not-query');\nexports.SpanContainingQuery = require('./span-containing-query');\nexports.SpanWithinQuery = require('./span-within-query');\nexports.SpanFieldMaskingQuery = require('./span-field-masking-query');\n"
  },
  {
    "path": "src/queries/span-queries/span-containing-query.js",
    "content": "'use strict';\n\nconst SpanLittleBigQueryBase = require('./span-little-big-query-base');\n\n/**\n * Returns matches which enclose another span query. The span containing query\n * maps to Lucene `SpanContainingQuery`.\n *\n * Matching spans from big that contain matches from little are returned.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-containing-query.html)\n *\n * @example\n * const spanQry = esb.spanContainingQuery()\n *     .little(esb.spanTermQuery('field1', 'foo'))\n *     .big(esb.spanNearQuery()\n *         .clauses([\n *             esb.spanTermQuery('field1', 'bar'),\n *             esb.spanTermQuery('field1', 'baz')\n *         ])\n *         .slop(5)\n *         .inOrder(true))\n *\n * @extends SpanLittleBigQueryBase\n */\nclass SpanContainingQuery extends SpanLittleBigQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('span_containing');\n    }\n}\n\nmodule.exports = SpanContainingQuery;\n"
  },
  {
    "path": "src/queries/span-queries/span-field-masking-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst SpanQueryBase = require('./span-query-base');\n\n/**\n * Wrapper to allow span queries to participate in composite single-field\n * span queries by lying about their search field. The span field masking\n * query maps to Lucene's `SpanFieldMaskingQuery`.\n *\n * This can be used to support queries like span-near or span-or across\n * different fields, which is not ordinarily permitted.\n *\n * Span field masking query is invaluable in conjunction with multi-fields\n * when same content is indexed with multiple analyzers. For instance we\n * could index a field with the standard analyzer which breaks text up into\n * words, and again with the english analyzer which stems words into their root form.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-field-masking-query.html)\n *\n * @param {string=} field\n * @param {SpanQueryBase=} spanQry Any other span type query\n *\n * @example\n * const spanQry = esb.spanNearQuery()\n *     .clauses([\n *         esb.spanTermQuery('text', 'quick brown'),\n *         esb.spanFieldMaskingQuery()\n *             .field('text')\n *             .query(esb.spanTermQuery('text.stems', 'fox'))\n *     ])\n *     .slop(5)\n *     .inOrder(false);\n *\n * @extends SpanQueryBase\n */\nclass SpanFieldMaskingQuery extends SpanQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, spanQry) {\n        super('field_masking_span');\n\n        if (!_.isNil(field)) this._queryOpts.field = field;\n        if (!_.isNil(spanQry)) this.query(spanQry);\n    }\n\n    /**\n     * Sets the span query.\n     *\n     * @param {SpanQueryBase} spanQry\n     * @returns {SpanFieldMaskingQuery} returns `this` so that calls can be chained.\n     */\n    query(spanQry) {\n        checkType(spanQry, SpanQueryBase);\n\n        this._queryOpts.query = spanQry;\n        return this;\n    }\n\n    /**\n     * Sets the field to mask.\n     *\n     * @param {string} field\n     * @returns {SpanFieldMaskingQuery} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._queryOpts.field = field;\n        return this;\n    }\n}\n\nmodule.exports = SpanFieldMaskingQuery;\n"
  },
  {
    "path": "src/queries/span-queries/span-first-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst SpanQueryBase = require('./span-query-base');\n\n/**\n * Matches spans near the beginning of a field. The span first query maps to Lucene `SpanFirstQuery`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-first-query.html)\n *\n * @example\n * const spanQry = esb.spanFirstQuery()\n *     .match(esb.spanTermQuery('user', 'kimchy'))\n *     .end(3);\n *\n * @param {SpanQueryBase=} spanQry Any other span type query\n *\n * @extends SpanQueryBase\n */\nclass SpanFirstQuery extends SpanQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(spanQry) {\n        super('span_first');\n\n        if (!_.isNil(spanQry)) this.match(spanQry);\n    }\n\n    /**\n     * Sets the `match` clause which can be any other span type query.\n     *\n     * @param {SpanQueryBase} spanQry\n     * @returns {SpanFirstQuery} returns `this` so that calls can be chained.\n     */\n    match(spanQry) {\n        checkType(spanQry, SpanQueryBase);\n\n        this._queryOpts.match = spanQry;\n        return this;\n    }\n\n    /**\n     * Sets the maximum end position permitted in a match.\n     *\n     * @param {number} limit The maximum end position permitted in a match.\n     * @returns {SpanFirstQuery} returns `this` so that calls can be chained.\n     */\n    end(limit) {\n        this._queryOpts.end = limit;\n        return this;\n    }\n}\n\nmodule.exports = SpanFirstQuery;\n"
  },
  {
    "path": "src/queries/span-queries/span-little-big-query-base.js",
    "content": "'use strict';\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst SpanQueryBase = require('./span-query-base');\n\n/**\n * Base class for span queries with `little`, `big` clauses.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @extends SpanQueryBase\n */\nclass SpanLittleBigQueryBase extends SpanQueryBase {\n    /**\n     * Sets the `little` clause.\n     *\n     * @param {SpanQueryBase} spanQry Any span type query\n     * @returns {SpanLittleBigQueryBase} returns `this` so that calls can be chained.\n     */\n    little(spanQry) {\n        checkType(spanQry, SpanQueryBase);\n\n        this._queryOpts.little = spanQry;\n        return this;\n    }\n\n    /**\n     * Sets the `big` clause.\n     *\n     * @param {SpanQueryBase} spanQry Any span type query\n     * @returns {SpanLittleBigQueryBase} returns `this` so that calls can be chained.\n     */\n    big(spanQry) {\n        checkType(spanQry, SpanQueryBase);\n\n        this._queryOpts.big = spanQry;\n        return this;\n    }\n}\n\nmodule.exports = SpanLittleBigQueryBase;\n"
  },
  {
    "path": "src/queries/span-queries/span-multi-term-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst { MultiTermQueryBase } = require('../term-level-queries');\n\nconst SpanQueryBase = require('./span-query-base');\n\n/**\n * The `span_multi` query allows you to wrap a `multi term query` (one of wildcard,\n * fuzzy, prefix, range or regexp query) as a `span query`, so it can be nested.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-multi-term-query.html)\n *\n * @example\n * const spanQry = esb.spanMultiTermQuery()\n *     .match(esb.prefixQuery('user', 'ki').boost(1.08));\n *\n * @param {MultiTermQueryBase=} multiTermQry One of wildcard, fuzzy, prefix, range or regexp query\n *\n * @extends SpanQueryBase\n */\nclass SpanMultiTermQuery extends SpanQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(multiTermQry) {\n        super('span_multi');\n\n        if (!_.isNil(multiTermQry)) this.match(multiTermQry);\n    }\n\n    /**\n     * Sets the multi term query.\n     *\n     * @param {MultiTermQueryBase} multiTermQry One of wildcard, fuzzy, prefix, range or regexp query\n     * @returns {SpanMultiTermQuery} returns `this` so that calls can be chained.\n     */\n    match(multiTermQry) {\n        checkType(multiTermQry, MultiTermQueryBase);\n\n        this._queryOpts.match = multiTermQry;\n        return this;\n    }\n}\n\nmodule.exports = SpanMultiTermQuery;\n"
  },
  {
    "path": "src/queries/span-queries/span-near-query.js",
    "content": "'use strict';\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst SpanQueryBase = require('./span-query-base');\n\n/**\n * Matches spans which are near one another. One can specify `slop`, the maximum\n * number of intervening unmatched positions, as well as whether matches are\n * required to be in-order. The span near query maps to Lucene `SpanNearQuery`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-near-query.html)\n *\n * @example\n * const spanQry = esb.spanNearQuery()\n *     .clauses([\n *         esb.spanTermQuery('field', 'value1'),\n *         esb.spanTermQuery('field', 'value2'),\n *         esb.spanTermQuery('field', 'value3')\n *     ])\n *     .slop(12)\n *     .inOrder(false);\n *\n * @extends SpanQueryBase\n */\nclass SpanNearQuery extends SpanQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('span_near');\n    }\n\n    /**\n     * Sets the clauses element which is a list of one or more other span type queries.\n     *\n     * @param {Array<SpanQueryBase>} clauses\n     * @returns {SpanNearQuery} returns `this` so that calls can be chained.\n     * @throws {TypeError} If parameter `clauses` is not an instance of Array or if\n     * any member of the array is not an instance of `SpanQueryBase`.\n     */\n    clauses(clauses) {\n        checkType(clauses, Array);\n        clauses.forEach(clause => checkType(clause, SpanQueryBase));\n\n        this._queryOpts.clauses = clauses;\n        return this;\n    }\n\n    /**\n     * Configures the `slop`(default is 0), the maximum number of intervening\n     * unmatched positions permitted.\n     *\n     * @param {number} slop A positive integer value, defaults is 0.\n     * @returns {SpanNearQuery} returns `this` so that calls can be chained.\n     */\n    slop(slop) {\n        this._queryOpts.slop = slop;\n        return this;\n    }\n\n    // TODO: Add 
documentation for inOrder\n\n    /**\n     *\n     * @param {boolean} enable\n     * @returns {SpanNearQuery} returns `this` so that calls can be chained.\n     */\n    inOrder(enable) {\n        this._queryOpts.in_order = enable;\n        return this;\n    }\n}\n\nmodule.exports = SpanNearQuery;\n"
  },
  {
    "path": "src/queries/span-queries/span-not-query.js",
    "content": "'use strict';\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst SpanQueryBase = require('./span-query-base');\n\n/**\n * Removes matches which overlap with another span query. The span not query\n * maps to Lucene `SpanNotQuery`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-not-query.html)\n *\n * @example\n * const spanQry = esb.spanNotQuery()\n *     .include(esb.spanTermQuery('field1', 'hoya'))\n *     .exclude(esb.spanNearQuery()\n *         .clauses([\n *             esb.spanTermQuery('field1', 'la'),\n *             esb.spanTermQuery('field1', 'hoya')\n *         ])\n *         .slop(0)\n *         .inOrder(true));\n *\n * @extends SpanQueryBase\n */\nclass SpanNotQuery extends SpanQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('span_not');\n    }\n\n    /**\n     * Sets the `include` clause which is the span query whose matches are filtered\n     *\n     * @param {SpanQueryBase} spanQry\n     * @returns {SpanNotQuery} returns `this` so that calls can be chained.\n     */\n    include(spanQry) {\n        checkType(spanQry, SpanQueryBase);\n\n        this._queryOpts.include = spanQry;\n        return this;\n    }\n\n    /**\n     * Sets the `exclude` clause which is the span query whose matches must\n     * not overlap those returned.\n     *\n     * @param {SpanQueryBase} spanQry\n     * @returns {SpanNotQuery} returns `this` so that calls can be chained.\n     */\n    exclude(spanQry) {\n        checkType(spanQry, SpanQueryBase);\n\n        this._queryOpts.exclude = spanQry;\n        return this;\n    }\n\n    /**\n     * If set the amount of tokens before the include span can't have overlap with\n     * the exclude span.\n     *\n     * @param {number} pre\n     * @returns {SpanNotQuery} returns `this` so that calls can be chained.\n     */\n    pre(pre) {\n        this._queryOpts.pre = pre;\n        
return this;\n    }\n\n    /**\n     * If set the amount of tokens after the include span can't have overlap with the exclude span.\n     *\n     * @param {number} post\n     * @returns {SpanNotQuery} returns `this` so that calls can be chained.\n     */\n    post(post) {\n        this._queryOpts.post = post;\n        return this;\n    }\n\n    /**\n     * If set the amount of tokens from within the include span can't have overlap\n     * with the exclude span. Equivalent of setting both `pre` and `post`.\n     *\n     * @param {number} dist\n     * @returns {SpanNotQuery} returns `this` so that calls can be chained.\n     */\n    dist(dist) {\n        this._queryOpts.dist = dist;\n        return this;\n    }\n}\n\nmodule.exports = SpanNotQuery;\n"
  },
  {
    "path": "src/queries/span-queries/span-or-query.js",
    "content": "'use strict';\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst SpanQueryBase = require('./span-query-base');\n\n/**\n * Matches the union of its span clauses. The span or query maps to Lucene `SpanOrQuery`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-or-query.html)\n *\n * @example\n * const spanQry = esb.spanOrQuery()\n *     .clauses([\n *         esb.spanTermQuery('field', 'value1'),\n *         esb.spanTermQuery('field', 'value2'),\n *         esb.spanTermQuery('field', 'value3')\n *     ]);\n *\n * @extends SpanQueryBase\n */\nclass SpanOrQuery extends SpanQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('span_or');\n    }\n\n    /**\n     * Sets the clauses element which is a list of one or more other span type queries.\n     *\n     * @param {Array<SpanQueryBase>} clauses\n     * @returns {SpanOrQuery} returns `this` so that calls can be chained.\n     * @throws {TypeError} If parameter `clauses` is not an instance of Array or if\n     * any member of the array is not an instance of `SpanQueryBase`.\n     */\n    clauses(clauses) {\n        checkType(clauses, Array);\n        clauses.forEach(clause => checkType(clause, SpanQueryBase));\n\n        this._queryOpts.clauses = clauses;\n        return this;\n    }\n}\n\nmodule.exports = SpanOrQuery;\n"
  },
  {
    "path": "src/queries/span-queries/span-query-base.js",
    "content": "'use strict';\n\nconst { Query } = require('../../core');\n\n/**\n * Interface-like class used to group and identify various implementations of Span queries.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @extends Query\n */\nclass SpanQueryBase extends Query {}\n\nmodule.exports = SpanQueryBase;\n"
  },
  {
    "path": "src/queries/span-queries/span-term-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst SpanQueryBase = require('./span-query-base');\n\n/**\n * Matches spans containing a term. The span term query maps to Lucene `SpanTermQuery`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-term-query.html)\n *\n * @example\n * const qry = esb.spanTermQuery('user', 'kimchy');\n *\n * @example\n * const qry = esb.spanTermQuery()\n *     .field('user')\n *     .value('kimchy')\n *     .boost(2.0);\n *\n * @param {string=} field The document field to query against\n * @param {string|number=} value The query string\n *\n * @extends SpanQueryBase\n */\nclass SpanTermQuery extends SpanQueryBase {\n    // This is extremely similar to ValueTermQueryBase\n    // Maybe rename, move and reuse it?\n\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, value) {\n        super('span_term');\n\n        if (!_.isNil(field)) this._field = field;\n        if (!_.isNil(value)) this._queryOpts.value = value;\n    }\n\n    /**\n     * Sets the field to search on.\n     *\n     * @param {string} field\n     * @returns {SpanTermQuery} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._field = field;\n        return this;\n    }\n\n    /**\n     * Sets the query string.\n     *\n     * @param {string|number} queryVal\n     * @returns {SpanTermQuery} returns `this` so that calls can be chained.\n     */\n    value(queryVal) {\n        this._queryOpts.value = queryVal;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the Span term query\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        // recursiveToJSON doesn't seem to be required here.\n\n        // Revisit this.. 
Smells a little bit\n        if (!_.has(this._queryOpts, 'value')) {\n            throw new Error('Value is required for Span term query!');\n        }\n\n        const qryOpts =\n            Object.keys(this._queryOpts).length === 1\n                ? this._queryOpts.value\n                : this._queryOpts;\n        return {\n            [this.queryType]: {\n                [this._field]: qryOpts\n            }\n        };\n    }\n}\n\nmodule.exports = SpanTermQuery;\n"
  },
  {
    "path": "src/queries/span-queries/span-within-query.js",
    "content": "'use strict';\n\nconst SpanLittleBigQueryBase = require('./span-little-big-query-base');\n\n/**\n * Returns matches which are enclosed inside another span query. The span within\n * query maps to Lucene `SpanWithinQuery`.\n *\n * Matching spans from `little` that are enclosed within `big` are returned.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-span-within-query.html)\n *\n * @example\n * const spanQry = esb.spanWithinQuery()\n *     .little(esb.spanTermQuery('field1', 'foo'))\n *     .big(esb.spanNearQuery()\n *         .clauses([\n *             esb.spanTermQuery('field1', 'bar'),\n *             esb.spanTermQuery('field1', 'baz')\n *         ])\n *         .slop(5)\n *         .inOrder(true));\n *\n * @extends SpanLittleBigQueryBase\n */\nclass SpanWithinQuery extends SpanLittleBigQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('span_within');\n    }\n}\n\nmodule.exports = SpanWithinQuery;\n"
  },
  {
    "path": "src/queries/specialized-queries/distance-feature-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\nconst { Query } = require('../../core');\n\n/**\n * The `distance_feature` query can be used to filter documents that are inside\n * a timeframe or radius given an **origin** point. For dates the difference can be\n * minutes, hours, etc and for coordinates it can be meters, kilometers..\n *\n *  [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-distance-feature-query.html)\n *\n * NOTE: Only available in Elasticsearch 7.1.0+.\n *\n * @example\n * const query = new DistanceFeatureQuery('time');\n *   query\n *       .origin('now')\n *       .pivot('1h')\n *       .toJSON();\n * @param {string} field The field inside the document to be used in the query\n * @extends Query\n */\nclass DistanceFeatureQuery extends Query {\n    /**\n     * @param {string} field The field inside the document to be used in the query\n     */\n    constructor(field) {\n        super('distance_feature');\n        if (!_.isNil(field)) this._queryOpts.field = field;\n    }\n\n    /**\n     * Sets the field for the `distance_feature` query\n     * @param {string} fieldName Name of the field inside the document\n     * @returns {DistanceFeatureQuery} Instance of the distance feature query\n     */\n    field(fieldName) {\n        this._queryOpts.field = fieldName;\n        return this;\n    }\n\n    /**\n     * Sets the origin of the function. Date or point of coordinates\n     * used to calculate distances\n     * @param {GeoPoint | string} originPoint Array of coordinates, LatLng object, \"now-1h\"\n     * @returns {DistanceFeatureQuery} Instance of the distance feature query\n     */\n    origin(originPoint) {\n        this._queryOpts.origin = originPoint;\n        return this;\n    }\n\n    /**\n     * Distance from the origin at which relevance scores receive half of the boost value.\n     * @param {string} pivotDistance Distance value. 
If the field value is date then this must be a\n     * [time unit](https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#time-units).\n     * If it's a geo point field, then a [distance unit](https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#distance-units)\n     * @returns {DistanceFeatureQuery} Instance of the distance feature query\n     */\n    pivot(pivotDistance) {\n        this._queryOpts.pivot = pivotDistance;\n        return this;\n    }\n}\n\nmodule.exports = DistanceFeatureQuery;\n"
  },
  {
    "path": "src/queries/specialized-queries/index.js",
    "content": "'use strict';\n\nexports.MoreLikeThisQuery = require('./more-like-this-query');\nexports.ScriptQuery = require('./script-query');\nexports.ScriptScoreQuery = require('./script-score-query');\nexports.PercolateQuery = require('./percolate-query');\nexports.DistanceFeatureQuery = require('./distance-feature-query');\nexports.RankFeatureQuery = require('./rank-feature-query');\n"
  },
  {
    "path": "src/queries/specialized-queries/more-like-this-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * The More Like This Query (MLT Query) finds documents that are \"like\" a given set\n * of documents. In order to do so, MLT selects a set of representative terms of\n * these input documents, forms a query using these terms, executes the query and\n * returns the results. The user controls the input documents, how the terms should\n * be selected and how the query is formed.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-mlt-query.html)\n *\n * @example\n * // Ask for documents that are similar to a provided piece of text\n * const qry = esb.moreLikeThisQuery()\n *     .fields(['title', 'description'])\n *     .like('Once upon a time')\n *     .minTermFreq(1)\n *     .maxQueryTerms(12);\n *\n * @example\n * // Mixing texts with documents already existing in the index\n * const qry = esb.moreLikeThisQuery()\n *     .fields(['title', 'description'])\n *     .like({ _index: 'imdb', _type: 'movies', _id: '1' })\n *     .like({ _index: 'imdb', _type: 'movies', _id: '2' })\n *     .like('and potentially some more text here as well')\n *     .minTermFreq(1)\n *     .maxQueryTerms(12);\n *\n * @example\n * // Provide documents not present in the index\n * const qry = esb.moreLikeThisQuery()\n *     .fields(['name.first', 'name.last'])\n *     .like([\n *         {\n *             _index: 'marvel',\n *             _type: 'quotes',\n *             doc: {\n *                 name: { first: 'Ben', last: 'Grimm' },\n *                 tweet: \"You got no idea what I'd... 
what I'd give to be invisible.\"\n *             }\n *         },\n *         { _index: 'marvel', _type: 'quotes', _id: '2' }\n *     ])\n *     .minTermFreq(1)\n *     .maxQueryTerms(12);\n *\n * @extends Query\n */\nclass MoreLikeThisQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('more_like_this');\n    }\n\n    /**\n     *\n     * @private\n     * @param {string} clauseType\n     * @param {string|Object|Array} clauses\n     */\n    _setSearchClause(clauseType, clauses) {\n        // Replace the field. Don't care about previous contents\n        if (Array.isArray(clauses)) this._queryOpts[clauseType] = clauses;\n        else if (!_.has(this._queryOpts, clauseType)) {\n            // Keep the single `like` without array.\n            this._queryOpts[clauseType] = clauses;\n        } else {\n            // Wrap the single `like` in an array\n            if (!Array.isArray(this._queryOpts[clauseType])) {\n                this._queryOpts[clauseType] = [this._queryOpts[clauseType]];\n            }\n            // Append to array\n            this._queryOpts[clauseType].push(clauses);\n        }\n    }\n\n    /**\n     * Sets the list of fields to fetch and analyze the text from. Defaults to\n     * the `_all` field for free text and to all possible fields for document inputs.\n     *\n     * @param {Array<string>} fields Array of fields to search against\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    fields(fields) {\n        checkType(fields, Array);\n\n        this._queryOpts.fields = fields;\n        return this;\n    }\n\n    /**\n     * Sets the search clause for the query. It is the only required parameter of the MLT query\n     * and follows a versatile syntax, in which the user can specify free form text and/or\n     * a single or multiple documents (see examples above). 
The syntax to specify documents\n     * is similar to the one used by the [Multi GET API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-multi-get.html).\n     * When specifying documents, the text is fetched from fields unless overridden\n     * in each document request. The text is analyzed by the analyzer at the field,\n     * but could also be overridden. The syntax to override the analyzer at the\n     * field follows a similar syntax to the `per_field_analyzer` parameter of the\n     * [Term Vectors API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-termvectors.html#docs-termvectors-per-field-analyzer).\n     * Additionally, to provide documents not necessarily present in the index,\n     * [artificial documents](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-termvectors.html#docs-termvectors-artificial-doc)\n     * are also supported.\n     *\n     * If string or object is passed, it is\n     * appended to the list. If an array is passed, it replaces the existing list.\n     *\n     * @param {string|Object|Array} like Can be passed as a string,\n     * Object representing indexed document, or array of string/objects.\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    like(like) {\n        this._setSearchClause('like', like);\n        return this;\n    }\n\n    /**\n     * The `unlike` parameter is used in conjunction with `like` in order not to\n     * select terms found in a chosen set of documents. 
In other words, we could ask\n     * for documents `like`: \"Apple\", but `unlike`: \"cake crumble tree\".\n     * The syntax is the same as like.\n     *\n     * @param {string|Object|Array} unlike Can be passed as a string,\n     * Object representing indexed document, or array of string/objects.\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    unlike(unlike) {\n        this._setSearchClause('unlike', unlike);\n        return this;\n    }\n\n    /**\n     * Sets the text to find documents like it.\n     *\n     * Note: This parameter has been removed in elasticsearch 6.0. Use `like`\n     * instead.\n     *\n     * @param {string} txt The text to find documents like it.\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    likeText(txt) {\n        this._queryOpts.like_text = txt;\n        return this;\n    }\n\n    /**\n     * Sets the list of `ids` for the documents with syntax similar to\n     * the [Multi GET API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-multi-get.html).\n     *\n     * Note: This parameter has been removed in elasticsearch 6.0. Use `like`\n     * instead.\n     *\n     * @param {Array<string>} ids\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    ids(ids) {\n        checkType(ids, Array);\n\n        this._queryOpts.ids = ids;\n        return this;\n    }\n\n    /**\n     * Sets the list of `docs` for the documents with syntax similar to\n     * the [Multi GET API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-multi-get.html).\n     *\n     * Note: This parameter has been removed in elasticsearch 6.0. 
Use `like`\n     * instead.\n     *\n     * @param {Array<Object>} docs\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    docs(docs) {\n        checkType(docs, Array);\n\n        this._queryOpts.docs = docs;\n        return this;\n    }\n\n    /**\n     * Sets the maximum number of query terms that will be selected.\n     * Increasing this value gives greater accuracy at the expense of query execution speed.\n     * Defaults to `25`.\n     *\n     * @param {number} termsLimit The maximum number of query terms that will be selected.\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    maxQueryTerms(termsLimit) {\n        this._queryOpts.max_query_terms = termsLimit;\n        return this;\n    }\n\n    /**\n     * Sets the minimum term frequency below which the terms will be ignored from\n     * the input document. Defaults to 2.\n     *\n     * @param {number} termFreqLimit\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    minTermFreq(termFreqLimit) {\n        this._queryOpts.min_term_freq = termFreqLimit;\n        return this;\n    }\n\n    /**\n     * Sets the minimum document frequency below which the terms will be ignored\n     * from the input document. Defaults to `5`.\n     *\n     * @param {number} docFreqLimit The minimum document frequency\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    minDocFreq(docFreqLimit) {\n        this._queryOpts.min_doc_freq = docFreqLimit;\n        return this;\n    }\n\n    /**\n     * Sets the maximum document frequency above which the terms will be ignored\n     * from the input document. 
Defaults to unbounded (`0`).\n     *\n     * @param {number} docFreqLimit The minimum document frequency\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    maxDocFreq(docFreqLimit) {\n        this._queryOpts.max_doc_freq = docFreqLimit;\n        return this;\n    }\n\n    /**\n     * Sets the minimum word length below which the terms will be ignored.\n     * Defaults to `0`.\n     *\n     * @param {number} wordLenLimit\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    minWordLength(wordLenLimit) {\n        this._queryOpts.min_word_length = wordLenLimit;\n        return this;\n    }\n\n    /**\n     * Sets the maximum word length above which the terms will be ignored.\n     * Defaults to unbounded (`0`).\n     *\n     * @param {number} wordLenLimit\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    maxWordLength(wordLenLimit) {\n        this._queryOpts.max_word_length = wordLenLimit;\n        return this;\n    }\n\n    /**\n     * Sets the array of stop words. Any word in this set is considered\n     * \"uninteresting\" and ignored.\n     *\n     * @param {Array<string>} words Array of stop words.\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained\n     */\n    stopWords(words) {\n        this._queryOpts.stop_words = words;\n        return this;\n    }\n\n    /**\n     * Set the analyzer to control which analyzer will perform the analysis process on the text.\n     * Defaults to the analyzer associated with the first field in `fields`.\n     *\n     * @param {string} analyzer A valid text analyzer.\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained.\n     */\n    analyzer(analyzer) {\n        this._queryOpts.analyzer = analyzer;\n        return this;\n    }\n\n    /**\n     * Sets the value controlling how many `should` clauses in the boolean\n     * query should match. 
It can be an absolute value (2), a percentage (30%)\n     * or a combination of both. (Defaults to `\"30%\"`).\n     *\n     * @param {string|number} minimumShouldMatch An absolute value (`2`), a percentage (`30%`)\n     * or a combination of both.\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained.\n     */\n    minimumShouldMatch(minimumShouldMatch) {\n        this._queryOpts.minimum_should_match = minimumShouldMatch;\n        return this;\n    }\n\n    /**\n     * Sets the boost factor to use when boosting terms.\n     * Defaults to deactivated (`0`).\n     *\n     * @param {number} boost A positive value to boost terms.\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained.\n     */\n    boostTerms(boost) {\n        this._queryOpts.boost_terms = boost;\n        return this;\n    }\n\n    /**\n     * Specifies whether the input documents should also be included in the\n     * search results returned. Defaults to `false`.\n     *\n     * @param {boolean} enable\n     * @returns {MoreLikeThisQuery} returns `this` so that calls can be chained.\n     */\n    include(enable) {\n        this._queryOpts.include = enable;\n        return this;\n    }\n}\n\nmodule.exports = MoreLikeThisQuery;\n"
  },
  {
    "path": "src/queries/specialized-queries/percolate-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType },\n    Query\n} = require('../../core');\n\n/**\n * The `percolate` query can be used to match queries stored in an index.\n * The `percolate` query itself contains the document that will be used\n * as query to match with the stored queries.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-percolate-query.html)\n *\n * @example\n * const percolateQry = esb.percolateQuery('query', 'doctype')\n *     .document({ message: 'A new bonsai tree in the office' });\n *\n * const percolateQry = esb.percolateQuery()\n *     .field('query')\n *     .documentType('doctype')\n *     .index('my-index')\n *     .type('message')\n *     .id('1')\n *     .version(1);\n *\n * @param {string=} field The field of type `percolator` and that holds the indexed queries.\n * @param {string=} docType The type / mapping of the document being percolated.\n *\n * @extends Query\n */\nclass PercolateQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, docType) {\n        super('percolate');\n        this._queryOpts.documents = [];\n\n        if (!_.isNil(field)) this._queryOpts.field = field;\n        // Delegate this to method:\n        if (!_.isNil(docType)) this._queryOpts.document_type = docType;\n    }\n\n    /**\n     * Sets the field of type `percolator` and that holds the indexed queries.\n     *\n     * @param {string} field The field of type `percolator` and that holds the indexed queries.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._queryOpts.field = field;\n        return this;\n    }\n\n    /**\n     * Sets the type / mapping of the document being percolated.\n     *\n     * Note: This param has been deprecated in elasticsearch 6.0. 
From 6.0 and\n     * later, it is no longer required to specify the `document_type` parameter.\n     *\n     * @param {string} docType The type / mapping of the document being percolated.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    documentType(docType) {\n        this._queryOpts.document_type = docType;\n        return this;\n    }\n\n    /**\n     * Appends given source document to the list of source documents being percolated.\n     * Instead of specifying the source document being percolated,\n     * the source can also be retrieved from an already stored document.\n     *\n     * @example\n     *const qry = esb.percolateQuery('query', 'people')\n     * .document({ name: 'Will Smith' });\n     *\n     * @param {Object} doc The source document being percolated.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    document(doc) {\n        this._queryOpts.documents.push(doc);\n        return this;\n    }\n\n    /**\n     * Appends given source documents to the list of source documents being percolated.\n     * Instead of specifying the source documents being percolated,\n     * the source can also be retrieved from already stored documents.\n     *\n     * @example\n     *const qry = esb.percolateQuery('query', 'people')\n     * .documents([{ name: 'Will Smith' }, { name: 'Willow Smith' }]);\n     *\n     * @param {Object[]} docs The source documents being percolated.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    documents(docs) {\n        checkType(docs, Array);\n\n        this._queryOpts.documents = this._queryOpts.documents.concat(docs);\n        return this;\n    }\n\n    /**\n     * Sets the index the document resides in. 
This is a required parameter if `document`\n     * is not specified.\n     *\n     * @param {string} index The index the document resides in.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    index(index) {\n        this._queryOpts.index = index;\n        return this;\n    }\n\n    /**\n     * Sets the type of the document to fetch. This is a required parameter if `document`\n     * is not specified.\n     *\n     * @param {string} type The type of the document to fetch.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        this._queryOpts.type = type;\n        return this;\n    }\n\n    /**\n     * Sets the id of the document to fetch. This is a required parameter if `document`\n     * is not specified.\n     *\n     * @param {string} id The id of the document to fetch.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    id(id) {\n        this._queryOpts.id = id;\n        return this;\n    }\n\n    /**\n     * Sets the routing to be used to fetch document to percolate. Optional.\n     *\n     * @param {string} routing The routing to be used to fetch document to percolate.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    routing(routing) {\n        this._queryOpts.routing = routing;\n        return this;\n    }\n\n    /**\n     * Sets the preference to be used to fetch document to percolate. Optional.\n     *\n     * @param {string} preference The preference to be used to fetch document to percolate.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    preference(preference) {\n        this._queryOpts.preference = preference;\n        return this;\n    }\n\n    /**\n     * Sets the expected version of the document to be fetched. 
Optional.\n     * If the version does not match, the search request will fail\n     * with a version conflict error.\n     *\n     * @param {string} version The expected version of the document to be fetched.\n     * @returns {PercolateQuery} returns `this` so that calls can be chained.\n     */\n    version(version) {\n        this._queryOpts.version = version;\n        return this;\n    }\n}\n\nmodule.exports = PercolateQuery;\n"
  },
  {
    "path": "src/queries/specialized-queries/rank-feature-query.js",
    "content": "'use strict';\n\nconst { Query } = require('../../core');\nconst _ = require('../../_');\n\n/**\n * The rank_feature query boosts the relevance score on the numeric value of\n * document with a rank_feature/rank_features field.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-rank-feature-query.html)\n *\n * NOTE: This query was added in elasticsearch v7.0.\n *\n * @example\n * const query = new RankFeatureQuery('rank_feature_field');\n *   query\n *       .linear()\n *       .toJSON();\n * @param {string} field The field inside the document to be used in the query\n * @extends Query\n */\nclass RankFeatureQuery extends Query {\n    /**\n     * @param {string} field The field inside the document to be used in the query\n     */\n    constructor(field) {\n        super('rank_feature');\n        if (!_.isNil(field)) this._queryOpts.field = field;\n    }\n\n    /**\n     * Sets the field for the `rank_feature` query\n     * @param {string} fieldName Name of the field inside the document\n     * @returns {RankFeatureQuery} Instance of the distance feature query\n     */\n    field(fieldName) {\n        this._queryOpts.field = fieldName;\n        return this;\n    }\n\n    /**\n     * Linear function to boost relevance scores based on the value of the rank feature field\n     * @returns {RankFeatureQuery}\n     */\n    linear() {\n        this._queryOpts.linear = {};\n        return this;\n    }\n\n    /**\n     * Saturation function to boost relevance scores based on the value of the rank feature field.\n     * Uses a default pivot value computed by Elasticsearch.\n     * @returns {RankFeatureQuery}\n     */\n    saturation() {\n        this._queryOpts.saturation = {};\n        return this;\n    }\n\n    /**\n     * Saturation function to boost relevance scores based on the value of the rank feature field.\n     * @param {number} pivot\n     * @returns {RankFeatureQuery}\n     */\n    
saturationPivot(pivot) {\n        this._queryOpts.saturation = {};\n        this._queryOpts.saturation.pivot = pivot;\n        return this;\n    }\n\n    /**\n     * The log function gives a score equal to log(scaling_factor + S), where S\n     * is the value of the rank feature field and scaling_factor is a configurable\n     * scaling factor.\n     * @param {number} scaling_factor\n     * @returns {RankFeatureQuery}\n     */\n    log(scalingFactor) {\n        this._queryOpts.log = {};\n        this._queryOpts.log.scaling_factor = scalingFactor;\n        return this;\n    }\n\n    /**\n     * The sigmoid function extends the saturation function with a configurable exponent.\n     * @param {number} pivot\n     * @param {number} exponent\n     * @returns {RankFeatureQuery}\n     */\n    sigmoid(pivot, exponent) {\n        this._queryOpts.sigmoid = {};\n        this._queryOpts.sigmoid.pivot = pivot;\n        this._queryOpts.sigmoid.exponent = exponent;\n        return this;\n    }\n}\n\nmodule.exports = RankFeatureQuery;\n"
  },
  {
    "path": "src/queries/specialized-queries/script-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    Script,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * A query allowing to define scripts as queries.\n * They are typically used in a filter context.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-script-query.html)\n *\n * @example\n * const scriptQry = esb.scriptQuery(esb.script()\n *  .lang('painless')\n *  .inline(\"doc['num1'].value > 1\"))\n *\n * // Use in filter context\n * const qry = esb.boolQuery().must(scriptQry);\n *\n * @param {Script=} script\n *\n * @extends Query\n */\nclass ScriptQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(script) {\n        super('script');\n\n        if (!_.isNil(script)) this.script(script);\n    }\n\n    /**\n     * Sets the `script` for query.\n     *\n     * @param {Script} script\n     * @returns {ScriptQuery} returns `this` so that calls can be chained.\n     */\n    script(script) {\n        checkType(script, Script);\n\n        this._queryOpts.script = script;\n        return this;\n    }\n}\n\nmodule.exports = ScriptQuery;\n"
  },
  {
    "path": "src/queries/specialized-queries/script-score-query.js",
    "content": "'use strict';\n\nconst {\n    Query,\n    Script,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * A query that uses a script to provide a custom score for returned documents.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-script-score-query.html)\n *\n * NOTE: This query was added in elasticsearch v7.0.\n *\n * @example\n * const qry = esb.scriptScoreQuery()\n *   .query(esb.matchQuery(\"message\", \"elasticsearch\"))\n *   .script(esb.script().source(\"doc['my-int'].value / 10\"))\n *\n * @extends Query\n */\nclass ScriptScoreQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor() {\n        super('script_score');\n    }\n\n    /**\n     * Sets the query used to return documents.\n     *\n     * @param {Query} query A valid `Query` object\n     * @returns {ScriptScoreQuery} returns `this` so that calls can be chained.\n     */\n    query(query) {\n        checkType(query, Query);\n\n        this._queryOpts.query = query;\n        return this;\n    }\n\n    /**\n     * Sets the script used to compute the score of documents returned by the query.\n     *\n     * @param {Script} script A valid `Script` object\n     * @returns {ScriptScoreQuery} returns `this` so that calls can be chained.\n     */\n    script(script) {\n        checkType(script, Script);\n\n        this._queryOpts.script = script;\n        return this;\n    }\n\n    /**\n     * Sets the minimum score limit for documents to be included in search result.\n     *\n     * @param {number} limit Minimum score threshold\n     * @returns {ScriptScoreQuery} returns `this` so that calls can be chained.\n     */\n    minScore(limit) {\n        this._queryOpts.min_score = limit;\n        return this;\n    }\n}\n\nmodule.exports = ScriptScoreQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/exists-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst { Query } = require('../../core');\n\n/**\n * Returns documents that have at least one non-`null` value in the original field\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-exists-query.html)\n *\n * @example\n * const qry = esb.existsQuery('user');\n *\n * @example\n * const qry = esb.boolQuery().mustNot(esb.existsQuery('user'));\n *\n * @param {string=} field\n *\n * @extends Query\n */\nclass ExistsQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field) {\n        super('exists');\n\n        if (!_.isNil(field)) this._queryOpts.field = field;\n    }\n\n    /**\n     * Sets the field to search on.\n     *\n     * @param {string} field\n     * @returns {ExistsQuery} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._queryOpts.field = field;\n        return this;\n    }\n}\n\nmodule.exports = ExistsQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/fuzzy-query.js",
    "content": "'use strict';\n\nconst MultiTermQueryBase = require('./multi-term-query-base');\n\n/**\n * The fuzzy query generates all possible matching terms that are within\n * the maximum edit distance specified in `fuzziness` and then checks\n * the term dictionary to find out which of those generated terms\n * actually exist in the index.\n *\n * The fuzzy query uses similarity based on Levenshtein edit distance.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-fuzzy-query.html)\n *\n * @example\n * const qry = esb.fuzzyQuery('user', 'ki');\n *\n * @example\n * // More advanced settings\n * const qry = esb.fuzzyQuery('user', 'ki')\n *     .fuzziness(2)\n *     .prefixLength(0)\n *     .maxExpansions(100)\n *     .boost(1.0);\n *\n * @param {string=} field\n * @param {string|number=} value\n *\n * @extends MultiTermQueryBase\n */\nclass FuzzyQuery extends MultiTermQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, value) {\n        super('fuzzy', field, value);\n    }\n\n    /**\n     * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n     * the number of one character changes that need to be made to one string to make it\n     * the same as another string.\n     *\n     * @param {number|string} factor Can be specified either as a number, or the maximum\n     * number of edits, or as `AUTO` which generates an edit distance based on the length\n     * of the term.\n     * @returns {FuzzyQuery} returns `this` so that calls can be chained.\n     */\n    fuzziness(factor) {\n        this._queryOpts.fuzziness = factor;\n        return this;\n    }\n\n    /**\n     * The number of initial characters which will not be “fuzzified”.\n     * This helps to reduce the number of terms which must be examined. Defaults to `0`.\n     *\n     * @param {number} len Characters to skip fuzzy for. 
Defaults to `0`.\n     * @returns {FuzzyQuery} returns `this` so that calls can be chained.\n     */\n    prefixLength(len) {\n        this._queryOpts.prefix_length = len;\n        return this;\n    }\n\n    /**\n     * The maximum number of terms that the fuzzy query will expand to. Defaults to `50`.\n     *\n     * @param {number} limit Limit for fuzzy query expansion. Defaults to `50`.\n     * @returns {FuzzyQuery} returns `this` so that calls can be chained.\n     */\n    maxExpansions(limit) {\n        this._queryOpts.max_expansions = limit;\n        return this;\n    }\n\n    /**\n     * Transpositions (`ab` → `ba`) are allowed by default but can be disabled\n     * by setting `transpositions` to false.\n     *\n     * @param {boolean} enable\n     * @returns {FuzzyQuery} returns `this` so that calls can be chained.\n     */\n    transpositions(enable) {\n        this._queryOpts.transpositions = enable;\n        return this;\n    }\n}\n\nmodule.exports = FuzzyQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/ids-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * Filters documents that only have the provided ids.\n * Note, this query uses the _uid field.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-ids-query.html)\n *\n * @example\n * const qry = esb.idsQuery('my_type', ['1', '4', '100']);\n *\n * @param {Array|string=} type The elasticsearch doc type\n * @param {Array=} ids List of ids to fiter on.\n *\n * @extends Query\n */\nclass IdsQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(type, ids) {\n        super('ids');\n\n        if (!_.isNil(type)) this._queryOpts.type = type;\n        if (!_.isNil(ids)) this.values(ids);\n    }\n\n    /**\n     * Sets the elasticsearch doc type to query on.\n     * The type is optional and can be omitted, and can also accept an array of values.\n     * If no type is specified, all types defined in the index mapping are tried.\n     *\n     * @param {Array<string>|string} type The elasticsearch doc type\n     * @returns {IdsQuery} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        this._queryOpts.type = type;\n        return this;\n    }\n\n    /**\n     * Sets the list of ids to fiter on.\n     *\n     * @param {Array<string|number>} ids\n     * @returns {IdsQuery} returns `this` so that calls can be chained.\n     */\n    values(ids) {\n        checkType(ids, Array);\n\n        this._queryOpts.values = ids;\n        return this;\n    }\n\n    /**\n     * Sets the list of ids to fiter on.\n     * Alias for `values` method.\n     *\n     * @param {Array<string|number>} ids\n     * @returns {IdsQuery} returns `this` so that calls can be chained.\n     */\n    ids(ids) {\n        return this.values(ids);\n    }\n}\n\nmodule.exports = IdsQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/index.js",
    "content": "'use strict';\n\nexports.MultiTermQueryBase = require('./multi-term-query-base');\n\nexports.TermQuery = require('./term-query');\nexports.TermsQuery = require('./terms-query');\nexports.TermsSetQuery = require('./terms-set-query');\nexports.RangeQuery = require('./range-query');\nexports.ExistsQuery = require('./exists-query');\nexports.PrefixQuery = require('./prefix-query');\nexports.WildcardQuery = require('./wildcard-query');\nexports.RegexpQuery = require('./regexp-query');\nexports.FuzzyQuery = require('./fuzzy-query');\nexports.TypeQuery = require('./type-query');\nexports.IdsQuery = require('./ids-query');\n"
  },
  {
    "path": "src/queries/term-level-queries/multi-term-query-base.js",
    "content": "'use strict';\n\nconst ValueTermQueryBase = require('./value-term-query-base');\n\n/**\n * Interface-like class used to group and identify various implementations of\n * multi term queries:\n *\n * - Wildcard Query\n * - Fuzzy Query\n * - Prefix Query\n * - Range Query\n * - Regexp Query\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @extends ValueTermQueryBase\n */\nclass MultiTermQueryBase extends ValueTermQueryBase {}\n\nmodule.exports = MultiTermQueryBase;\n"
  },
  {
    "path": "src/queries/term-level-queries/prefix-query.js",
    "content": "'use strict';\n\nconst MultiTermQueryBase = require('./multi-term-query-base');\nconst { validateRewiteMethod } = require('../helper');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-prefix-query.html';\n\n/**\n * Matches documents that have fields containing terms with a specified prefix (**not analyzed**).\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-prefix-query.html)\n *\n * @example\n * const qry = esb.prefixQuery('user', 'ki').boost(2.0);\n *\n * @param {string=} field\n * @param {string|number=} value\n *\n * @extends MultiTermQueryBase\n */\nclass PrefixQuery extends MultiTermQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, value) {\n        super('prefix', field, value);\n    }\n\n    /**\n     * Sets the rewrite method. Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. 
Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * This is an advanced option, use with care.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {PrefixQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `rewrite` method is not valid.\n     */\n    rewrite(method) {\n        validateRewiteMethod(method, 'rewrite', ES_REF_URL);\n\n        this._queryOpts.rewrite = method;\n        return this;\n    }\n}\n\nmodule.exports = PrefixQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/range-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { invalidParam },\n    consts: { GEO_RELATION_SET }\n} = require('../../core');\n\nconst MultiTermQueryBase = require('./multi-term-query-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-range-query.html';\n\nconst invalidRelationParam = invalidParam(\n    ES_REF_URL,\n    'relation',\n    GEO_RELATION_SET\n);\n\n/**\n * Matches documents with fields that have terms within a certain range.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-range-query.html)\n *\n * @param {string=} field\n *\n * @example\n * const qry = esb.rangeQuery('age')\n *     .gte(10)\n *     .lte(20)\n *     .boost(2.0);\n *\n * @example\n * const qry = esb.rangeQuery('date').gte('now-1d/d').lt('now/d');\n *\n * @extends MultiTermQueryBase\n */\nclass RangeQuery extends MultiTermQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field) {\n        super('range', field);\n    }\n\n    /**\n     * @override\n     * @throws {Error} This method cannot be called on RangeQuery\n     */\n    value() {\n        console.log(`Please refer ${ES_REF_URL}`);\n        throw new Error('value is not supported in RangeQuery');\n    }\n\n    /**\n     * Greater-than or equal to\n     *\n     * @param {string|number} val\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n    gte(val) {\n        this._queryOpts.gte = val;\n        return this;\n    }\n\n    /**\n     * Less-than or equal to\n     *\n     * @param {string|number} val\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n    lte(val) {\n        this._queryOpts.lte = val;\n        return this;\n    }\n\n    /**\n     * Greater-than\n     *\n     * @param {string|number} val\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n   
 gt(val) {\n        this._queryOpts.gt = val;\n        return this;\n    }\n\n    /**\n     * Less-than\n     *\n     * @param {string|number} val\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n    lt(val) {\n        this._queryOpts.lt = val;\n        return this;\n    }\n\n    /**\n     * The lower bound. Defaults to start from the first.\n     *\n     * @param {string|number} val The lower bound value, type depends on field type\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n    from(val) {\n        this._queryOpts.from = val;\n        return this;\n    }\n\n    /**\n     * The upper bound. Defaults to unbounded.\n     *\n     * @param {string|number} val The upper bound value, type depends on field type\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n    to(val) {\n        this._queryOpts.to = val;\n        return this;\n    }\n\n    /**\n     * Should the first from (if set) be inclusive or not. Defaults to `true`\n     *\n     * @param {boolean} enable `true` to include, `false` to exclude\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n    includeLower(enable) {\n        this._queryOpts.include_lower = enable;\n        return this;\n    }\n\n    /**\n     * Should the last to (if set) be inclusive or not. 
Defaults to `true`.\n     *\n     * @param {boolean} enable `true` to include, `false` to exclude\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n    includeUpper(enable) {\n        this._queryOpts.include_upper = enable;\n        return this;\n    }\n\n    /**\n     * Time Zone to be applied to any range query related to dates.\n     *\n     * @param {string} zone\n     * @returns {RangeQuery} returns `this` so that calls can be chained.\n     */\n    timeZone(zone) {\n        this._queryOpts.time_zone = zone;\n        return this;\n    }\n\n    /**\n     * Sets the format expression for parsing the upper and lower bounds.\n     * If no format is specified, then it will use the first format specified in the field mapping.\n     *\n     * @example\n     * const qry = esb.rangeQuery('born')\n     *     .gte('01/01/2012')\n     *     .lte('2013')\n     *     .format('dd/MM/yyyy||yyyy');\n     *\n     * @param {string} fmt Format for parsing upper and lower bounds.\n     * @returns {RangeQuery} returns `this` so that calls can be chained\n     */\n    format(fmt) {\n        this._queryOpts.format = fmt;\n        return this;\n    }\n\n    /**\n     * Sets the relationship between Query and indexed data\n     * that will be used to determine if a Document should be matched or not.\n     *\n     * @param {string} relation Can be one of `WITHIN`, `CONTAINS`, `DISJOINT`\n     * or `INTERSECTS`(default)\n     * @returns {RangeQuery} returns `this` so that calls can be chained\n     */\n    relation(relation) {\n        if (_.isNil(relation)) invalidRelationParam(relation);\n\n        const relationUpper = relation.toUpperCase();\n        if (!GEO_RELATION_SET.has(relationUpper)) {\n            invalidRelationParam(relation);\n        }\n\n        this._queryOpts.relation = relationUpper;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the `range` query\n     * class instance.\n   
  *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        // recursiveToJSON doesn't seem to be required here.\n        return {\n            [this.queryType]: {\n                [this._field]: this._queryOpts\n            }\n        };\n    }\n}\n\nmodule.exports = RangeQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/regexp-query.js",
    "content": "'use strict';\n\nconst MultiTermQueryBase = require('./multi-term-query-base');\nconst { validateRewiteMethod } = require('../helper');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-regexp-query.html';\n\n/**\n * Query for regular expression term queries. Elasticsearch will apply the regexp\n * to the terms produced by the tokenizer for that field, and not to the original\n * text of the field.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-regexp-query.html)\n *\n * @example\n * const qry = esb.regexpQuery('name.first', 's.*y').boost(1.2);\n *\n * @param {string=} field\n * @param {string|number=} value\n *\n * @extends MultiTermQueryBase\n */\nclass RegexpQuery extends MultiTermQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, value) {\n        super('regexp', field, value);\n    }\n\n    /**\n     * Set special flags. Possible flags are `ALL` (default),\n     * `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`.\n     *\n     * @example\n     * const qry = esb.regexpQuery('name.first', 's.*y')\n     *     .flags('INTERSECTION|COMPLEMENT|EMPTY');\n     *\n     * @param {string} flags `|` separated flags. 
Possible flags are `ALL` (default),\n     * `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`.\n     * @returns {RegexpQuery} returns `this` so that calls can be chained.\n     */\n    flags(flags) {\n        this._queryOpts.flags = flags;\n        return this;\n    }\n\n    /**\n     * Allow case insensitive matching or not (added in 7.10.0).\n     * Defaults to false.\n     *\n     * @example\n     * const qry = esb.regexpQuery('name.first', 's.*y')\n     *     .caseInsensitive(true);\n     *\n     * @param {boolean} caseInsensitive\n     * @returns {RegexpQuery} returns `this` so that calls can be chained.\n     */\n    caseInsensitive(caseInsensitive) {\n        this._queryOpts.case_insensitive = caseInsensitive;\n        return this;\n    }\n\n    /**\n     * Limit on how many automaton states regexp queries are allowed to create.\n     * This protects against too-difficult (e.g. exponentially hard) regexps.\n     * Defaults to 10000.\n     *\n     * @example\n     * const qry = esb.regexpQuery('name.first', 's.*y')\n     *     .flags('INTERSECTION|COMPLEMENT|EMPTY')\n     *     .maxDeterminizedStates(20000);\n     *\n     * @param {number} limit\n     * @returns {RegexpQuery} returns `this` so that calls can be chained.\n     */\n    maxDeterminizedStates(limit) {\n        this._queryOpts.max_determinized_states = limit;\n        return this;\n    }\n\n    /**\n     * Sets the rewrite method. 
Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, expect no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * This is an advanced option, use with care.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {RegexpQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `rewrite` method is not valid.\n     */\n    rewrite(method) {\n        validateRewiteMethod(method, 'rewrite', ES_REF_URL);\n\n        this._queryOpts.rewrite = method;\n        return this;\n    }\n}\n\nmodule.exports = RegexpQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/term-query.js",
    "content": "'use strict';\n\nconst ValueTermQueryBase = require('./value-term-query-base');\n\n/**\n * The `term` query finds documents that contain the *exact* term specified\n * in the inverted index.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-term-query.html)\n *\n * @example\n * const termQry = esb.termQuery('user', 'Kimchy');\n *\n * @param {string=} field\n * @param {string|number|boolean=} queryVal\n *\n * @extends ValueTermQueryBase\n */\nclass TermQuery extends ValueTermQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, queryVal) {\n        super('term', field, queryVal);\n    }\n}\n\nmodule.exports = TermQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/terms-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    util: { checkType }\n} = require('../../core');\n\nconst { Query } = require('../../core');\n\n/**\n * Filters documents that have fields that match any of the provided terms (**not analyzed**).\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-terms-query.html)\n *\n * @example\n * const qry = esb.constantScoreQuery(\n *     esb.termsQuery('user', ['kimchy', 'elasticsearch'])\n * );\n *\n * @example\n * const qry = esb.termsQuery('user')\n *     .index('users')\n *     .type('user')\n *     .id(2)\n *     .path('followers');\n *\n * @param {string=} field\n * @param {Array|string|number|boolean=} values\n *\n * @extends Query\n */\nclass TermsQuery extends Query {\n    // TODO: The DSL is a mess. Think about cleaning up some.\n\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, values) {\n        super('terms');\n\n        // Default assume user is not insane\n        this._isTermsLookup = false;\n        this._termsLookupOpts = {};\n        this._values = [];\n\n        if (!_.isNil(field)) this._field = field;\n        if (!_.isNil(values)) {\n            if (Array.isArray(values)) this.values(values);\n            else this.value(values);\n        }\n    }\n\n    /**\n     * Private helper function to set a terms lookup option.\n     *\n     * @private\n     * @param {string} key\n     * @param {string|number|boolean} val\n     */\n    _setTermsLookupOpt(key, val) {\n        this._isTermsLookup = true;\n        this._termsLookupOpts[key] = val;\n    }\n\n    /**\n     * Sets the field to search on.\n     *\n     * @param {string} field\n     * @returns {TermsQuery} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._field = field;\n        return this;\n    }\n\n    /**\n     * Append given value to list of values to run Terms Query with.\n     *\n     * @param 
{string|number|boolean} value\n     * @returns {TermsQuery} returns `this` so that calls can be chained\n     */\n    value(value) {\n        this._values.push(value);\n        return this;\n    }\n\n    /**\n     * Specifies the values to run query for.\n     *\n     * @param {Array<string|number|boolean>} values Values to run query for.\n     * @returns {TermsQuery} returns `this` so that calls can be chained\n     * @throws {TypeError} If `values` is not an instance of Array\n     */\n    values(values) {\n        checkType(values, Array);\n\n        this._values = this._values.concat(values);\n        return this;\n    }\n\n    /**\n     * Convenience method for setting term lookup options.\n     * Valid options are `index`, `type`, `id`, `path` and `routing`\n     *\n     * @param {Object} lookupOpts An object with any of the keys `index`,\n     * `type`, `id`, `path` and `routing`.\n     * @returns {TermsQuery} returns `this` so that calls can be chained\n     */\n    termsLookup(lookupOpts) {\n        checkType(lookupOpts, Object);\n\n        this._isTermsLookup = true;\n        Object.assign(this._termsLookupOpts, lookupOpts);\n        return this;\n    }\n\n    /**\n     * The index to fetch the term values from. Defaults to the current index.\n     *\n     * Note: The `index` parameter in the terms filter, used to look up terms in\n     * a dedicated index is mandatory in elasticsearch 6.0. Previously, the\n     * index defaulted to the index the query was executed on. 
In 6.0, this\n     * index must be explicitly set in the request.\n     *\n     * @param {string} idx The index to fetch the term values from.\n     * Defaults to the current index.\n     * @returns {TermsQuery} returns `this` so that calls can be chained\n     */\n    index(idx) {\n        this._setTermsLookupOpt('index', idx);\n        return this;\n    }\n\n    /**\n     * The type to fetch the term values from.\n     *\n     * @param {string} type\n     * @returns {TermsQuery} returns `this` so that calls can be chained\n     */\n    type(type) {\n        this._setTermsLookupOpt('type', type);\n        return this;\n    }\n\n    /**\n     * The id of the document to fetch the term values from.\n     *\n     * @param {string} id\n     * @returns {TermsQuery} returns `this` so that calls can be chained\n     */\n    id(id) {\n        this._setTermsLookupOpt('id', id);\n        return this;\n    }\n\n    /**\n     * The field specified as path to fetch the actual values for the `terms` filter.\n     *\n     * @param {string} path\n     * @returns {TermsQuery} returns `this` so that calls can be chained\n     */\n    path(path) {\n        this._setTermsLookupOpt('path', path);\n        return this;\n    }\n\n    /**\n     * A custom routing value to be used when retrieving the external terms doc.\n     *\n     * @param {string} routing\n     * @returns {TermsQuery} returns `this` so that calls can be chained\n     */\n    routing(routing) {\n        this._setTermsLookupOpt('routing', routing);\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the `terms` query\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        // recursiveToJSON doesn't seem to be required here.\n        return {\n            [this.queryType]: Object.assign({}, this._queryOpts, {\n                [this._field]: 
this._isTermsLookup\n                    ? this._termsLookupOpts\n                    : this._values\n            })\n        };\n    }\n}\n\nmodule.exports = TermsQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/terms-set-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst {\n    Query,\n    util: { checkType }\n} = require('../../core');\n\n/**\n * Returns any documents that match with at least one or more of the provided\n * terms. The terms are not analyzed and thus must match exactly. The number of\n * terms that must match varies per document and is either controlled by a\n * minimum should match field or computed per document in a minimum should match\n * script.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-terms-set-query.html)\n *\n * NOTE: This query was added in elasticsearch v6.1.\n *\n * @example\n * const qry = esb.termsSetQuery('codes', ['abc', 'def', 'ghi'])\n *     .minimumShouldMatchField('required_matches')\n *\n * @param {string=} field\n * @param {Array<string|number|boolean>|string|number=} terms\n *\n * @extends Query\n */\nclass TermsSetQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, terms) {\n        super('terms_set');\n\n        this._queryOpts.terms = [];\n\n        if (!_.isNil(field)) this._field = field;\n        if (!_.isNil(terms)) {\n            if (Array.isArray(terms)) this.terms(terms);\n            else this.term(terms);\n        }\n    }\n\n    /**\n     * Sets the field to search on.\n     *\n     * @param {string} field\n     * @returns {TermsSetQuery} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._field = field;\n        return this;\n    }\n\n    /**\n     * Append given term to set of terms to run Terms Set Query with.\n     *\n     * @param {string|number|boolean} term\n     * @returns {TermsSetQuery} returns `this` so that calls can be chained\n     */\n    term(term) {\n        this._queryOpts.terms.push(term);\n        return this;\n    }\n\n    /**\n     * Specifies the terms to run query for.\n     *\n     * @param {Array<string|number|boolean>} terms Terms set to run 
query for.\n     * @returns {TermsSetQuery} returns `this` so that calls can be chained\n     * @throws {TypeError} If `terms` is not an instance of Array\n     */\n    terms(terms) {\n        checkType(terms, Array);\n\n        this._queryOpts.terms = this._queryOpts.terms.concat(terms);\n        return this;\n    }\n\n    /**\n     * Controls the number of terms that must match per document.\n     *\n     * @param {string} fieldName\n     * @returns {TermsSetQuery} returns `this` so that calls can be chained\n     */\n    minimumShouldMatchField(fieldName) {\n        this._queryOpts.minimum_should_match_field = fieldName;\n        return this;\n    }\n\n    /**\n     * Sets the `script` for query. It controls how many terms are required to\n     * match in a more dynamic way.\n     *\n     * The `params.num_terms` parameter is available in the script to indicate\n     * the number of terms that have been specified.\n     *\n     * @example\n     * const qry = esb.termsSetQuery('codes', ['abc', 'def', 'ghi'])\n     *     .minimumShouldMatchScript({\n     *         source: \"Math.min(params.num_terms, doc['required_matches'].value)\"\n     *     })\n     *\n     * @param {Script|string|Object} script\n     * @returns {TermsSetQuery} returns `this` so that calls can be chained.\n     */\n    minimumShouldMatchScript(script) {\n        this._queryOpts.minimum_should_match_script = script;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the term level query\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        return {\n            [this.queryType]: { [this._field]: this._queryOpts }\n        };\n    }\n}\n\nmodule.exports = TermsSetQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/type-query.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst { Query } = require('../../core');\n\n/**\n * Filters documents matching the provided document / mapping type.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-type-query.html)\n *\n * @example\n * const qry = esb.typeQuery('my_type');\n *\n * @param {string=} type The elasticsearch doc type\n *\n * @extends Query\n */\nclass TypeQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(type) {\n        super('type');\n\n        if (!_.isNil(type)) this._queryOpts.value = type;\n    }\n\n    /**\n     * Sets the elasticsearch doc type to query on.\n     *\n     * @param {string} type The elasticsearch doc type\n     * @returns {TypeQuery} returns `this` so that calls can be chained.\n     */\n    value(type) {\n        this._queryOpts.value = type;\n        return this;\n    }\n\n    /**\n     * Sets the elasticsearch doc type to query on.\n     * Alias for method `value`.\n     *\n     * @param {string} type The elasticsearch doc type\n     * @returns {TypeQuery} returns `this` so that calls can be chained.\n     */\n    type(type) {\n        return this.value(type);\n    }\n}\n\nmodule.exports = TypeQuery;\n"
  },
  {
    "path": "src/queries/term-level-queries/value-term-query-base.js",
    "content": "'use strict';\n\nconst _ = require('../../_');\n\nconst { Query } = require('../../core');\n\n/**\n * The `ValueTermQueryBase` provides support for common options used across\n * various term level query implementations.\n *\n * @param {string} queryType\n * @param {string=} field The document field to query against\n * @param {string=} value The query string\n *\n * @extends Query\n */\nclass ValueTermQueryBase extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(queryType, field, value) {\n        super(queryType);\n\n        if (!_.isNil(field)) this._field = field;\n        if (!_.isNil(value)) this._queryOpts.value = value;\n    }\n\n    /**\n     * Sets the field to search on.\n     *\n     * @param {string} field\n     * @returns {ValueTermQueryBase} returns `this` so that calls can be chained.\n     */\n    field(field) {\n        this._field = field;\n        return this;\n    }\n\n    /**\n     * Sets the query string.\n     *\n     * @param {string|number|boolean} queryVal\n     * @returns {ValueTermQueryBase} returns `this` so that calls can be chained.\n     */\n    value(queryVal) {\n        this._queryOpts.value = queryVal;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation of the term level query\n     * class instance.\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch query DSL\n     */\n    toJSON() {\n        // recursiveToJSON doesn't seem to be required here.\n\n        // Revisit this.. Smells a little bit\n        if (!_.has(this._queryOpts, 'value')) {\n            throw new Error('Value is required for term level query!');\n        }\n\n        const qryOpts =\n            Object.keys(this._queryOpts).length === 1\n                ? 
this._queryOpts.value\n                : this._queryOpts;\n        return {\n            [this.queryType]: {\n                [this._field]: qryOpts\n            }\n        };\n    }\n\n    /**\n     * Allows ASCII case insensitive matching of the value with the indexed\n     * field values when set to true.\n     *\n     * NOTE: Only available in Elasticsearch v7.10.0+\n     *\n     * @param {boolean} value\n     * @returns {ValueTermQueryBase} returns `this` so that calls can be chained.\n     */\n    caseInsensitive(value = true) {\n        this._queryOpts.case_insensitive = value;\n        return this;\n    }\n}\n\nmodule.exports = ValueTermQueryBase;\n"
  },
  {
    "path": "src/queries/term-level-queries/wildcard-query.js",
    "content": "'use strict';\n\nconst MultiTermQueryBase = require('./multi-term-query-base');\nconst { validateRewiteMethod } = require('../helper');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-wildcard-query.html';\n\n/**\n * Matches documents that have fields matching a wildcard expression (**not analyzed**).\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-wildcard-query.html)\n *\n * @example\n * const qry = esb.wildcardQuery('user', 'ki*y').boost(2.0);\n *\n * @param {string=} field\n * @param {string=} value\n *\n * @extends MultiTermQueryBase\n */\nclass WildcardQuery extends MultiTermQueryBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, value) {\n        super('wildcard', field, value);\n    }\n\n    /**\n     * Allow case insensitive matching or not (added in 7.10.0).\n     * Defaults to false.\n     *\n     * @example\n     * const qry = esb.wildcardQuery('user', 'ki*y')\n     *     .caseInsensitive(true);\n     *\n     * @param {boolean} caseInsensitive\n     * @returns {WildcardQuery} returns `this` so that calls can be chained.\n     */\n    caseInsensitive(caseInsensitive) {\n        this._queryOpts.case_insensitive = caseInsensitive;\n        return this;\n    }\n\n    /**\n     * Sets the rewrite method. 
Valid values are:\n     * - `constant_score` - tries to pick the best constant-score rewrite\n     *  method based on term and document counts from the query.\n     *  Synonyms - `constant_score_auto`, `constant_score_filter`\n     *\n     * - `scoring_boolean` - translates each term into boolean should and\n     *  keeps the scores as computed by the query\n     *\n     * - `constant_score_boolean` - same as `scoring_boolean`, except no scores\n     *  are computed.\n     *\n     * - `constant_score_filter` - first creates a private Filter, by visiting\n     *  each term in sequence and marking all docs for that term\n     *\n     * - `top_terms_boost_N` - first translates each term into boolean should\n     *  and scores are only computed as the boost using the top N\n     *  scoring terms. Replace N with an integer value.\n     *\n     * - `top_terms_N` - first translates each term into boolean should\n     *  and keeps the scores as computed by the query. Only the top N\n     *  scoring terms are used. Replace N with an integer value.\n     *\n     * Default is `constant_score`.\n     *\n     * This is an advanced option, use with care.\n     *\n     * Note: The deprecated multi term rewrite parameters `constant_score_auto`,\n     * `constant_score_filter` (synonyms for `constant_score`) have been removed\n     * in elasticsearch 6.0.\n     *\n     * @param {string} method The rewrite method as a string.\n     * @returns {WildcardQuery} returns `this` so that calls can be chained.\n     * @throws {Error} If the given `rewrite` method is not valid.\n     */\n    rewrite(method) {\n        validateRewiteMethod(method, 'rewrite', ES_REF_URL);\n\n        this._queryOpts.rewrite = method;\n        return this;\n    }\n}\n\nmodule.exports = WildcardQuery;\n"
  },
  {
    "path": "src/queries/vector-queries/index.js",
    "content": "'use strict';\n\nexports.SparseVectorQuery = require('./sparse-vector-query');\nexports.SemanticQuery = require('./semantic-query');\n"
  },
  {
    "path": "src/queries/vector-queries/semantic-query.js",
    "content": "'use strict';\n\nconst { Query } = require('../../core');\nconst _ = require('../../_');\n\n/**\n * The semantic query enables you to perform semantic search on data stored in a semantic_text field.\n * Semantic search uses dense vector representations to capture the meaning and context of search terms,\n * providing more relevant results compared to traditional keyword-based search methods.\n *\n * Requires Elasticsearch v9.0+ (Stack 9 / Serverless) where the `semantic` query is available.\n *\n * [Elasticsearch reference](https://www.elastic.co/docs/reference/query-languages/query-dsl/query-dsl-semantic-query)\n *\n * @example\n * const qry = esb.semanticQuery('title_semantic', 'mountain lake').boost(2);\n *\n * @extends Query\n */\nclass SemanticQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field, query) {\n        super('semantic');\n        if (!_.isNil(field)) this._queryOpts.field = field;\n        if (!_.isNil(query)) this._queryOpts.query = query;\n    }\n\n    /**\n     * Sets the semantic field to query.\n     * @param {string} field The `semantic_text` field name.\n     * @returns {SemanticQuery}\n     */\n    field(field) {\n        this._queryOpts.field = field;\n        return this;\n    }\n\n    /**\n     * Sets the semantic query text.\n     * @param {string} query The query text.\n     * @returns {SemanticQuery}\n     */\n    query(query) {\n        this._queryOpts.query = query;\n        return this;\n    }\n}\n\nmodule.exports = SemanticQuery;\n"
  },
  {
    "path": "src/queries/vector-queries/sparse-vector-query.js",
    "content": "'use strict';\n\nconst { Query } = require('../../core');\nconst _ = require('../../_');\n\n/**\n * The sparse vector query executes a query consisting of sparse vectors, such as built by a learned sparse retrieval model,\n * NOTE: Only available in Elasticsearch v8.15+\n *\n * [Elasticsearch reference](https://www.elastic.co/docs/reference/query-languages/query-dsl/query-dsl-sparse-vector-query)\n *\n * @example\n * const qry = esb.sparseVector().field('ml_tokens').inferenceId('model_id').query('my query');\n *\n * @extends Query\n */\nclass SparseVectorQuery extends Query {\n    // eslint-disable-next-line require-jsdoc\n\n    constructor(field) {\n        super('sparse_vector');\n        if (!_.isNil(field)) this._queryOpts.field = field;\n    }\n\n    /**\n     * Sets the field to query\n     *\n     * @param {string} field the field for the query\n     * @returns {SparseVectorQuery}\n     */\n    field(field) {\n        this._queryOpts.field = field;\n        return this;\n    }\n\n    /**\n     * Set model inference id\n     *\n     * @param {string} inferenceId The model inference ID\n     * @returns {SparseVectorQuery}\n     */\n    inferenceId(inferenceId) {\n        this._queryOpts.inference_id = inferenceId;\n        return this;\n    }\n\n    /**\n     * Sets the input query.\n     * You should set either query or query vector, but not both\n     *\n     * @param {string} query The input query\n     * @returns {SparseVectorQuery}\n     */\n    query(query) {\n        this._queryOpts.query = query;\n        return this;\n    }\n\n    /**\n     * Set a query vector to the query to run. 
if you don't use inference\n     * You should set either query or query vector, but not both\n     *\n     * @param {Object} queryVector\n     * @returns {SparseVectorQuery}\n     */\n    queryVector(queryVector) {\n        this._queryOpts.query_vector = queryVector;\n        return this;\n    }\n\n    /**\n     * Enable pruning\n     *\n     * NOTE: Only available in Elasticsearch v9.0+\n     *\n     * @param {boolean} prune\n     * @returns {SparseVectorQuery} returns `this` so that calls can be chained.\n     */\n    prune(prune) {\n        this._queryOpts.prune = prune;\n        return this;\n    }\n\n    /**\n     * Set pruning config tokens_freq_ratio_threshold\n     *\n     * NOTE: Only available in Elasticsearch v9.0+\n     *\n     * @param {number} tokensFreqRatioThreshold\n     * @returns {SparseVectorQuery} returns `this` so that calls can be chained.\n     */\n    tokensFreqRatioThreshold(tokensFreqRatioThreshold) {\n        if (!this._queryOpts.pruning_config) {\n            this._queryOpts.pruning_config = {};\n        }\n        this._queryOpts.pruning_config.tokens_freq_ratio_threshold =\n            tokensFreqRatioThreshold;\n        return this;\n    }\n\n    /**\n     * Set pruning config tokens_weight_threshold\n     *\n     * NOTE: Only available in Elasticsearch v9.0+\n     *\n     * @param {number} tokensWeightThreshold\n     * @returns {SparseVectorQuery} returns `this` so that calls can be chained.\n     */\n    tokensWeightThreshold(tokensWeightThreshold) {\n        if (!this._queryOpts.pruning_config) {\n            this._queryOpts.pruning_config = {};\n        }\n        this._queryOpts.pruning_config.tokens_weight_threshold =\n            tokensWeightThreshold;\n        return this;\n    }\n\n    /**\n     * Set pruning config only_score_pruned_tokens\n     *\n     * NOTE: Only available in Elasticsearch v9.0+\n     *\n     * @param {boolean} onlyScorePrunedTokens\n     * @returns {SparseVectorQuery} returns `this` so that calls can be 
chained.\n     */\n    onlyScorePrunedTokens(onlyScorePrunedTokens) {\n        if (!this._queryOpts.pruning_config) {\n            this._queryOpts.pruning_config = {};\n        }\n        this._queryOpts.pruning_config.only_score_pruned_tokens =\n            onlyScorePrunedTokens;\n        return this;\n    }\n}\n\nmodule.exports = SparseVectorQuery;\n"
  },
  {
    "path": "src/recipes.js",
    "content": "'use strict';\n\nconst _ = require('./_');\n\nconst {\n    MatchAllQuery,\n    termLevelQueries: { ExistsQuery },\n    compoundQueries: {\n        BoolQuery,\n        FunctionScoreQuery,\n        scoreFunctions: { RandomScoreFunction }\n    }\n} = require('./queries');\n\nconst {\n    Query,\n    util: { checkType }\n} = require('./core');\n\n/**\n * Recipe for the now removed `missing` query.\n *\n * Can be accessed using `esb.recipes.missingQuery` OR `esb.cookMissingQuery`.\n *\n * [Elasticsearch refererence](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-exists-query.html#_literal_missing_literal_query)\n *\n * @example\n * const qry = esb.cookMissingQuery('user');\n *\n * qry.toJSON();\n * {\n *   \"bool\": {\n *     \"must_not\": {\n *       \"exists\": {\n *         \"field\": \"user\"\n *       }\n *     }\n *   }\n * }\n *\n * @param {string} field The field which should be missing the value.\n * @returns {BoolQuery} A boolean query with a `must_not` `exists` clause is returned.\n */\nexports.missingQuery = function missingQuery(field) {\n    return new BoolQuery().mustNot(new ExistsQuery(field));\n};\n\n/**\n * Recipe for random sort query. Takes a query and returns the same\n * wrapped in a random scoring query.\n *\n * Can be accessed using `esb.recipes.randomSortQuery` OR `esb.cookRandomSortQuery`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-random)\n *\n * @example\n * const reqBody = esb.requestBodySearch()\n *     .query(esb.cookRandomSortQuery(esb.rangeQuery('age').gte(10)))\n *     .size(100);\n *\n * reqBody.toJSON();\n * {\n *   \"query\": {\n *     \"function_score\": {\n *       \"query\": {\n *         \"range\": { \"age\": { \"gte\": 10 } }\n *       },\n *       \"random_score\": {}\n *     }\n *   },\n *   \"size\": 100\n * }\n *\n * @param {Query=} query The query to fetch documents for. 
Defaults to `match_all` query.\n * @param {number=} seed A seed value for the random score function.\n * @returns {FunctionScoreQuery} A `function_score` query with random sort applied\n * @throws {TypeError} If `query` is not an instance of `Query`.\n */\nexports.randomSortQuery = function randomSortQuery(\n    query = new MatchAllQuery(),\n    seed\n) {\n    checkType(query, Query);\n    const func = new RandomScoreFunction();\n    return new FunctionScoreQuery()\n        .query(query)\n        .function(_.isNil(seed) ? func : func.seed(seed));\n};\n\n/**\n * Recipe for constructing a filter query using `bool` query.\n * Optionally, scoring can be enabled.\n *\n * Can be accessed using `esb.recipes.filterQuery` OR `esb.cookFilterQuery`.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-bool-query.html)\n *\n * @example\n * const boolQry = esb.cookFilterQuery(esb.termQuery('status', 'active'), true);\n * boolQry.toJSON();\n * {\n *   \"bool\": {\n *     \"must\": { \"match_all\": {} },\n *     \"filter\": {\n *       \"term\": { \"status\": \"active\" }\n *     }\n *   }\n * }\n *\n * @param {Query} query The query to fetch documents for.\n * @param {boolean=} scoring Optional flag for enabling/disabling scoring. Disabled by default.\n * If enabled, a score of `1.0` will be assigned to all documents.\n * @returns {BoolQuery} A `bool` query with a `filter` clause is returned.\n * @throws {TypeError} If `query` is not an instance of `Query`.\n */\nexports.filterQuery = function filterQuery(query, scoring = false) {\n    checkType(query, Query);\n\n    const boolQry = new BoolQuery().filter(query);\n    return scoring === true ? boolQry.must(new MatchAllQuery()) : boolQry;\n};\n"
  },
  {
    "path": "src/suggesters/analyzed-suggester-base.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst { Suggester } = require('../core');\n\n/**\n * The `AnalyzedSuggesterBase` provides support for common options used\n * in `TermSuggester` and `PhraseSuggester`.\n *\n * **NOTE:** Instantiating this directly should not be required.\n * However, if you wish to add a custom implementation for whatever reason,\n * this class could be extended.\n *\n * @param {string} suggesterType The type of suggester.\n * Can be one of `term`, `phrase`\n * @param {string} name The name of the Suggester, an arbitrary identifier\n * @param {string=} field The field to fetch the candidate suggestions from.\n * @param {string=} txt A string to get suggestions for.\n *\n * @throws {Error} if `name` is empty\n * @throws {Error} if `suggesterType` is empty\n *\n * @extends Suggester\n */\nclass AnalyzedSuggesterBase extends Suggester {\n    // eslint-disable-next-line require-jsdoc\n    constructor(suggesterType, name, field, txt) {\n        super(suggesterType, name, field);\n\n        if (!_.isNil(txt)) this._opts.text = txt;\n    }\n\n    /**\n     * Sets the text to get suggestions for. If not set, the global\n     * suggestion text will be used.\n     *\n     * @param {string} txt A string to get suggestions for.\n     * @returns {AnalyzedSuggesterBase} returns `this` so that calls can be chained.\n     */\n    text(txt) {\n        this._opts.text = txt;\n        return this;\n    }\n\n    /**\n     * Sets the analyzer to analyse the suggest text with. 
Defaults to\n     * the search analyzer of the suggest field.\n     *\n     * @param {string} analyzer The analyzer to analyse the suggest text with.\n     * @returns {AnalyzedSuggesterBase} returns `this` so that calls can be chained.\n     */\n    analyzer(analyzer) {\n        this._suggestOpts.analyzer = analyzer;\n        return this;\n    }\n\n    /**\n     * Sets the maximum number of suggestions to be retrieved from each individual shard.\n     * During the reduce phase only the top N suggestions are returned based on the `size`\n     * option. Defaults to the `size` option. Setting this to a value higher than the `size`\n     * can be useful in order to get a more accurate document frequency for spelling\n     * corrections at the cost of performance. Due to the fact that terms are partitioned\n     * amongst shards, the shard level document frequencies of spelling corrections\n     * may not be precise. Increasing this will make these document frequencies\n     * more precise.\n     *\n     * @param {number} size\n     * @returns {AnalyzedSuggesterBase} returns `this` so that calls can be chained.\n     */\n    shardSize(size) {\n        this._suggestOpts.shard_size = size;\n        return this;\n    }\n}\n\nmodule.exports = AnalyzedSuggesterBase;\n"
  },
  {
    "path": "src/suggesters/completion-suggester.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst {\n    Suggester,\n    util: { setDefault }\n} = require('../core');\n\n/**\n * The completion suggester provides auto-complete/search-as-you-type\n * functionality. This is a navigational feature to guide users to relevant\n * results as they are typing, improving search precision. It is not meant\n * for spell correction or did-you-mean functionality like the term or\n * phrase suggesters.\n *\n * Ideally, auto-complete functionality should be as fast as a user types to\n * provide instant feedback relevant to what a user has already typed in.\n * Hence, completion suggester is optimized for speed. The suggester uses\n * data structures that enable fast lookups, but are costly to build\n * and are stored in-memory.\n *\n * Elasticsearch reference\n *   - [Completion Suggester](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-completion.html)\n *   - [Context Suggester](https://www.elastic.co/guide/en/elasticsearch/reference/current/suggester-context.html)\n *\n * @example\n * const suggest = esb.completionSuggester('song-suggest', 'suggest').prefix('nir');\n *\n * @example\n * const suggest = new esb.CompletionSuggester('place_suggestion', 'suggest')\n *     .prefix('tim')\n *     .size(10)\n *     .contexts('place_type', ['cafe', 'restaurants']);\n *\n * @param {string} name The name of the Suggester, an arbitrary identifier\n * @param {string=} field The field to fetch the candidate suggestions from.\n *\n * @throws {Error} if `name` is empty\n *\n * @extends Suggester\n */\nclass CompletionSuggester extends Suggester {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field) {\n        super('completion', name, field);\n    }\n\n    /**\n     * Sets the `prefix` for the `CompletionSuggester` query.\n     *\n     * @param {string} prefix\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    
prefix(prefix) {\n        this._opts.prefix = prefix;\n        return this;\n    }\n\n    /**\n     * Sets whether duplicate suggestions should be filtered out (defaults to false).\n     *\n     * NOTE: This option was added in elasticsearch v6.1.\n     *\n     * @param {boolean} skip Enable/disable skipping duplicates\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    skipDuplicates(skip = true) {\n        this._suggestOpts.skip_duplicates = skip;\n        return this;\n    }\n\n    /**\n     * Check that the object property `this._suggestOpts.fuzzy` is an object.\n     * Set empty object if required.\n     *\n     * @private\n     */\n    _checkFuzzy() {\n        if (!_.isObject(this._suggestOpts.fuzzy)) {\n            this._suggestOpts.fuzzy = {};\n        }\n    }\n\n    /**\n     * Sets the `fuzzy` parameter. Can be customised with specific fuzzy parameters.\n     *\n     * @param {boolean|Object=} fuzzy Enable/disable `fuzzy` using boolean or\n     * object(with params)\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    fuzzy(fuzzy = true) {\n        this._suggestOpts.fuzzy = fuzzy;\n        return this;\n    }\n\n    /**\n     * Sets the `fuzziness` parameter which is interpreted as a Levenshtein Edit Distance —\n     * the number of one character changes that need to be made to one string to make it\n     * the same as another string.\n     *\n     * @example\n     * const suggest = esb.completionSuggester('song-suggest', 'suggest')\n     *     .prefix('nor')\n     *     .fuzziness(2);\n     *\n     * @param {number|string} factor Can be specified either as a number, or the maximum\n     * number of edits, or as `AUTO` which generates an edit distance based on the length\n     * of the term.\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    fuzziness(factor) {\n        this._checkFuzzy();\n\n        
this._suggestOpts.fuzzy.fuzziness = factor;\n        return this;\n    }\n\n    /**\n     * Transpositions (`ab` → `ba`) are allowed by default but can be disabled\n     * by setting `transpositions` to false.\n     *\n     * @param {boolean} enable\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    transpositions(enable) {\n        this._checkFuzzy();\n\n        this._suggestOpts.fuzzy.transpositions = enable;\n        return this;\n    }\n\n    /**\n     * Sets the minimum length of the input before fuzzy suggestions are returned,\n     * defaults 3\n     *\n     * @param {number} len Minimum length of the input before fuzzy suggestions\n     * are returned, defaults 3\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    minLength(len) {\n        this._checkFuzzy();\n\n        this._suggestOpts.fuzzy.min_length = len;\n        return this;\n    }\n\n    /**\n     * The number of initial characters which will not be \"fuzzified\".\n     * This helps to reduce the number of terms which must be examined. Defaults to `1`.\n     *\n     * @param {number} len Characters to skip fuzzy for. 
Defaults to `1`.\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    prefixLength(len) {\n        this._checkFuzzy();\n\n        this._suggestOpts.fuzzy.prefix_length = len;\n        return this;\n    }\n\n    /**\n     * If `true`, all measurements (like fuzzy edit distance, transpositions,\n     * and lengths) are measured in Unicode code points instead of in bytes.\n     * This is slightly slower than raw bytes, so it is set to `false` by default.\n     *\n     * @param {boolean} enable Measure in Unicode code points instead of in bytes.\n     * `false` by default.\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    unicodeAware(enable) {\n        this._checkFuzzy();\n\n        this._suggestOpts.fuzzy.unicode_aware = enable;\n        return this;\n    }\n\n    /**\n     * Sets the regular expression for completion suggester which supports regex queries.\n     *\n     * @example\n     * const suggest = esb.completionSuggester('song-suggest', 'suggest')\n     *     .regex('n[ever|i]r');\n     *\n     * @param {string} expr Regular expression\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    regex(expr) {\n        this._opts.regex = expr;\n        return this;\n    }\n\n    /**\n     * Set special flags. Possible flags are `ALL` (default),\n     * `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`.\n     *\n     * @param {string} flags `|` separated flags. 
Possible flags are `ALL` (default),\n     * `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`.\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    flags(flags) {\n        setDefault(this._suggestOpts, 'regex', {});\n\n        this._suggestOpts.regex.flags = flags;\n        return this;\n    }\n\n    /**\n     * Limit on how many automaton states regexp queries are allowed to create.\n     * This protects against too-difficult (e.g. exponentially hard) regexps.\n     * Defaults to 10000. You can raise this limit to allow more complex regular\n     * expressions to execute.\n     *\n     * @param {number} limit\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    maxDeterminizedStates(limit) {\n        setDefault(this._suggestOpts, 'regex', {});\n\n        this._suggestOpts.regex.max_determinized_states = limit;\n        return this;\n    }\n\n    /**\n     * The completion suggester considers all documents in the index, but it is often\n     * desirable to serve suggestions filtered and/or boosted by some criteria.\n     *\n     * To achieve suggestion filtering and/or boosting, you can add context mappings\n     * while configuring a completion field. You can define multiple context mappings\n     * for a completion field. Every context mapping has a unique name and a type.\n     *\n     * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/suggester-context.html)\n     *\n     * @example\n     * const suggest = new esb.CompletionSuggester('place_suggestion', 'suggest')\n     *     .prefix('tim')\n     *     .size(10)\n     *     .contexts('place_type', [\n     *         { context: 'cafe' },\n     *         { context: 'restaurants', boost: 2 }\n     *     ]);\n     *\n     * @example\n     * // Suggestions can be filtered and boosted with respect to how close they\n     * // are to one or more geo points. 
The following filters suggestions that\n     * // fall within the area represented by the encoded geohash of a geo point:\n     * const suggest = new esb.CompletionSuggester('place_suggestion', 'suggest')\n     *     .prefix('tim')\n     *     .size(10)\n     *     .contexts('location', { lat: 43.662, lon: -79.38 });\n     *\n     * @example\n     * // Suggestions that are within an area represented by a geohash can also be\n     * // boosted higher than others\n     * const suggest = new esb.CompletionSuggester('place_suggestion', 'suggest')\n     *     .prefix('tim')\n     *     .size(10)\n     *     .contexts('location', [\n     *         {\n     *             lat: 43.6624803,\n     *             lon: -79.3863353,\n     *             precision: 2\n     *         },\n     *         {\n     *             context: {\n     *                 lat: 43.6624803,\n     *                 lon: -79.3863353\n     *             },\n     *             boost: 2\n     *         }\n     *     ]);\n     *\n     * @param {string} name\n     * @param {Array|Object} ctx\n     * @returns {CompletionSuggester} returns `this` so that calls can be chained.\n     */\n    contexts(name, ctx) {\n        // This whole feature is bizarre!\n        // Not very happy with the implementation.\n        setDefault(this._suggestOpts, 'contexts', {});\n\n        this._suggestOpts.contexts[name] = ctx;\n        return this;\n    }\n}\n\nmodule.exports = CompletionSuggester;\n"
  },
  {
    "path": "src/suggesters/direct-generator.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst {\n    consts: { SUGGEST_MODE_SET },\n    util: { invalidParam }\n} = require('../core');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-phrase.html#_direct_generators';\n\nconst invalidSuggestModeParam = invalidParam(\n    ES_REF_URL,\n    'suggest_mode',\n    SUGGEST_MODE_SET\n);\n\n/**\n * The `phrase` suggester uses candidate generators to produce a list of possible\n * terms per term in the given text. A single candidate generator is similar\n * to a `term` suggester called for each individual term in the text. The output\n * of the generators is subsequently scored in combination with the candidates\n * from the other terms to for suggestion candidates.\n *\n * The Phrase suggest API accepts a list of generators under the key `direct_generator`\n * each of the generators in the list are called per term in the original text.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-phrase.html#_direct_generators)\n *\n * @param {string=} field The field to fetch the candidate suggestions from.\n */\nclass DirectGenerator {\n    // eslint-disable-next-line require-jsdoc\n    constructor(field) {\n        this._body = {};\n\n        if (!_.isNil(field)) this._body.field = field;\n    }\n\n    /**\n     * Sets field to fetch the candidate suggestions from. 
This is a required option\n     * that either needs to be set globally or per suggestion.\n     *\n     * @param {string} field a valid field name\n     * @returns {DirectGenerator} returns `this` so that calls can be chained\n     */\n    field(field) {\n        this._body.field = field;\n        return this;\n    }\n\n    /**\n     * Sets the number of suggestions to return (defaults to `5`).\n     *\n     * @param {number} size\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    size(size) {\n        this._body.size = size;\n        return this;\n    }\n\n    /**\n     * Sets the suggest mode which controls what suggestions are included\n     * or controls for what suggest text terms, suggestions should be suggested.\n     *  All values other than `always` can be thought of as an optimization to\n     * generate fewer suggestions to test on each shard and are not rechecked\n     * when combining the suggestions generated on each shard. Thus `missing`\n     * will generate suggestions for terms on shards that do not contain them\n     * even other shards do contain them. Those should be filtered out\n     * using `confidence`.\n     *\n     * Three possible values can be specified:\n     *   - `missing`: Only provide suggestions for suggest text terms that\n     *     are not in the index. 
This is the default.\n     *   - `popular`:  Only suggest suggestions that occur in more docs\n     *     than the original suggest text term.\n     *   - `always`: Suggest any matching suggestions based on terms in the suggest text.\n     *\n     * @param {string} mode Can be `missing`, `popular` or `always`\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     * @throws {Error} If `mode` is not one of `missing`, `popular` or `always`.\n     */\n    suggestMode(mode) {\n        if (_.isNil(mode)) invalidSuggestModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (!SUGGEST_MODE_SET.has(modeLower)) {\n            invalidSuggestModeParam(mode);\n        }\n\n        this._body.suggest_mode = modeLower;\n        return this;\n    }\n\n    /**\n     * Sets the maximum edit distance candidate suggestions can have\n     * in order to be considered as a suggestion. Can only be a value\n     * between 1 and 2. Any other value result in an bad request\n     * error being thrown. Defaults to 2.\n     *\n     * @param {number} maxEdits Value between 1 and 2. Defaults to 2.\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    maxEdits(maxEdits) {\n        this._body.max_edits = maxEdits;\n        return this;\n    }\n\n    /**\n     * Sets the number of minimal prefix characters that must match in order\n     * to be a candidate suggestions. Defaults to 1.\n     *\n     * Increasing this number improves spellcheck performance.\n     * Usually misspellings don't occur in the beginning of terms.\n     *\n     * @param {number} len The number of minimal prefix characters that must match in order\n     * to be a candidate suggestions. 
Defaults to 1.\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    prefixLength(len) {\n        this._body.prefix_length = len;\n        return this;\n    }\n\n    /**\n     * Sets the minimum length a suggest text term must have in order to be included.\n     * Defaults to 4.\n     *\n     * @param {number} len The minimum length a suggest text term must have in order\n     * to be included. Defaults to 4.\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    minWordLength(len) {\n        this._body.min_word_length = len;\n        return this;\n    }\n\n    /**\n     * Sets factor that is used to multiply with the `shards_size` in order to inspect\n     * more candidate spell corrections on the shard level.\n     * Can improve accuracy at the cost of performance. Defaults to 5.\n     *\n     * @param {number} maxInspections Factor used to multiple with `shards_size` in\n     * order to inspect more candidate spell corrections on the shard level.\n     * Defaults to 5\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    maxInspections(maxInspections) {\n        this._body.max_inspections = maxInspections;\n        return this;\n    }\n\n    /**\n     * Sets the minimal threshold in number of documents a suggestion should appear in.\n     * This can be specified as an absolute number or as a relative percentage of\n     * number of documents. This can improve quality by only suggesting high\n     * frequency terms. Defaults to 0f and is not enabled. If a value higher than 1\n     * is specified then the number cannot be fractional. The shard level document\n     * frequencies are used for this option.\n     *\n     * @param {number} limit Threshold in number of documents a suggestion\n     * should appear in. 
Defaults to 0f and is not enabled.\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    minDocFreq(limit) {\n        this._body.min_doc_freq = limit;\n        return this;\n    }\n\n    /**\n     * Sets the maximum threshold in number of documents a suggest text token can\n     * exist in order to be included. Can be a relative percentage number (e.g 0.4)\n     * or an absolute number to represent document frequencies. If an value higher\n     * than 1 is specified then fractional can not be specified. Defaults to 0.01f.\n     * This can be used to exclude high frequency terms from being spellchecked.\n     * High frequency terms are usually spelled correctly on top of this also\n     * improves the spellcheck performance. The shard level document frequencies are\n     * used for this option.\n     *\n     * @param {number} limit Maximum threshold in number of documents a suggest text\n     * token can exist in order to be included. Defaults to 0.01f.\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    maxTermFreq(limit) {\n        this._body.max_term_freq = limit;\n        return this;\n    }\n\n    /**\n     * Sets the filter (analyzer) that is applied to each of the tokens passed to this\n     * candidate generator. 
This filter is applied to the original token before\n     * candidates are generated.\n     *\n     * @param {string} filter a filter (analyzer) that is applied to each of the\n     * tokens passed to this candidate generator.\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    preFilter(filter) {\n        this._body.pre_filter = filter;\n        return this;\n    }\n\n    /**\n     * Sets the filter (analyzer) that is applied to each of the generated tokens\n     * before they are passed to the actual phrase scorer.\n     *\n     * @param {string} filter a filter (analyzer) that is applied to each of the\n     * generated tokens before they are passed to the actual phrase scorer.\n     * @returns {DirectGenerator} returns `this` so that calls can be chained.\n     */\n    postFilter(filter) {\n        this._body.post_filter = filter;\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `direct_generator`\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch DSL\n     */\n    toJSON() {\n        return this._body;\n    }\n}\n\nmodule.exports = DirectGenerator;\n"
  },
  {
    "path": "src/suggesters/index.js",
    "content": "'use strict';\n\nexports.AnalyzedSuggesterBase = require('./analyzed-suggester-base');\n\nexports.TermSuggester = require('./term-suggester');\nexports.DirectGenerator = require('./direct-generator');\nexports.PhraseSuggester = require('./phrase-suggester');\nexports.CompletionSuggester = require('./completion-suggester');\n"
  },
  {
    "path": "src/suggesters/phrase-suggester.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst {\n    consts: { SMOOTHING_MODEL_SET },\n    util: { recursiveToJSON, invalidParam }\n} = require('../core');\n\nconst AnalyzedSuggesterBase = require('./analyzed-suggester-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-phrase.html';\n\nconst invalidSmoothingModeParam = invalidParam(\n    ES_REF_URL,\n    'smoothing',\n    SMOOTHING_MODEL_SET\n);\n\n/**\n * The phrase suggester adds additional logic on top of the `term` suggester\n * to select entire corrected phrases instead of individual tokens weighted\n * based on `ngram-language` models. In practice this suggester will be able\n * to make better decisions about which tokens to pick based on co-occurrence\n * and frequencies.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-phrase.html)\n *\n * @example\n * const suggest = esb.phraseSuggester(\n *     'simple_phrase',\n *     'title.trigram',\n *     'noble prize'\n * )\n *     .size(1)\n *     .gramSize(3)\n *     .directGenerator(esb.directGenerator('title.trigram').suggestMode('always'))\n *     .highlight('<em>', '</em>');\n *\n * @param {string} name The name of the Suggester, an arbitrary identifier\n * @param {string=} field The field to fetch the candidate suggestions from.\n * @param {string=} txt A string to get suggestions for.\n *\n * @throws {Error} if `name` is empty\n *\n * @extends AnalyzedSuggesterBase\n */\nclass PhraseSuggester extends AnalyzedSuggesterBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, txt) {\n        super('phrase', name, field, txt);\n    }\n\n    /**\n     * Sets max size of the n-grams (shingles) in the `field`. 
If the field\n     * doesn't contain n-grams (shingles) this should be omitted or set to `1`.\n     *\n     * Note: Elasticsearch tries to detect the gram size based on\n     * the specified `field`. If the field uses a `shingle` filter the `gram_size`\n     * is set to the `max_shingle_size` if not explicitly set.\n     * @param {number} size Max size of the n-grams (shingles) in the `field`.\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    gramSize(size) {\n        this._suggestOpts.gram_size = size;\n        return this;\n    }\n\n    /**\n     * Sets the likelihood of a term being a misspelled even if the term exists\n     * in the dictionary. The default is `0.95` corresponding to 5% of the\n     * real words are misspelled.\n     *\n     * @param {number} factor Likelihood of a term being misspelled. Defaults to `0.95`\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    realWordErrorLikelihood(factor) {\n        this._suggestOpts.real_word_error_likelihood = factor;\n        return this;\n    }\n\n    /**\n     * Sets the confidence level defines a factor applied to the input phrases score\n     * which is used as a threshold for other suggest candidates. Only candidates\n     * that score higher than the threshold will be included in the result.\n     * For instance a confidence level of `1.0` will only return suggestions\n     * that score higher than the input phrase. If set to `0.0` the top N candidates\n     * are returned. 
The default is `1.0`.\n     *\n     * @param {number} level Factor applied to the input phrases score, used as\n     * a threshold for other suggest candidates.\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    confidence(level) {\n        this._suggestOpts.confidence = level;\n        return this;\n    }\n\n    /**\n     * Sets the maximum percentage of the terms that at most considered to be\n     * misspellings in order to form a correction. This method accepts a float\n     * value in the range `[0..1)` as a fraction of the actual query terms or a\n     * number `>=1` as an absolute number of query terms. The default is set\n     * to `1.0` which corresponds to that only corrections with at most\n     * 1 misspelled term are returned. Note that setting this too high can\n     * negatively impact performance. Low values like 1 or 2 are recommended\n     * otherwise the time spend in suggest calls might exceed the time spend\n     * in query execution.\n     *\n     * @param {number} limit The maximum percentage of the terms that at most considered\n     * to be misspellings in order to form a correction.\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    maxErrors(limit) {\n        this._suggestOpts.max_errors = limit;\n        return this;\n    }\n\n    /**\n     * Sets the separator that is used to separate terms in the bigram field.\n     * If not set the whitespace character is used as a separator.\n     *\n     * @param {string} sep The separator that is used to separate terms in the\n     * bigram field.\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    separator(sep) {\n        this._suggestOpts.separator = sep;\n        return this;\n    }\n\n    /**\n     * Sets up suggestion highlighting. If not provided then no `highlighted` field\n     * is returned. 
If provided must contain exactly `pre_tag` and `post_tag` which\n     * are wrapped around the changed tokens. If multiple tokens in a row are changed\n     * the entire phrase of changed tokens is wrapped rather than each token.\n     *\n     * @param {string} preTag Pre-tag to wrap token\n     * @param {string} postTag Post-tag to wrap token\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    highlight(preTag, postTag) {\n        this._suggestOpts.highlight = { pre_tag: preTag, post_tag: postTag };\n        return this;\n    }\n\n    /**\n     * Checks each suggestion against the specified `query` to prune suggestions\n     * for which no matching docs exist in the index. The collate query for\n     * a suggestion is run only on the local shard from which the suggestion\n     * has been generated from. The `query` must be specified, and it is run\n     * as a [`template` query](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-template-query.html).\n     *\n     * The current suggestion is automatically made available as the\n     * `{{suggestion}}` variable, which should be used in your query.\n     * Additionally, you can specify a `prune` to control if all phrase\n     * suggestions will be returned, when set to `true` the suggestions will\n     * have an additional option `collate_match`, which will be true if matching\n     * documents for the phrase was found, `false` otherwise. 
The default value\n     * for prune is `false`.\n     *\n     * @example\n     * const suggest = esb.phraseSuggester('simple_phrase', 'title.trigram')\n     *     .size(1)\n     *     .directGenerator(\n     *         esb.directGenerator('title.trigram')\n     *             .suggestMode('always')\n     *             .minWordLength(1)\n     *     )\n     *     .collate({\n     *         query: {\n     *             inline: {\n     *                 match: {\n     *                     '{{field_name}}': '{{suggestion}}'\n     *                 }\n     *             }\n     *         },\n     *         params: { field_name: 'title' },\n     *         prune: true\n     *     });\n     *\n     * @param {Object} opts The options for `collate`. Can include the following:\n     *   - `query`: The `query` to prune suggestions for which\n     *      no matching docs exist in the index. It is run as a `template` query.\n     *   - `params`: The parameters to be passed to the template. The suggestion\n     *      value will be added to the variables you specify.\n     *   - `prune`: When set to `true`, the suggestions will\n     *      have an additional option `collate_match`, which will be true if matching\n     *      documents for the phrase was found, `false` otherwise. The default value\n     *      for prune is `false`.\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    collate(opts) {\n        // Add an instance check here?\n        // I wanted to use `SearchTemplate` here since the syntax is deceptively\n        // similar. 
But not quite the same.\n        // Adding a builder object called collate doesn't make sense either.\n        this._suggestOpts.collate = opts;\n        return this;\n    }\n\n    /**\n     * Sets the smoothing model to balance weight between infrequent grams\n     * (grams (shingles) are not existing in the index) and frequent grams\n     * (appear at least once in the index).\n     *\n     * Three possible values can be specified:\n     *   - `stupid_backoff`: a simple backoff model that backs off to lower order\n     *     n-gram models if the higher order count is 0 and discounts the lower order\n     *     n-gram model by a constant factor. The default `discount` is `0.4`.\n     *     Stupid Backoff is the default model\n     *   - `laplace`: a smoothing model that uses an additive smoothing where a\n     *     constant (typically `1.0` or smaller) is added to all counts to balance weights,\n     *     The default `alpha` is `0.5`.\n     *   - `linear_interpolation`: a smoothing model that takes the weighted mean of the\n     *     unigrams, bigrams and trigrams based on user supplied weights (lambdas).\n     *     Linear Interpolation doesn’t have any default values.\n     *     All parameters (`trigram_lambda`, `bigram_lambda`, `unigram_lambda`)\n     *     must be supplied.\n     *\n     * @param {string} model One of `stupid_backoff`, `laplace`, `linear_interpolation`\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    smoothing(model) {\n        if (_.isNil(model)) invalidSmoothingModeParam(model);\n\n        const modelLower = model.toLowerCase();\n        if (!SMOOTHING_MODEL_SET.has(modelLower)) {\n            invalidSmoothingModeParam(model);\n        }\n\n        this._suggestOpts.smoothing = modelLower;\n        return this;\n    }\n\n    /**\n     * Sets the given list of candidate generators which produce a list of possible terms\n     * per term in the given text. 
Each of the generators in the list are\n     * called per term in the original text.\n     *\n     * The output of the generators is subsequently scored in combination with the\n     * candidates from the other terms to form suggestion candidates.\n     *\n     * @example\n     * const suggest = esb.phraseSuggester('simple_phrase', 'title.trigram')\n     *     .size(1)\n     *     .directGenerator([\n     *         esb.directGenerator('title.trigram').suggestMode('always'),\n     *         esb.directGenerator('title.reverse')\n     *             .suggestMode('always')\n     *             .preFilter('reverse')\n     *             .postFilter('reverse')\n     *     ]);\n     *\n     * @param {Array<DirectGenerator>|DirectGenerator} dirGen Array of `DirectGenerator`\n     * instances or a single instance of `DirectGenerator`\n     * @returns {PhraseSuggester} returns `this` so that calls can be chained.\n     */\n    directGenerator(dirGen) {\n        // TODO: Do instance checks on `dirGen`\n        this._suggestOpts.direct_generator = Array.isArray(dirGen)\n            ? dirGen\n            : [dirGen];\n\n        return this;\n    }\n\n    /**\n     * Override default `toJSON` to return DSL representation for the `phrase suggester`\n     *\n     * @override\n     * @returns {Object} returns an Object which maps to the elasticsearch DSL\n     */\n    toJSON() {\n        return recursiveToJSON(this._body);\n    }\n}\n\nmodule.exports = PhraseSuggester;\n"
  },
  {
    "path": "src/suggesters/term-suggester.js",
    "content": "'use strict';\n\nconst _ = require('../_');\n\nconst {\n    consts: { SUGGEST_MODE_SET, STRING_DISTANCE_SET },\n    util: { invalidParam }\n} = require('../core');\n\nconst AnalyzedSuggesterBase = require('./analyzed-suggester-base');\n\nconst ES_REF_URL =\n    'https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-term.html';\n\nconst invalidSortParam = invalidParam(\n    ES_REF_URL,\n    'sort',\n    \"'score' or 'frequency'\"\n);\nconst invalidSuggestModeParam = invalidParam(\n    ES_REF_URL,\n    'suggest_mode',\n    SUGGEST_MODE_SET\n);\nconst invalidStringDistanceParam = invalidParam(\n    ES_REF_URL,\n    'string_distance',\n    STRING_DISTANCE_SET\n);\n\n/**\n * The term suggester suggests terms based on edit distance.\n * The provided suggest text is analyzed before terms are suggested.\n * The suggested terms are provided per analyzed suggest text token.\n * The term suggester doesn’t take the query into account that is part of request.\n *\n * [Elasticsearch reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-term.html)\n *\n * @example\n * const suggest = esb.termSuggester(\n *     'my-suggestion',\n *     'message',\n *     'tring out Elasticsearch'\n * );\n *\n * @param {string} name The name of the Suggester, an arbitrary identifier\n * @param {string=} field The field to fetch the candidate suggestions from.\n * @param {string=} txt A string to get suggestions for.\n *\n * @throws {Error} if `name` is empty\n *\n * @extends AnalyzedSuggesterBase\n */\nclass TermSuggester extends AnalyzedSuggesterBase {\n    // eslint-disable-next-line require-jsdoc\n    constructor(name, field, txt) {\n        super('term', name, field, txt);\n    }\n\n    /**\n     * Sets the sort to control how suggestions should be sorted per\n     * suggest text term.\n     *\n     * Two possible values:\n     *   - `score`: Sort by score first, then document frequency and\n     *     then the 
term itself.\n     *   - `frequency`: Sort by document frequency first, then similarity\n     *     score and then the term itself.\n     *\n     * @param {string} sort Can be `score` or `frequency`\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     * @throws {Error} If `sort` is neither `score` nor `frequency`.\n     */\n    sort(sort) {\n        if (_.isNil(sort)) invalidSortParam(sort);\n\n        const sortLower = sort.toLowerCase();\n        if (sortLower !== 'score' && sortLower !== 'frequency') {\n            invalidSortParam(sort);\n        }\n\n        this._suggestOpts.sort = sortLower;\n        return this;\n    }\n\n    /**\n     * Sets the suggest mode which controls what suggestions are included\n     * or controls for what suggest text terms, suggestions should be suggested.\n     *\n     * Three possible values can be specified:\n     *   - `missing`: Only provide suggestions for suggest text terms that\n     *     are not in the index. This is the default.\n     *   - `popular`:  Only suggest suggestions that occur in more docs\n     *     than the original suggest text term.\n     *   - `always`: Suggest any matching suggestions based on terms in the suggest text.\n     *\n     * @param {string} mode Can be `missing`, `popular` or `always`\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     * @throws {Error} If `mode` is not one of `missing`, `popular` or `always`.\n     */\n    suggestMode(mode) {\n        if (_.isNil(mode)) invalidSuggestModeParam(mode);\n\n        const modeLower = mode.toLowerCase();\n        if (!SUGGEST_MODE_SET.has(modeLower)) {\n            invalidSuggestModeParam(mode);\n        }\n\n        this._suggestOpts.suggest_mode = modeLower;\n        return this;\n    }\n\n    /**\n     * Sets the maximum edit distance candidate suggestions can have\n     * in order to be considered as a suggestion. Can only be a value\n     * between 1 and 2. 
Any other value result in an bad request\n     * error being thrown. Defaults to 2.\n     *\n     * @param {number} maxEdits Value between 1 and 2. Defaults to 2.\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     */\n    maxEdits(maxEdits) {\n        this._suggestOpts.max_edits = maxEdits;\n        return this;\n    }\n\n    /**\n     * Sets the number of minimal prefix characters that must match in order\n     * to be a candidate suggestions. Defaults to 1.\n     *\n     * Increasing this number improves spellcheck performance.\n     * Usually misspellings don't occur in the beginning of terms.\n     *\n     * @param {number} len The number of minimal prefix characters that must match in order\n     * to be a candidate suggestions. Defaults to 1.\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     */\n    prefixLength(len) {\n        this._suggestOpts.prefix_length = len;\n        return this;\n    }\n\n    /**\n     * Sets the minimum length a suggest text term must have in order to be included.\n     * Defaults to 4.\n     *\n     * @param {number} len The minimum length a suggest text term must have in order\n     * to be included. Defaults to 4.\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     */\n    minWordLength(len) {\n        this._suggestOpts.min_word_length = len;\n        return this;\n    }\n\n    /**\n     * Sets factor that is used to multiply with the `shards_size` in order to inspect\n     * more candidate spell corrections on the shard level.\n     * Can improve accuracy at the cost of performance. 
Defaults to 5.\n     *\n     * @param {number} maxInspections Factor used to multiple with `shards_size` in\n     * order to inspect more candidate spell corrections on the shard level.\n     * Defaults to 5\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     */\n    maxInspections(maxInspections) {\n        this._suggestOpts.max_inspections = maxInspections;\n        return this;\n    }\n\n    /**\n     * Sets the minimal threshold in number of documents a suggestion should appear in.\n     * This can be specified as an absolute number or as a relative percentage of\n     * number of documents. This can improve quality by only suggesting high\n     * frequency terms. Defaults to 0f and is not enabled. If a value higher than 1\n     * is specified then the number cannot be fractional. The shard level document\n     * frequencies are used for this option.\n     *\n     * @param {number} limit Threshold in number of documents a suggestion\n     * should appear in. Defaults to 0f and is not enabled.\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     */\n    minDocFreq(limit) {\n        this._suggestOpts.min_doc_freq = limit;\n        return this;\n    }\n\n    /**\n     * Sets the maximum threshold in number of documents a suggest text token can\n     * exist in order to be included. Can be a relative percentage number (e.g 0.4)\n     * or an absolute number to represent document frequencies. If an value higher\n     * than 1 is specified then fractional can not be specified. Defaults to 0.01f.\n     * This can be used to exclude high frequency terms from being spellchecked.\n     * High frequency terms are usually spelled correctly on top of this also\n     * improves the spellcheck performance. 
The shard level document frequencies are\n     * used for this option.\n     *\n     * @param {number} limit Maximum threshold in number of documents a suggest text\n     * token can exist in order to be included. Defaults to 0.01f.\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     */\n    maxTermFreq(limit) {\n        this._suggestOpts.max_term_freq = limit;\n        return this;\n    }\n\n    /**\n     * Sets the string distance implementation to use for comparing how similar\n     * suggested terms are.\n     *\n     * Five possible values can be specified:\n     *   - `internal`: The default based on `damerau_levenshtein` but highly optimized for\n     *     comparing string distance for terms inside the index.\n     *   - `damerau_levenshtein`: String distance algorithm based on Damerau-Levenshtein\n     *     algorithm.\n     *   - `levenstein`: String distance algorithm based on Levenstein edit distance\n     *     algorithm.\n     *   - `jarowinkler`: String distance algorithm based on Jaro-Winkler algorithm.\n     *   - `ngram`: String distance algorithm based on character n-grams.\n     *\n     * @param {string} implMethod One of `internal`, `damerau_levenshtein`, `levenstein`,\n     * `jarowinkler`, `ngram`\n     * @returns {TermSuggester} returns `this` so that calls can be chained.\n     * @throws {Error} If `implMethod` is not one of `internal`, `damerau_levenshtein`,\n     * `levenstein`, `jarowinkler` or ngram`.\n     */\n    stringDistance(implMethod) {\n        if (_.isNil(implMethod)) invalidStringDistanceParam(implMethod);\n\n        const implMethodLower = implMethod.toLowerCase();\n        if (!STRING_DISTANCE_SET.has(implMethodLower)) {\n            invalidStringDistanceParam(implMethod);\n        }\n\n        this._suggestOpts.string_distance = implMethodLower;\n        return this;\n    }\n}\n\nmodule.exports = TermSuggester;\n"
  },
  {
    "path": "test/.eslintrc.yml",
    "content": "root: true\nparser: '@babel/eslint-parser'\nparserOptions:\n  ecmaVersion: 2020\n  sourceType: module\n  requireConfigFile: false\nenv:\n  node: true\n  es6: true\nglobals:\n  describe: readonly\n  test: readonly\n  expect: readonly\n  beforeEach: readonly\n  afterEach: readonly\n  beforeAll: readonly\n  afterAll: readonly\n  vi: readonly\nextends:\n  - 'eslint:recommended'\n  - 'plugin:prettier/recommended'\nplugins:\n  - prettier\nrules:\n  id-length: off\n  no-unexpected-multiline: off\n  no-unused-vars:\n    - error\n    - argsIgnorePattern: '^_'\n  no-console: off\n  no-irregular-whitespace:\n    - error\n    - skipComments: true\n"
  },
  {
    "path": "test/_/index.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport * as _ from '../../src/_';\n\ndescribe('Type-checking utilities', () => {\n    describe('isNil', () => {\n        const truthyCases = [\n            { name: 'returns true for null', value: null },\n            { name: 'returns true for undefined', value: undefined }\n        ];\n\n        truthyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.isNil(tc.value)).toBe(true);\n            });\n        });\n\n        const falsyCases = [\n            { name: 'returns false for empty string', value: '' },\n            { name: 'returns false for string', value: 'hello' },\n            { name: 'returns false for number zero', value: 0 },\n            { name: 'returns false for number', value: 42 },\n            { name: 'returns false for empty object', value: {} },\n            { name: 'returns false for object', value: { key: 'value' } },\n            { name: 'returns false for empty array', value: [] },\n            { name: 'returns false for array', value: [1, 2, 3] },\n            { name: 'returns false for boolean false', value: false }\n        ];\n\n        falsyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.isNil(tc.value)).toBe(false);\n            });\n        });\n    });\n\n    describe('isString', () => {\n        const truthyCases = [\n            { name: 'returns true for empty string', value: '' },\n            { name: 'returns true for string', value: 'hello' }\n        ];\n\n        truthyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.isString(tc.value)).toBe(true);\n            });\n        });\n\n        const falsyCases = [\n            { name: 'returns false for number', value: 42 },\n            { name: 'returns false for object', value: {} },\n            { name: 'returns false for null', value: null },\n            { name: 'returns false for undefined', value: undefined 
},\n            { name: 'returns false for array', value: [] },\n            { name: 'returns false for boolean', value: true }\n        ];\n\n        falsyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.isString(tc.value)).toBe(false);\n            });\n        });\n    });\n\n    describe('isObject', () => {\n        const truthyCases = [\n            { name: 'returns true for object', value: { key: 'value' } },\n            { name: 'returns true for empty object', value: {} }\n        ];\n\n        truthyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.isObject(tc.value)).toBe(true);\n            });\n        });\n\n        const falsyCases = [\n            { name: 'returns false for array', value: [] },\n            { name: 'returns false for null', value: null },\n            { name: 'returns false for undefined', value: undefined },\n            { name: 'returns false for string', value: 'string' },\n            { name: 'returns false for number', value: 42 },\n            { name: 'returns false for boolean', value: true },\n            { name: 'returns false for function', value: () => {} }\n        ];\n\n        falsyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.isObject(tc.value)).toBe(false);\n            });\n        });\n    });\n});\n\ndescribe('Object manipulation utilities', () => {\n    describe('has', () => {\n        test('returns true for own property', () => {\n            const obj = { key: 'value' };\n            expect(_.has(obj, 'key')).toBe(true);\n        });\n\n        test('returns true for hasOwnProperty key', () => {\n            const obj = { hasOwnProperty: 'custom' };\n            expect(_.has(obj, 'hasOwnProperty')).toBe(true);\n        });\n\n        const falsyCases = [\n            {\n                name: 'returns false for inherited property',\n                obj: Object.create({ inherited: 'value' }),\n                
prop: 'inherited'\n            },\n            {\n                name: 'returns false for missing property',\n                obj: { key: 'value' },\n                prop: 'missing'\n            },\n            { name: 'returns false for null object', obj: null, prop: 'key' },\n            {\n                name: 'returns false for undefined object',\n                obj: undefined,\n                prop: 'key'\n            }\n        ];\n\n        falsyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.has(tc.obj, tc.prop)).toBe(false);\n            });\n        });\n    });\n\n    describe('hasIn', () => {\n        const truthyCases = [\n            {\n                name: 'returns true for own property',\n                obj: { key: 'value' },\n                prop: 'key'\n            },\n            {\n                name: 'returns true for inherited property',\n                obj: Object.create({ inherited: 'value' }),\n                prop: 'inherited'\n            },\n            {\n                name: 'returns true for prototype chain property',\n                obj: {},\n                prop: 'toString'\n            }\n        ];\n\n        truthyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.hasIn(tc.obj, tc.prop)).toBe(true);\n            });\n        });\n\n        const falsyCases = [\n            {\n                name: 'returns false for missing property',\n                obj: { key: 'value' },\n                prop: 'missing'\n            },\n            { name: 'returns false for null object', obj: null, prop: 'key' },\n            {\n                name: 'returns false for undefined object',\n                obj: undefined,\n                prop: 'key'\n            }\n        ];\n\n        falsyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.hasIn(tc.obj, tc.prop)).toBe(false);\n            });\n        });\n    });\n\n    
describe('omit', () => {\n        const testCases = [\n            {\n                name: 'creates new object excluding single key',\n                obj: { a: 1, b: 2, c: 3 },\n                keys: ['b'],\n                expected: { a: 1, c: 3 }\n            },\n            {\n                name: 'creates new object excluding multiple keys',\n                obj: { a: 1, b: 2, c: 3, d: 4 },\n                keys: ['b', 'd'],\n                expected: { a: 1, c: 3 }\n            },\n            {\n                name: 'returns object with all keys when empty array provided',\n                obj: { a: 1, b: 2 },\n                keys: [],\n                expected: { a: 1, b: 2 }\n            },\n            {\n                name: 'ignores non-existent keys',\n                obj: { a: 1, b: 2 },\n                keys: ['c', 'd'],\n                expected: { a: 1, b: 2 }\n            },\n            {\n                name: 'returns empty object for empty source object',\n                obj: {},\n                keys: ['a', 'b'],\n                expected: {}\n            },\n            {\n                name: 'returns empty object for null',\n                obj: null,\n                keys: ['a'],\n                expected: {}\n            },\n            {\n                name: 'returns empty object for undefined',\n                obj: undefined,\n                keys: ['a'],\n                expected: {}\n            }\n        ];\n\n        testCases.forEach(tc => {\n            test(tc.name, () => {\n                const result = _.omit(tc.obj, tc.keys);\n                expect(result).toEqual(tc.expected);\n            });\n        });\n\n        test('does not include inherited properties', () => {\n            const parent = { inherited: 'value' };\n            const obj = Object.create(parent);\n            obj.own = 'ownValue';\n            const result = _.omit(obj, []);\n            expect(result).toEqual({ own: 'ownValue' });\n        
});\n    });\n});\n\ndescribe('Collection utilities', () => {\n    describe('isEmpty', () => {\n        const truthyCases = [\n            { name: 'returns true for null', value: null },\n            { name: 'returns true for undefined', value: undefined },\n            { name: 'returns true for empty string', value: '' },\n            { name: 'returns true for empty array', value: [] },\n            { name: 'returns true for empty object', value: {} }\n        ];\n\n        truthyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.isEmpty(tc.value)).toBe(true);\n            });\n        });\n\n        const falsyCases = [\n            { name: 'returns false for non-empty string', value: 'hello' },\n            { name: 'returns false for non-empty array', value: [1, 2, 3] },\n            {\n                name: 'returns false for non-empty object',\n                value: { key: 'value' }\n            },\n            { name: 'returns false for number', value: 42 },\n            { name: 'returns false for number zero', value: 0 },\n            { name: 'returns false for boolean true', value: true },\n            { name: 'returns false for boolean false', value: false }\n        ];\n\n        falsyCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.isEmpty(tc.value)).toBe(false);\n            });\n        });\n    });\n\n    describe('head', () => {\n        const testCases = [\n            {\n                name: 'returns first element of array',\n                input: [1, 2, 3],\n                expected: 1\n            },\n            {\n                name: 'returns first element of string array',\n                input: ['a', 'b', 'c'],\n                expected: 'a'\n            },\n            {\n                name: 'returns undefined for empty array',\n                input: [],\n                expected: undefined\n            },\n            {\n                name: 'returns undefined for 
null',\n                input: null,\n                expected: undefined\n            },\n            {\n                name: 'returns undefined for undefined',\n                input: undefined,\n                expected: undefined\n            },\n            {\n                name: 'returns only element for single-element array',\n                input: [42],\n                expected: 42\n            }\n        ];\n\n        testCases.forEach(tc => {\n            test(tc.name, () => {\n                expect(_.head(tc.input)).toBe(tc.expected);\n            });\n        });\n\n        test('works with array of objects', () => {\n            const arr = [{ id: 1 }, { id: 2 }];\n            expect(_.head(arr)).toEqual({ id: 1 });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/adjacency-matrix-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { AdjacencyMatrixAggregation, termQuery } from '../../src';\n\nconst getInstance = () => new AdjacencyMatrixAggregation('my_adj_mat_agg');\n\nconst filterQryA = termQuery('user', 'kimchy');\nconst filterQryB = termQuery('company', 'elastic');\n\ndescribe('AdjacencyMatrixAggregation', () => {\n    test('sets type as adjacency_matrix', () => {\n        const value = new AdjacencyMatrixAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { adjacency_matrix: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new AdjacencyMatrixAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in AdjacencyMatrixAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new AdjacencyMatrixAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in AdjacencyMatrixAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets separator', () => {\n            const value = getInstance().separator('$').toJSON();\n            expect(value).toEqual({\n                my_adj_mat_agg: {\n                    adjacency_matrix: {\n                        separator: '$'\n                    }\n                }\n            });\n        });\n    });\n\n    describe('filters', () => {\n        test('filters are set', () => {\n            let value = getInstance()\n                .filter('user_kimchy', filterQryA)\n                .filter('company_elastic', filterQryB)\n                .toJSON();\n            const expected = {\n                my_adj_mat_agg: {\n                    adjacency_matrix: {\n                        filters: {\n                            user_kimchy: { term: { user: 'kimchy' } },\n                            company_elastic: { term: { company: 'elastic' } }\n                        }\n        
            }\n                }\n            };\n            expect(value).toEqual(expected);\n\n            value = getInstance()\n                .filters({\n                    user_kimchy: filterQryA,\n                    company_elastic: filterQryB\n                })\n                .toJSON();\n            expect(value).toEqual(expected);\n        });\n\n        test('filters are merged', () => {\n            const agg = getInstance().filters({ user_kimchy: filterQryA });\n            agg.filters({ company_elastic: filterQryB });\n\n            const value = agg.toJSON();\n            const expected = {\n                my_adj_mat_agg: {\n                    adjacency_matrix: {\n                        filters: {\n                            user_kimchy: { term: { user: 'kimchy' } },\n                            company_elastic: { term: { company: 'elastic' } }\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/auto-date-histogram-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { AutoDateHistogramAggregation } from '../../src';\n\ndescribe('AutoDateHistogramAggregation', () => {\n    test('sets type as auto_date_histogram', () => {\n        const value = new AutoDateHistogramAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { auto_date_histogram: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets arguments', () => {\n            const value = new AutoDateHistogramAggregation(\n                    'sale_date',\n                    'date',\n                    10\n                ).toJSON(),\n                expected = {\n                    sale_date: {\n                        auto_date_histogram: {\n                            field: 'date',\n                            buckets: 10\n                        }\n                    }\n                };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sets buckets', () => {\n            const value = new AutoDateHistogramAggregation('by_day', 'date', 10)\n                .buckets(20)\n                .toJSON();\n            const expected = {\n                by_day: {\n                    auto_date_histogram: {\n                        field: 'date',\n                        buckets: 20\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets format', () => {\n            const value = new AutoDateHistogramAggregation('by_day', 'date', 10)\n                .format('yyyy-MM-dd')\n                .toJSON();\n            const expected = {\n                by_day: {\n                    auto_date_histogram: {\n                        field: 'date',\n                        buckets: 10,\n                        format: 'yyyy-MM-dd'\n                    }\n                }\n            };\n            
expect(value).toEqual(expected);\n        });\n\n        test('sets minimumInterval', () => {\n            const value = new AutoDateHistogramAggregation('by_day', 'date', 10)\n                .minimumInterval('minute')\n                .toJSON();\n            const expected = {\n                by_day: {\n                    auto_date_histogram: {\n                        field: 'date',\n                        buckets: 10,\n                        minimum_interval: 'minute'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets timeZone', () => {\n            const value = new AutoDateHistogramAggregation('by_day', 'date', 10)\n                .timeZone('-01:00')\n                .toJSON();\n            const expected = {\n                by_day: {\n                    auto_date_histogram: {\n                        field: 'date',\n                        buckets: 10,\n                        time_zone: '-01:00'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets missing', () => {\n            const value = new AutoDateHistogramAggregation('by_day', 'date', 10)\n                .missing('2000/01/01')\n                .toJSON();\n            const expected = {\n                by_day: {\n                    auto_date_histogram: {\n                        field: 'date',\n                        buckets: 10,\n                        missing: '2000/01/01'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/avg-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { AvgAggregation } from '../../src';\n\ndescribe('AvgAggregation', () => {\n    test('sets type as avg', () => {\n        const value = new AvgAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { avg: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = new AvgAggregation('my_agg', 'my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    avg: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/avg-bucket-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { AvgBucketAggregation } from '../../src';\n\ndescribe('AvgBucketAggregation', () => {\n    test('sets type as avg_bucket', () => {\n        const value = new AvgBucketAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { avg_bucket: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = new AvgBucketAggregation(\n                'my_agg',\n                'my_buckets_path'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    avg_bucket: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/bucket-agg-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { Script } from '../../src';\nimport { BucketAggregationBase } from '../../src/aggregations/bucket-aggregations';\n\nconst getInstance = (...args) =>\n    new BucketAggregationBase('my_agg', 'my_type', ...args);\n\ndescribe('BucketAggregationBase', () => {\n    describe('constructor', () => {\n        test('can be instantiated', () => {\n            expect(getInstance()).toBeTruthy();\n        });\n\n        test('sets arguments', () => {\n            const value = getInstance('my_field').toJSON(),\n                myOtherAgg = getInstance().field('my_field').toJSON();\n            expect(value).toEqual(myOtherAgg);\n        });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('script()', () => {\n                expect(() => getInstance().script(value)).toThrow(\n                    new TypeError('Argument must be an instance of Script')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets field', () => {\n            const value = getInstance().field('my_field').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        field: 'my_field'\n                    }\n                }\n            });\n        });\n\n        test('sets script', () => {\n            const value = getInstance()\n                .script(\n                    new Script()\n                        .lang('groovy')\n                        .file('calculate-score')\n                        .params({ my_modifier: 2 })\n                )\n                .toJSON();\n            
expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        script: {\n                            lang: 'groovy',\n                            file: 'calculate-score',\n                            params: { my_modifier: 2 }\n                        }\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/bucket-script-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { BucketScriptAggregation, Script } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new BucketScriptAggregation('my_agg', bucketsPath);\n\ndescribe('BucketScriptAggregation', () => {\n    test('sets type as bucket_script', () => {\n        const value = new BucketScriptAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { bucket_script: {} }\n        });\n    });\n\n    describe('options', () => {\n        test('sets script with string', () => {\n            const value = getInstance()\n                .script('params.my_var1 / params.my_var2')\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    bucket_script: {\n                        script: 'params.my_var1 / params.my_var2'\n                    }\n                }\n            });\n        });\n\n        test('sets script with Script instance', () => {\n            const scriptInstance = new Script(\n                'inline',\n                'params.my_var1 / params.my_var2'\n            );\n            const value = getInstance().script(scriptInstance).toJSON();\n            const expected = {\n                my_agg: {\n                    bucket_script: {\n                        script: scriptInstance.toJSON()\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = getInstance({\n                my_var1: 'the_sum',\n                my_var2: 'the_value_count'\n            }).toJSON();\n            const expected = {\n                my_agg: {\n                    bucket_script: {\n                        buckets_path: {\n                            my_var1: 'the_sum',\n                            my_var2: 'the_value_count'\n                        }\n   
                 }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/bucket-selector-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { BucketSelectorAggregation, Script } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new BucketSelectorAggregation('my_agg', bucketsPath);\n\ndescribe('BucketSelectorAggregation', () => {\n    test('sets type as bucket_selector', () => {\n        const value = new BucketSelectorAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { bucket_selector: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new BucketSelectorAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in BucketSelectorAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets script with string', () => {\n            const value = getInstance()\n                .script('params.my_var1 / params.my_var2')\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    bucket_selector: {\n                        script: 'params.my_var1 / params.my_var2'\n                    }\n                }\n            });\n        });\n\n        test('sets script with Script instance', () => {\n            const scriptInstance = new Script(\n                'inline',\n                'params.my_var1 / params.my_var2'\n            );\n            const value = getInstance().script(scriptInstance).toJSON();\n            const expected = {\n                my_agg: {\n                    bucket_selector: {\n                        script: scriptInstance.toJSON()\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    test('constructor sets buckets_path', () => {\n        const value = getInstance({\n            my_var1: 'the_sum',\n            my_var2: 'the_value_count'\n        }).toJSON();\n        const expected = {\n            my_agg: {\n                
bucket_selector: {\n                    buckets_path: {\n                        my_var1: 'the_sum',\n                        my_var2: 'the_value_count'\n                    }\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/bucket-sort-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { BucketSortAggregation, Sort } from '../../src';\n\nconst getInstance = () => new BucketSortAggregation('my_agg');\n\ndescribe('BucketSortAggregation', () => {\n    test('sets type as bucket_sort', () => {\n        const value = new BucketSortAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { bucket_sort: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('can be instantiated', () => {\n            const value = getInstance().toJSON();\n            const expected = {\n                my_agg: {\n                    bucket_sort: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sort from and size are set', () => {\n            const value = getInstance()\n                .sort([new Sort('myField', 'desc')])\n                .from(5)\n                .size(10)\n                .toJSON();\n\n            const expected = {\n                my_agg: {\n                    bucket_sort: {\n                        sort: [\n                            {\n                                myField: 'desc'\n                            }\n                        ],\n                        from: 5,\n                        size: 10\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/cardinality-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { CardinalityAggregation } from '../../src';\n\nconst getInstance = field => new CardinalityAggregation('my_agg', field);\n\ndescribe('CardinalityAggregation', () => {\n    test('sets type as cardinality', () => {\n        const value = new CardinalityAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { cardinality: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new CardinalityAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in CardinalityAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets precisionThreshold', () => {\n            const value = getInstance('my_field')\n                .precisionThreshold(5000)\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    cardinality: {\n                        field: 'my_field',\n                        precision_threshold: 5000\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = getInstance('my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    cardinality: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/children-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ChildrenAggregation } from '../../src';\n\ndescribe('ChildrenAggregation', () => {\n    test('sets type as children', () => {\n        const value = new ChildrenAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { children: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new ChildrenAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in ChildrenAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new ChildrenAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in ChildrenAggregation')\n        );\n    });\n\n    test('type is set', () => {\n        const value = new ChildrenAggregation('to_answers')\n            .type('answer')\n            .toJSON();\n        const expected = {\n            to_answers: {\n                children: {\n                    type: 'answer'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/composite-agg-values-sources-test/date-histogram-values-source.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { CompositeAggregation } from '../../../src';\n\nconst { DateHistogramValuesSource } = CompositeAggregation;\n\nconst getInstance = (...args) =>\n    new DateHistogramValuesSource('my_val_src', ...args);\n\ndescribe('DateHistogramValuesSource', () => {\n    test('constructor sets arguments', () => {\n        const value = getInstance('my_field', '1d').toJSON();\n        const expected = {\n            my_val_src: {\n                date_histogram: {\n                    field: 'my_field',\n                    interval: '1d'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    test('calendar interval is set', () => {\n        const value = getInstance('field_name', 'date')\n            .calendarInterval('month')\n            .toJSON();\n        const expected = {\n            my_val_src: {\n                date_histogram: {\n                    field: 'field_name',\n                    calendar_interval: 'month',\n                    interval: 'date'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    test('fixed interval is set', () => {\n        const value = getInstance('field_name', 'date')\n            .fixedInterval('90s')\n            .toJSON();\n        const expected = {\n            my_val_src: {\n                date_histogram: {\n                    field: 'field_name',\n                    fixed_interval: '90s',\n                    interval: 'date'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    test('sets type as date_histogram', () => {\n        const value = new DateHistogramValuesSource('my_val_src').toJSON();\n        expect(value).toEqual({\n            my_val_src: { date_histogram: {} }\n        });\n    });\n\n    describe('options', () => {\n        test('sets interval', () => {\n            const value = getInstance().interval(5).toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    date_histogram: {\n                        interval: 5\n                    }\n                }\n            });\n        });\n\n        test('sets timeZone', () => {\n            const value = getInstance()\n                .timeZone('America/Los_Angeles')\n                .toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    date_histogram: {\n                        time_zone: 'America/Los_Angeles'\n                    }\n                }\n            });\n        });\n\n        test('sets format', () => {\n            const value = getInstance().format('yyyy-MM-dd').toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    date_histogram: {\n                        format: 'yyyy-MM-dd'\n                    }\n                }\n            });\n        });\n\n        test('sets calendarInterval', () => {\n            const value = getInstance().calendarInterval('month').toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    date_histogram: {\n                        calendar_interval: 'month'\n                    }\n                }\n            });\n        });\n\n        test('sets fixedInterval', () => {\n            const value = getInstance().fixedInterval('90s').toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    date_histogram: {\n                        fixed_interval: '90s'\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/composite-agg-values-sources-test/histogram-values-source.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { CompositeAggregation } from '../../../src';\n\nconst { HistogramValuesSource } = CompositeAggregation;\n\nconst getInstance = (...args) =>\n    new HistogramValuesSource('my_val_src', ...args);\n\ndescribe('HistogramValuesSource', () => {\n    test('constructor sets arguments', () => {\n        const value = getInstance('my_field', 10).toJSON();\n        const expected = {\n            my_val_src: {\n                histogram: {\n                    field: 'my_field',\n                    interval: 10\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    test('sets type as histogram', () => {\n        const value = new HistogramValuesSource('my_val_src').toJSON();\n        expect(value).toEqual({\n            my_val_src: { histogram: {} }\n        });\n    });\n\n    describe('options', () => {\n        test('sets interval', () => {\n            const value = getInstance().interval(5).toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    histogram: {\n                        interval: 5\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/composite-agg-values-sources-test/terms-values-source.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { Script } from '../../../src';\nimport {\n    TermsValuesSource,\n    ValuesSourceBase\n} from '../../../src/aggregations/bucket-aggregations/composite-agg-values-sources';\n\nconst getInstance = field => new TermsValuesSource('my_val_src', field);\n\ndescribe('TermsValuesSource', () => {\n    test('base class type cannot be empty', () => {\n        expect(() => new ValuesSourceBase()).toThrow(\n            new Error('ValuesSourceBase `valueSrcType` cannot be empty')\n        );\n    });\n\n    test('constructor sets arguments', () => {\n        const value = getInstance('my_field').toJSON();\n        const expected = {\n            my_val_src: {\n                terms: { field: 'my_field' }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    test('sets type as terms', () => {\n        const value = new TermsValuesSource('my_val_src').toJSON();\n        expect(value).toEqual({\n            my_val_src: { terms: {} }\n        });\n    });\n\n    describe('order() validation', () => {\n        test.each([\n            { name: 'accepts valid value: asc', value: 'asc' },\n            {\n                name: 'accepts valid value: ASC (case-insensitive)',\n                value: 'ASC'\n            },\n            { name: 'accepts valid value: desc', value: 'desc' },\n            {\n                name: 'accepts valid value: DESC (case-insensitive)',\n                value: 'DESC'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().order(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_order' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().order(value)).toThrow(\n                new Error(\n                    \"The 'order' parameter should be one of 'asc' or 'desc'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets missing', () => {\n            const value = getInstance().missing(42).toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    terms: {\n                        missing: 42\n                    }\n                }\n            });\n        });\n\n        test('sets missingBucket', () => {\n            const value = getInstance().missingBucket(true).toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    terms: {\n                        missing_bucket: true\n                    }\n                }\n            });\n        });\n\n        test('sets field', () => {\n            const value = getInstance().field('my_field').toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    terms: {\n                        field: 'my_field'\n                    }\n                }\n            });\n        });\n\n        test('sets script', () => {\n            const scriptInstance = new Script()\n                .lang('groovy')\n                .file('calculate-score')\n                .params({ my_modifier: 2 });\n            const value = getInstance().script(scriptInstance).toJSON();\n            const expected = {\n                my_val_src: {\n                    terms: {\n                        script: scriptInstance.toJSON()\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets valueType', () => {\n            const value = getInstance().valueType('date').toJSON();\n            expect(value).toEqual({\n                my_val_src: {\n                    terms: {\n                        value_type: 'date'\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/composite-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { CompositeAggregation } from '../../src';\nimport {\n    DateHistogramValuesSource,\n    TermsValuesSource\n} from '../../src/aggregations/bucket-aggregations/composite-agg-values-sources';\n\nconst getInstance = () => new CompositeAggregation('my_cmpt_agg');\n\ndescribe('CompositeAggregation', () => {\n    test('sets type as composite', () => {\n        const value = new CompositeAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: {\n                composite: {\n                    sources: []\n                }\n            }\n        });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('sources()', () => {\n                expect(() => getInstance().sources(value)).toThrow(\n                    new TypeError(\n                        'Argument must be an instance of ValuesSourceBase'\n                    )\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets sources', () => {\n            const dateHistoValSrc = new DateHistogramValuesSource(\n                'date',\n                'timestamp',\n                '1d'\n            );\n            const termsValSrc = new TermsValuesSource('product', 'product');\n\n            const value = getInstance()\n                .sources(dateHistoValSrc, termsValSrc)\n                .toJSON();\n            const expected = {\n                my_cmpt_agg: {\n                    composite: {\n                        sources: [\n                            dateHistoValSrc.toJSON(),\n                            termsValSrc.toJSON()\n                        ]\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets size', () => {\n            const value = getInstance().size(2).toJSON();\n            expect(value).toEqual({\n                my_cmpt_agg: {\n                    composite: {\n                        sources: [],\n                        size: 2\n                    }\n                }\n            });\n        });\n\n        test('sets after', () => {\n            const value = getInstance()\n                .after({ date: 1494288000000, product: 'mad max' })\n                .toJSON();\n            expect(value).toEqual({\n                my_cmpt_agg: {\n                    composite: {\n                        sources: [],\n                        after: { date: 1494288000000, product: 'mad max' }\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/cumulative-sum-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { CumulativeSumAggregation } from '../../src';\n\ndescribe('CumulativeSumAggregation', () => {\n    test('sets type as cumulative_sum', () => {\n        const value = new CumulativeSumAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { cumulative_sum: {} }\n        });\n    });\n\n    test('gapPolicy cannot be set', () => {\n        expect(() =>\n            new CumulativeSumAggregation('my_agg').gapPolicy()\n        ).toThrow(\n            new Error('gapPolicy is not supported in CumulativeSumAggregation')\n        );\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = new CumulativeSumAggregation(\n                'my_agg',\n                'my_buckets_path'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    cumulative_sum: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/date-histogram-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { DateHistogramAggregation } from '../../src';\n\ndescribe('DateHistogramAggregation', () => {\n    test('sets type as date_histogram', () => {\n        const value = new DateHistogramAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { date_histogram: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets optional arguments', () => {\n            const value = new DateHistogramAggregation(\n                    'sale_date',\n                    'date',\n                    'year'\n                ).toJSON(),\n                expected = {\n                    sale_date: {\n                        date_histogram: {\n                            field: 'date',\n                            interval: 'year'\n                        }\n                    }\n                };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('time zone is set', () => {\n            const value = new DateHistogramAggregation('by_day', 'date', 'day')\n                .timeZone('-01:00')\n                .toJSON();\n            const expected = {\n                by_day: {\n                    date_histogram: {\n                        field: 'date',\n                        interval: 'day',\n                        time_zone: '-01:00'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('calendar interval is set', () => {\n            const value = new DateHistogramAggregation('by_day', 'date')\n                .calendarInterval('month')\n                .toJSON();\n            const expected = {\n                by_day: {\n                    date_histogram: {\n                        field: 'date',\n                        calendar_interval: 'month'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('fixed interval is set', () => {\n            const value = new DateHistogramAggregation('by_day', 'date')\n                .fixedInterval('90s')\n                .toJSON();\n            const expected = {\n                by_day: {\n                    date_histogram: {\n                        field: 'date',\n                        fixed_interval: '90s'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/date-range-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { DateRangeAggregation } from '../../src';\n\nconst getInstance = (...args) =>\n    new DateRangeAggregation('my_agg', ...args).range({ to: 'now-10M/M' });\n\ndescribe('DateRangeAggregation', () => {\n    test('sets type as date_range', () => {\n        const value = getInstance().toJSON();\n        const expected = {\n            my_agg: {\n                date_range: {\n                    ranges: [{ to: 'now-10M/M' }]\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    describe('constructor', () => {\n        test('sets arguments', () => {\n            const value = getInstance('date').toJSON(),\n                expected = {\n                    my_agg: {\n                        date_range: {\n                            field: 'date',\n                            ranges: [{ to: 'now-10M/M' }]\n                        }\n                    }\n                };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('time zone is set', () => {\n            const value = getInstance().timeZone('CET').toJSON();\n            const expected = {\n                my_agg: {\n                    date_range: {\n                        time_zone: 'CET',\n                        ranges: [{ to: 'now-10M/M' }]\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/derivative-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { DerivativeAggregation } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new DerivativeAggregation('my_agg', bucketsPath);\n\ndescribe('DerivativeAggregation', () => {\n    test('sets type as derivative', () => {\n        const value = new DerivativeAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { derivative: {} }\n        });\n    });\n\n    describe('options', () => {\n        test('sets unit', () => {\n            const value = getInstance('my_buckets_path').unit('day').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    derivative: {\n                        buckets_path: 'my_buckets_path',\n                        unit: 'day'\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = getInstance('my_buckets_path').toJSON();\n            const expected = {\n                my_agg: {\n                    derivative: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/diversified-sampler-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { DiversifiedSamplerAggregation } from '../../src';\n\nconst getInstance = () =>\n    new DiversifiedSamplerAggregation('my_samples', 'my_field');\n\ndescribe('DiversifiedSamplerAggregation', () => {\n    test('sets type as diversified_sampler', () => {\n        const value = new DiversifiedSamplerAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { diversified_sampler: {} }\n        });\n    });\n\n    describe('executionHint() validation', () => {\n        test.each([\n            { name: 'accepts valid value: map', value: 'map' },\n            {\n                name: 'accepts valid value: MAP (case-insensitive)',\n                value: 'MAP'\n            },\n            {\n                name: 'accepts valid value: global_ordinals',\n                value: 'global_ordinals'\n            },\n            {\n                name: 'accepts valid value: GLOBAL_ORDINALS (case-insensitive)',\n                value: 'GLOBAL_ORDINALS'\n            },\n            {\n                name: 'accepts valid value: global_ordinals_hash',\n                value: 'global_ordinals_hash'\n            },\n            {\n                name: 'accepts valid value: GLOBAL_ORDINALS_HASH (case-insensitive)',\n                value: 'GLOBAL_ORDINALS_HASH'\n            },\n            {\n                name: 'accepts valid value: global_ordinals_low_cardinality',\n                value: 'global_ordinals_low_cardinality'\n            },\n            {\n                name: 'accepts valid value: GLOBAL_ORDINALS_LOW_CARDINALITY (case-insensitive)',\n                value: 'GLOBAL_ORDINALS_LOW_CARDINALITY'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().executionHint(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid value',\n                value: 'invalid_execution_hint'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().executionHint(value)).toThrow(\n                new Error(\n                    \"The 'execution_hint' parameter should be one of 'global_ordinals', 'global_ordinals_hash', 'global_ordinals_low_cardinality', 'map'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets shardSize', () => {\n            const value = getInstance().shardSize(200).toJSON();\n            expect(value).toEqual({\n                my_samples: {\n                    diversified_sampler: {\n                        field: 'my_field',\n                        shard_size: 200\n                    }\n                }\n            });\n        });\n\n        test('sets maxDocsPerValue', () => {\n            const value = getInstance().maxDocsPerValue(3).toJSON();\n            expect(value).toEqual({\n                my_samples: {\n                    diversified_sampler: {\n                        field: 'my_field',\n                        max_docs_per_value: 3\n                    }\n                }\n            });\n        });\n\n        test('sets executionHint', () => {\n            const value = getInstance().executionHint('map').toJSON();\n            expect(value).toEqual({\n                my_samples: {\n                    diversified_sampler: {\n                        field: 'my_field',\n                        execution_hint: 'map'\n                    }\n                }\n            });\n        });\n    });\n\n    test('constructor sets arguments', () => {\n        const value = getInstance().toJSON();\n        const expected = {\n            my_samples: {\n                diversified_sampler: {\n                    field: 'my_field'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/extended-stats-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ExtendedStatsAggregation } from '../../src';\n\nconst getInstance = field => new ExtendedStatsAggregation('my_agg', field);\n\ndescribe('ExtendedStatsAggregation', () => {\n    test('sets type as extended_stats', () => {\n        const value = new ExtendedStatsAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { extended_stats: {} }\n        });\n    });\n\n    describe('options', () => {\n        test('sets sigma', () => {\n            const value = getInstance('my_field').sigma(3).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    extended_stats: {\n                        field: 'my_field',\n                        sigma: 3\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const valueA = getInstance('my_field').toJSON();\n            const valueB = getInstance().field('my_field').toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                my_agg: {\n                    extended_stats: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/extended-stats-bucket-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ExtendedStatsBucketAggregation } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new ExtendedStatsBucketAggregation('my_agg', bucketsPath);\n\ndescribe('ExtendedStatsBucketAggregation', () => {\n    test('sets type as extended_stats_bucket', () => {\n        const value = new ExtendedStatsBucketAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { extended_stats_bucket: {} }\n        });\n    });\n\n    describe('options', () => {\n        test('sets sigma', () => {\n            const value = getInstance('my_buckets_path').sigma(3).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    extended_stats_bucket: {\n                        buckets_path: 'my_buckets_path',\n                        sigma: 3\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const valueA = getInstance('my_buckets_path').toJSON();\n            const valueB = getInstance()\n                .bucketsPath('my_buckets_path')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                my_agg: {\n                    extended_stats_bucket: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/filter-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { FilterAggregation, TermQuery } from '../../src';\n\nconst getInstance = (...args) =>\n    new FilterAggregation('my_filter_agg', ...args);\n\nconst filterQry = new TermQuery('user', 'kimchy');\n\ndescribe('FilterAggregation', () => {\n    test('sets type as filter', () => {\n        const value = new FilterAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { filter: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new FilterAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in FilterAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new FilterAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in FilterAggregation')\n        );\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('filter()', () => {\n                expect(() => getInstance().filter(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n        });\n    });\n\n    test('constructor sets arguments', () => {\n        const value = getInstance(filterQry).toJSON();\n        const expected = getInstance().filter(filterQry).toJSON();\n        expect(value).toEqual(expected);\n    });\n\n    test('filter is set', () => {\n        const value = getInstance().filter(filterQry).toJSON();\n        const expected = {\n            my_filter_agg: {\n                filter: { term: { user: 'kimchy' } }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/filters-agg.test.js",
    "content": "import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';\nimport { FiltersAggregation, termQuery } from '../../src';\n\nconst getInstance = (...args) =>\n    new FiltersAggregation('my_filters_agg', ...args);\n\nconst filterQryA = termQuery('user', 'kimchy');\nconst filterQryB = termQuery('company', 'elastic');\n\ndescribe('FiltersAggregation', () => {\n    test('sets type as filters', () => {\n        const value = new FiltersAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { filters: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new FiltersAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in FiltersAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new FiltersAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in FiltersAggregation')\n        );\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('filter()', () => {\n                expect(() => getInstance().filter(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets otherBucket', () => {\n            const value = getInstance().otherBucket(true).toJSON();\n            expect(value).toEqual({\n                my_filters_agg: {\n                    filters: {\n                        other_bucket: true\n                    }\n                }\n            });\n        });\n\n        test('sets otherBucketKey', () => {\n            
const value = getInstance()\n                .otherBucketKey('other_messages')\n                .toJSON();\n            expect(value).toEqual({\n                my_filters_agg: {\n                    filters: {\n                        other_bucket_key: 'other_messages'\n                    }\n                }\n            });\n        });\n    });\n\n    describe('filter methods', () => {\n        test('named filters are set', () => {\n            let value = getInstance()\n                .filter('user_kimchy', filterQryA)\n                .filter('company_elastic', filterQryB)\n                .toJSON();\n            const expected = {\n                my_filters_agg: {\n                    filters: {\n                        filters: {\n                            user_kimchy: { term: { user: 'kimchy' } },\n                            company_elastic: { term: { company: 'elastic' } }\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n\n            value = getInstance()\n                .filters({\n                    user_kimchy: filterQryA,\n                    company_elastic: filterQryB\n                })\n                .toJSON();\n            expect(value).toEqual(expected);\n        });\n\n        test('anonymous filters are set', () => {\n            let value = getInstance()\n                .anonymousFilter(filterQryA)\n                .anonymousFilter(filterQryB)\n                .toJSON();\n            const expected = {\n                my_filters_agg: {\n                    filters: {\n                        filters: [\n                            { term: { user: 'kimchy' } },\n                            { term: { company: 'elastic' } }\n                        ]\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n\n            value = getInstance()\n                .anonymousFilters([filterQryA, filterQryB])\n  
              .toJSON();\n            expect(value).toEqual(expected);\n        });\n\n        test('mixed representation', () => {\n            let value = getInstance()\n                .filter('user_kimchy', filterQryA)\n                .anonymousFilter(filterQryB)\n                .toJSON();\n            let expected = {\n                my_filters_agg: {\n                    filters: {\n                        filters: [{ term: { company: 'elastic' } }]\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n\n            value = getInstance()\n                .anonymousFilter(filterQryA)\n                .filter('company_elastic', filterQryB)\n                .toJSON();\n            expected = {\n                my_filters_agg: {\n                    filters: {\n                        filters: {\n                            company_elastic: { term: { company: 'elastic' } }\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('other bucket key is set', () => {\n            const value = getInstance()\n                .otherBucket(true, 'other_messages')\n                .toJSON();\n            const expected = {\n                my_filters_agg: {\n                    filters: {\n                        other_bucket: true,\n                        other_bucket_key: 'other_messages'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('mixed representation logs warning', () => {\n        let spy;\n\n        beforeEach(() => {\n            spy = vi.spyOn(console, 'warn').mockImplementation(() => {});\n        });\n\n        afterEach(() => {\n            spy.mockRestore();\n        });\n\n        test('logs warning for mixed representation', () => {\n            getInstance()\n                .filter('user_kimchy', 
filterQryA)\n                .anonymousFilter(filterQryB)\n                .toJSON();\n\n            expect(spy).toHaveBeenCalledTimes(2);\n            expect(spy).toHaveBeenNthCalledWith(\n                1,\n                '[FiltersAggregation] Do not mix named and anonymous filters!'\n            );\n            expect(spy).toHaveBeenNthCalledWith(\n                2,\n                '[FiltersAggregation] Overwriting named filters.'\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/geo-bounds-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoBoundsAggregation } from '../../src';\n\nconst getInstance = field => new GeoBoundsAggregation('my_agg', field);\n\ndescribe('GeoBoundsAggregation', () => {\n    test('sets type as geo_bounds', () => {\n        const value = new GeoBoundsAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { geo_bounds: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new GeoBoundsAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in GeoBoundsAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new GeoBoundsAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in GeoBoundsAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets wrapLongitude', () => {\n            const value = getInstance('my_field')\n                .wrapLongitude('true')\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    geo_bounds: {\n                        field: 'my_field',\n                        wrap_longitude: 'true'\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = getInstance('my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    geo_bounds: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/geo-centroid-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoCentroidAggregation } from '../../src';\n\nconst getInstance = field => new GeoCentroidAggregation('my_agg', field);\n\ndescribe('GeoCentroidAggregation', () => {\n    test('sets type as geo_centroid', () => {\n        const value = new GeoCentroidAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { geo_centroid: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new GeoCentroidAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in GeoCentroidAggregation')\n        );\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = getInstance('my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    geo_centroid: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/geo-distance-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoDistanceAggregation, geoPoint } from '../../src';\n\nconst getInstance = () =>\n    new GeoDistanceAggregation('my_geo_agg').range({ to: 100 });\n\ndescribe('GeoDistanceAggregation', () => {\n    test('sets type as geo_distance', () => {\n        const value = getInstance().toJSON();\n        const expected = {\n            my_geo_agg: {\n                geo_distance: {\n                    ranges: [{ to: 100 }]\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new GeoDistanceAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in GeoDistanceAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new GeoDistanceAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in GeoDistanceAggregation')\n        );\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('origin()', () => {\n                expect(() => getInstance().origin(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n        });\n    });\n\n    describe('unit() validation', () => {\n        test.each([\n            { name: 'accepts valid value: in', value: 'in' },\n            { name: 'accepts valid value: inch', value: 'inch' },\n            { name: 'accepts valid value: yd', value: 'yd' },\n            { name: 'accepts valid value: yards', value: 'yards' },\n            { name: 'accepts valid value: ft', value: 'ft' },\n            
{ name: 'accepts valid value: feet', value: 'feet' },\n            { name: 'accepts valid value: km', value: 'km' },\n            { name: 'accepts valid value: kilometers', value: 'kilometers' },\n            { name: 'accepts valid value: NM', value: 'NM' },\n            { name: 'accepts valid value: nmi', value: 'nmi' },\n            {\n                name: 'accepts valid value: nauticalmiles',\n                value: 'nauticalmiles'\n            },\n            { name: 'accepts valid value: mm', value: 'mm' },\n            { name: 'accepts valid value: millimeters', value: 'millimeters' },\n            { name: 'accepts valid value: cm', value: 'cm' },\n            { name: 'accepts valid value: centimeters', value: 'centimeters' },\n            { name: 'accepts valid value: mi', value: 'mi' },\n            { name: 'accepts valid value: miles', value: 'miles' },\n            { name: 'accepts valid value: m', value: 'm' },\n            { name: 'accepts valid value: meters', value: 'meters' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().unit(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_unit' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().unit(value)).toThrow(\n                new Error(\n                    \"The 'unit' parameter should be one of 'NM', 'centimeters', 'cm', 'feet', 'ft', 'in', 'inch', 'kilometers', 'km', 'm', 'meters', 'mi', 'miles', 'millimeters', 'mm', 'nauticalmiles', 'nmi', 'yards', 'yd'\"\n                )\n            );\n        });\n    });\n\n    describe('distanceType() validation', () => {\n        test.each([\n            { name: 'accepts valid value: plane', value: 'plane' },\n            {\n                name: 'accepts valid value: PLANE (case-insensitive)',\n                value: 'PLANE'\n            },\n            { name: 
'accepts valid value: arc', value: 'arc' },\n            {\n                name: 'accepts valid value: ARC (case-insensitive)',\n                value: 'ARC'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().distanceType(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_distance_type' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().distanceType(value)).toThrow(\n                new Error(\n                    \"The 'distance_type' parameter should be one of 'plane' or 'arc'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets origin', () => {\n            const value = getInstance()\n                .origin(geoPoint().object({ lat: 41.12, lon: -71.34 }))\n                .toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geo_distance: {\n                        ranges: [{ to: 100 }],\n                        origin: { lat: 41.12, lon: -71.34 }\n                    }\n                }\n            });\n        });\n\n        test('sets unit', () => {\n            const value = getInstance().unit('km').toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geo_distance: {\n                        ranges: [{ to: 100 }],\n                        unit: 'km'\n                    }\n                }\n            });\n        });\n\n        test('sets distanceType', () => {\n            const value = getInstance().distanceType('plane').toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geo_distance: {\n                        ranges: [{ to: 100 }],\n                        distance_type: 'plane'\n                    }\n                }\n            });\n        
});\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/geo-hash-grid-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoHashGridAggregation } from '../../src';\n\nconst getInstance = () => new GeoHashGridAggregation('my_geo_agg');\n\ndescribe('GeoHashGridAggregation', () => {\n    test('sets type as geohash_grid', () => {\n        const value = new GeoHashGridAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { geohash_grid: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new GeoHashGridAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in GeoHashGridAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new GeoHashGridAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in GeoHashGridAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets precision', () => {\n            const value = getInstance().precision(8).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geohash_grid: {\n                        precision: 8\n                    }\n                }\n            });\n        });\n\n        test('sets size', () => {\n            const value = getInstance().size(10000).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geohash_grid: {\n                        size: 10000\n                    }\n                }\n            });\n        });\n\n        test('sets shardSize', () => {\n            const value = getInstance().shardSize(3).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geohash_grid: {\n                        shard_size: 3\n                    }\n                }\n            });\n        });\n    });\n\n    describe('precision() validation', () => {\n        test.each([\n            { name: 'throws for value 
below minimum (0)', value: 0 },\n            { name: 'throws for value above maximum (13)', value: 13 },\n            { name: 'throws for null value', value: null },\n            { name: 'throws for undefined value', value: undefined }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().precision(value)).toThrow(\n                new Error('`precision` can only be value from 1 to 12.')\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/geo-hex-grid-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoHexGridAggregation } from '../../src';\n\nconst getInstance = () => new GeoHexGridAggregation('my_geo_agg');\n\ndescribe('GeoHexGridAggregation', () => {\n    test('sets type as geohex_grid', () => {\n        const value = new GeoHexGridAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { geohex_grid: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new GeoHexGridAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in GeoHexGridAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new GeoHexGridAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in GeoHexGridAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets precision', () => {\n            const value = getInstance().precision(8).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geohex_grid: {\n                        precision: 8\n                    }\n                }\n            });\n        });\n\n        test('sets size', () => {\n            const value = getInstance().size(10000).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geohex_grid: {\n                        size: 10000\n                    }\n                }\n            });\n        });\n\n        test('sets shardSize', () => {\n            const value = getInstance().shardSize(3).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geohex_grid: {\n                        shard_size: 3\n                    }\n                }\n            });\n        });\n    });\n\n    describe('precision() validation', () => {\n        test.each([\n            { name: 'throws for value below minimum 
(-1)', value: -1 },\n            { name: 'throws for value above maximum (16)', value: 16 },\n            { name: 'throws for null value', value: null },\n            { name: 'throws for undefined value', value: undefined }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().precision(value)).toThrow(\n                new Error('`precision` can only be value from 0 to 15.')\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/geo-tile-grid-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoTileGridAggregation, GeoPoint } from '../../src';\n\nconst getInstance = () => new GeoTileGridAggregation('my_geo_agg');\n\nconst pt1 = new GeoPoint().lat(40.73).lon(-74.1);\nconst pt2 = new GeoPoint().lat(40.1).lon(-71.12);\n\ndescribe('GeoTileGridAggregation', () => {\n    test('sets type as geotile_grid', () => {\n        const value = new GeoTileGridAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { geotile_grid: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new GeoTileGridAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in GeoTileGridAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new GeoTileGridAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in GeoTileGridAggregation')\n        );\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('topLeft()', () => {\n                expect(() => getInstance().topLeft(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n\n            test('bottomRight()', () => {\n                expect(() => getInstance().bottomRight(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n\n            test('topRight()', () => {\n                expect(() => getInstance().topRight(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n      
      });\n\n            test('bottomLeft()', () => {\n                expect(() => getInstance().bottomLeft(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets precision', () => {\n            const value = getInstance().precision(8).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        precision: 8\n                    }\n                }\n            });\n        });\n\n        test('sets size', () => {\n            const value = getInstance().size(10000).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        size: 10000\n                    }\n                }\n            });\n        });\n\n        test('sets shardSize', () => {\n            const value = getInstance().shardSize(3).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        shard_size: 3\n                    }\n                }\n            });\n        });\n    });\n\n    describe('bounds options', () => {\n        test('sets topLeft', () => {\n            const value = getInstance().topLeft(pt1).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        bounds: {\n                            top_left: { lat: 40.73, lon: -74.1 }\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets bottomRight', () => {\n            const value = getInstance().bottomRight(pt2).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        bounds: {\n                            
bottom_right: { lat: 40.1, lon: -71.12 }\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets topRight', () => {\n            const value = getInstance().topRight(pt1).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        bounds: {\n                            top_right: { lat: 40.73, lon: -74.1 }\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets bottomLeft', () => {\n            const value = getInstance().bottomLeft(pt2).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        bounds: {\n                            bottom_left: { lat: 40.1, lon: -71.12 }\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets top', () => {\n            const value = getInstance().top(40.73).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        bounds: {\n                            top: 40.73\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets left', () => {\n            const value = getInstance().left(-74.1).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        bounds: {\n                            left: -74.1\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets bottom', () => {\n            const value = getInstance().bottom(40.1).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        bounds: {\n                            
bottom: 40.1\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets right', () => {\n            const value = getInstance().right(-71.12).toJSON();\n            expect(value).toEqual({\n                my_geo_agg: {\n                    geotile_grid: {\n                        bounds: {\n                            right: -71.12\n                        }\n                    }\n                }\n            });\n        });\n    });\n\n    describe('precision() validation', () => {\n        test.each([\n            { name: 'throws for value below minimum (-1)', value: -1 },\n            { name: 'throws for value above maximum (30)', value: 30 },\n            { name: 'throws for null value', value: null },\n            { name: 'throws for undefined value', value: undefined }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().precision(value)).toThrow(\n                new Error('`precision` can only be value from 0 to 29.')\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/global-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GlobalAggregation } from '../../src';\n\ndescribe('GlobalAggregation', () => {\n    test('sets type as global', () => {\n        const value = new GlobalAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { global: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new GlobalAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in GlobalAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new GlobalAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in GlobalAggregation')\n        );\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/histogram-agg-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { HistogramAggregationBase } from '../../src/aggregations/bucket-aggregations';\n\nconst getInstance = (...args) =>\n    new HistogramAggregationBase('my_agg', 'my_type', ...args);\n\ndescribe('HistogramAggregationBase', () => {\n    describe('options', () => {\n        test('sets interval', () => {\n            const value = getInstance().interval('year').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        interval: 'year'\n                    }\n                }\n            });\n        });\n\n        test('sets format', () => {\n            const value = getInstance().format('####.00').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        format: '####.00'\n                    }\n                }\n            });\n        });\n\n        test('sets offset', () => {\n            const value = getInstance().offset(10).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        offset: 10\n                    }\n                }\n            });\n        });\n\n        test('sets minDocCount', () => {\n            const value = getInstance().minDocCount(1).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        min_doc_count: 1\n                    }\n                }\n            });\n        });\n\n        test('sets missing', () => {\n            const value = getInstance().missing(0).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        missing: 0\n                    }\n                }\n            });\n        });\n\n        test('sets keyed', () => {\n            const value = 
getInstance().keyed(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        keyed: true\n                    }\n                }\n            });\n        });\n\n        test('sets extendedBounds', () => {\n            const value = getInstance().extendedBounds(0, 500).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        extended_bounds: { min: 0, max: 500 }\n                    }\n                }\n            });\n        });\n\n        test('sets hardBounds', () => {\n            const value = getInstance().hardBounds(0, 500).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        hard_bounds: { min: 0, max: 500 }\n                    }\n                }\n            });\n        });\n\n        test('sets order with string', () => {\n            const value = getInstance().order('my_field').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        order: { my_field: 'desc' }\n                    }\n                }\n            });\n        });\n\n        test('sets order with array', () => {\n            const value = getInstance().order('my_field', 'asc').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        order: { my_field: 'asc' }\n                    }\n                }\n            });\n        });\n    });\n\n    test('constructor sets arguments', () => {\n        const value = getInstance('my_field', 10).toJSON();\n        const expected = {\n            my_agg: {\n                my_type: {\n                    field: 'my_field',\n                    interval: 10\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    
describe('order() validation', () => {\n        test.each([\n            {\n                name: 'accepts default (no direction specified)',\n                field: 'my_field',\n                direction: undefined\n            },\n            {\n                name: 'accepts direction: asc',\n                field: 'my_field',\n                direction: 'asc'\n            },\n            {\n                name: 'accepts direction: ASC (case-insensitive)',\n                field: 'my_field',\n                direction: 'ASC'\n            },\n            {\n                name: 'accepts direction: desc',\n                field: 'my_field',\n                direction: 'desc'\n            },\n            {\n                name: 'accepts direction: DESC (case-insensitive)',\n                field: 'my_field',\n                direction: 'DESC'\n            }\n        ])('$name', ({ field, direction }) => {\n            expect(() => getInstance().order(field, direction)).not.toThrow();\n        });\n\n        test.each([\n            {\n                name: 'throws for invalid direction',\n                field: 'my_field',\n                direction: 'invalid_direction'\n            },\n            {\n                name: 'throws for null direction',\n                field: 'my_field',\n                direction: null\n            }\n        ])('$name', ({ field, direction }) => {\n            expect(() => getInstance().order(field, direction)).toThrow(\n                new Error(\n                    \"The 'direction' parameter should be one of 'asc' or 'desc'\"\n                )\n            );\n        });\n    });\n\n    test('multiple order criteria can be set', () => {\n        const value = getInstance('my_field').order('my_field_a', 'asc');\n        let expected = {\n            my_agg: {\n                my_type: {\n                    field: 'my_field',\n                    order: { my_field_a: 'asc' }\n                }\n            }\n        };\n        expect(value.toJSON()).toEqual(expected);\n\n        value.order('my_field_b', 'desc');\n        expected = {\n            my_agg: {\n                my_type: {\n                    field: 'my_field',\n                    order: [{ my_field_a: 'asc' }, { my_field_b: 'desc' }]\n                }\n            }\n        };\n        expect(value.toJSON()).toEqual(expected);\n\n        value.order('my_field_c', 'asc');\n        expected = {\n            my_agg: {\n                my_type: {\n                    field: 'my_field',\n                    order: [\n                        { my_field_a: 'asc' },\n                        { my_field_b: 'desc' },\n                        { my_field_c: 'asc' }\n                    ]\n                }\n            }\n        };\n        expect(value.toJSON()).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/histogram-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { HistogramAggregation } from '../../src';\n\ndescribe('HistogramAggregation', () => {\n    test('sets type as histogram', () => {\n        const value = new HistogramAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { histogram: {} }\n        });\n    });\n\n    test('constructor sets arguments', () => {\n        const value = new HistogramAggregation(\n            'my_agg',\n            'my_field',\n            10\n        ).toJSON();\n        const expected = {\n            my_agg: {\n                histogram: {\n                    field: 'my_field',\n                    interval: 10\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/ip-range-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { IpRangeAggregation } from '../../src';\n\nconst getInstance = (...args) =>\n    new IpRangeAggregation('my_agg', ...args).range({ to: '10.0.0.5' });\n\ndescribe('IpRangeAggregation', () => {\n    test('format cannot be set', () => {\n        expect(() => new IpRangeAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in IpRangeAggregation')\n        );\n    });\n\n    describe('constructor', () => {\n        test('sets type as ip_range', () => {\n            const value = getInstance().toJSON();\n            const expected = {\n                my_agg: {\n                    ip_range: {\n                        ranges: [{ to: '10.0.0.5' }]\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets arguments', () => {\n            const value = getInstance('ip').toJSON(),\n                expected = {\n                    my_agg: {\n                        ip_range: {\n                            field: 'ip',\n                            ranges: [{ to: '10.0.0.5' }]\n                        }\n                    }\n                };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('range() validation', () => {\n        test.each([\n            { name: 'throws for empty object', value: {} },\n            {\n                name: 'throws for object with only key property',\n                value: { key: 'invalid' }\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().range(value)).toThrow(\n                new Error(\n                    'Invalid Range! 
Range must have at least one of from,to,mask'\n                )\n            );\n        });\n\n        test.each([\n            {\n                name: 'does not throw for object with to property',\n                value: { to: '10.0.0.5' }\n            },\n            {\n                name: 'does not throw for object with to and key properties',\n                value: { to: '10.0.0.5', key: 'my_ip_range_key' }\n            },\n            {\n                name: 'does not throw for object with from property',\n                value: { from: '10.0.0.5' }\n            },\n            {\n                name: 'does not throw for object with from and key properties',\n                value: { from: '10.0.0.5', key: 'my_ip_range_key' }\n            },\n            {\n                name: 'does not throw for object with mask property',\n                value: { mask: '10.0.0.0/25' }\n            },\n            {\n                name: 'does not throw for object with mask and key properties',\n                value: { mask: '10.0.0.0/25', key: 'my_ip_range_key' }\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().range(value)).not.toThrow();\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/matrix-stats-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MatrixStatsAggregation } from '../../src';\n\nconst getInstance = fields => new MatrixStatsAggregation('my_agg', fields);\n\ndescribe('MatrixStatsAggregation', () => {\n    test('sets type as matrix_stats', () => {\n        const value = new MatrixStatsAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { matrix_stats: {} }\n        });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('fields()', () => {\n                expect(() => getInstance().fields(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets fields', () => {\n            const value = getInstance().fields(['fieldA', 'fieldB']).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    matrix_stats: {\n                        fields: ['fieldA', 'fieldB']\n                    }\n                }\n            });\n        });\n\n        test('sets mode', () => {\n            const value = getInstance().mode('avg').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    matrix_stats: {\n                        mode: 'avg'\n                    }\n                }\n            });\n        });\n\n        test('sets missing', () => {\n            const value = getInstance().missing({ income: 50000 }).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    matrix_stats: {\n                        missing: { income: 50000 }\n                  
  }\n                }\n            });\n        });\n    });\n\n    test('constructor sets arguments', () => {\n        const valueA = getInstance(['fieldA', 'fieldB']).toJSON();\n        const valueB = getInstance().fields(['fieldA', 'fieldB']).toJSON();\n        expect(valueA).toEqual(valueB);\n\n        const expected = {\n            my_agg: {\n                matrix_stats: {\n                    fields: ['fieldA', 'fieldB']\n                }\n            }\n        };\n        expect(valueA).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/max-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MaxAggregation } from '../../src';\n\ndescribe('MaxAggregation', () => {\n    test('sets type as max', () => {\n        const value = new MaxAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { max: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = new MaxAggregation('my_agg', 'my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    max: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/max-bucket-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MaxBucketAggregation } from '../../src';\n\ndescribe('MaxBucketAggregation', () => {\n    test('sets type as max_bucket', () => {\n        const value = new MaxBucketAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { max_bucket: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = new MaxBucketAggregation(\n                'my_agg',\n                'my_buckets_path'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    max_bucket: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/metrics-agg-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { Script } from '../../src';\nimport { MetricsAggregationBase } from '../../src/aggregations/metrics-aggregations';\n\nconst getInstance = field =>\n    new MetricsAggregationBase('my_agg', 'my_type', field);\n\ndescribe('MetricsAggregationBase', () => {\n    test('can be instantiated', () => {\n        expect(getInstance()).toBeTruthy();\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('script()', () => {\n                expect(() => getInstance().script(value)).toThrow(\n                    new TypeError('Argument must be an instance of Script')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets field', () => {\n            const value = getInstance().field('my_field').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        field: 'my_field'\n                    }\n                }\n            });\n        });\n\n        test('sets script', () => {\n            const scriptInstance = new Script()\n                .lang('groovy')\n                .file('calculate-score')\n                .params({ my_modifier: 2 });\n            const value = getInstance().script(scriptInstance).toJSON();\n            const expected = {\n                my_agg: {\n                    my_type: {\n                        script: scriptInstance.toJSON()\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets missing', () => {\n            const value = getInstance().missing(1).toJSON();\n          
  expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        missing: 1\n                    }\n                }\n            });\n        });\n\n        test('sets format', () => {\n            const value = getInstance().format('####.00').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        format: '####.00'\n                    }\n                }\n            });\n        });\n    });\n\n    test('constructor sets field', () => {\n        const valueA = getInstance('my_field').toJSON();\n        const valueB = getInstance().field('my_field').toJSON();\n        expect(valueA).toEqual(valueB);\n\n        const expected = {\n            my_agg: {\n                my_type: {\n                    field: 'my_field'\n                }\n            }\n        };\n        expect(valueA).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/min-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MinAggregation } from '../../src';\n\ndescribe('MinAggregation', () => {\n    test('sets type as min', () => {\n        const value = new MinAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { min: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = new MinAggregation('my_agg', 'my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    min: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/min-bucket-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MinBucketAggregation } from '../../src';\n\ndescribe('MinBucketAggregation', () => {\n    test('sets type as min_bucket', () => {\n        const value = new MinBucketAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { min_bucket: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = new MinBucketAggregation(\n                'my_agg',\n                'my_buckets_path'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    min_bucket: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/missing-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MissingAggregation } from '../../src';\n\ndescribe('MissingAggregation', () => {\n    test('sets type as missing', () => {\n        const value = new MissingAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { missing: {} }\n        });\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new MissingAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in MissingAggregation')\n        );\n    });\n\n    describe('constructor', () => {\n        test('sets arguments', () => {\n            const value = new MissingAggregation('my_agg', 'my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    missing: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/moving-average-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MovingAverageAggregation } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new MovingAverageAggregation('my_agg', bucketsPath);\n\ndescribe('MovingAverageAggregation', () => {\n    test('sets type as moving_avg', () => {\n        const value = new MovingAverageAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { moving_avg: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new MovingAverageAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in MovingAverageAggregation')\n        );\n    });\n\n    describe('model() validation', () => {\n        test.each([\n            { name: 'accepts valid value: ewma', value: 'ewma' },\n            {\n                name: 'accepts valid value: EWMA (case-insensitive)',\n                value: 'EWMA'\n            },\n            { name: 'accepts valid value: holt', value: 'holt' },\n            {\n                name: 'accepts valid value: HOLT (case-insensitive)',\n                value: 'HOLT'\n            },\n            {\n                name: 'accepts valid value: holt_winters',\n                value: 'holt_winters'\n            },\n            {\n                name: 'accepts valid value: HOLT_WINTERS (case-insensitive)',\n                value: 'HOLT_WINTERS'\n            },\n            { name: 'accepts valid value: linear', value: 'linear' },\n            {\n                name: 'accepts valid value: LINEAR (case-insensitive)',\n                value: 'LINEAR'\n            },\n            { name: 'accepts valid value: simple', value: 'simple' },\n            {\n                name: 'accepts valid value: SIMPLE (case-insensitive)',\n                value: 'SIMPLE'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().model(value)).not.toThrow();\n        });\n\n        
test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_model' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().model(value)).toThrow(\n                new Error(\n                    \"The 'model' parameter should be one of 'ewma', 'holt', 'holt_winters', 'linear', 'simple'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets model', () => {\n            const value = getInstance().model('simple').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_avg: {\n                        model: 'simple'\n                    }\n                }\n            });\n        });\n\n        test('sets window', () => {\n            const value = getInstance().window(7).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_avg: {\n                        window: 7\n                    }\n                }\n            });\n        });\n\n        test('sets minimize', () => {\n            const value = getInstance().minimize(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_avg: {\n                        minimize: true\n                    }\n                }\n            });\n        });\n\n        test('sets settings', () => {\n            const value = getInstance().settings({ alpha: 0.8 }).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_avg: {\n                        settings: { alpha: 0.8 }\n                    }\n                }\n            });\n        });\n\n        test('sets predict', () => {\n            const value = getInstance().predict(10).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_avg: {\n                        predict: 
10\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = getInstance('my_buckets_path').toJSON();\n            const expected = {\n                my_agg: {\n                    moving_avg: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/moving-function-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MovingFunctionAggregation } from '../../src';\n\nconst getInstance = (bucketsPath, window, script) =>\n    new MovingFunctionAggregation('my_agg', bucketsPath, window, script);\n\ndescribe('MovingFunctionAggregation', () => {\n    test('sets type as moving_fn', () => {\n        const value = new MovingFunctionAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { moving_fn: {} }\n        });\n    });\n\n    describe('options', () => {\n        test('sets format', () => {\n            const value = getInstance().format('####.00').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_fn: {\n                        format: '####.00'\n                    }\n                }\n            });\n        });\n\n        test('sets window', () => {\n            const value = getInstance().window(7).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_fn: {\n                        window: 7\n                    }\n                }\n            });\n        });\n\n        test('sets shift', () => {\n            const value = getInstance().shift(0).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_fn: {\n                        shift: 0\n                    }\n                }\n            });\n        });\n\n        test('sets script', () => {\n            const value = getInstance()\n                .script('MovingFunctions.unweightedAvg(values)')\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    moving_fn: {\n                        script: 'MovingFunctions.unweightedAvg(values)'\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const 
value = getInstance(\n                'my_buckets_path',\n                10,\n                'MovingFunctions.unweightedAvg(values)'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    moving_fn: {\n                        buckets_path: 'my_buckets_path',\n                        window: 10,\n                        script: 'MovingFunctions.unweightedAvg(values)'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/nested-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { NestedAggregation } from '../../src';\n\nconst getInstance = (...args) => new NestedAggregation('my_agg', ...args);\n\ndescribe('NestedAggregation', () => {\n    test('sets type as nested', () => {\n        const value = new NestedAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { nested: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new NestedAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in NestedAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new NestedAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in NestedAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets path', () => {\n            const value = getInstance().path('nested_path').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    nested: {\n                        path: 'nested_path'\n                    }\n                }\n            });\n        });\n    });\n\n    test('constructor sets arguments', () => {\n        const value = getInstance('nested_path').toJSON();\n        const expected = {\n            my_agg: {\n                nested: {\n                    path: 'nested_path'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/parent-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ParentAggregation } from '../../src';\n\ndescribe('ParentAggregation', () => {\n    test('sets type as parent', () => {\n        const value = new ParentAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { parent: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new ParentAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in ParentAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new ParentAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in ParentAggregation')\n        );\n    });\n\n    test('constructor sets type', () => {\n        const value = new ParentAggregation('to_questions', 'answer').toJSON();\n        const expected = {\n            to_questions: {\n                parent: {\n                    type: 'answer'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n\n    test('type is set', () => {\n        const value = new ParentAggregation('to_questions')\n            .type('answer')\n            .toJSON();\n        const expected = {\n            to_questions: {\n                parent: {\n                    type: 'answer'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/percentile-ranks-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { PercentileRanksAggregation } from '../../src';\n\nconst getInstance = (field, values) =>\n    new PercentileRanksAggregation('my_agg', field, values);\n\ndescribe('PercentileRanksAggregation', () => {\n    test('sets type as percentile_ranks', () => {\n        const value = new PercentileRanksAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { percentile_ranks: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new PercentileRanksAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in PercentileRanksAggregation')\n        );\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('values()', () => {\n                expect(() => getInstance().values(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets keyed', () => {\n            const value = getInstance().keyed(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentile_ranks: {\n                        keyed: true\n                    }\n                }\n            });\n        });\n\n        test('sets values', () => {\n            const value = getInstance().values([15, 30]).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentile_ranks: {\n                        values: [15, 30]\n                    }\n                }\n            });\n        });\n\n        test('sets tdigest', 
() => {\n            const value = getInstance().tdigest(200).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentile_ranks: {\n                        tdigest: { compression: 200 }\n                    }\n                }\n            });\n        });\n\n        test('sets hdr', () => {\n            const value = getInstance().hdr(3).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentile_ranks: {\n                        hdr: { number_of_significant_value_digits: 3 }\n                    }\n                }\n            });\n        });\n    });\n\n    test('compression same as tdigest', () => {\n        expect(getInstance().tdigest(3).toJSON()).toEqual(\n            getInstance().compression(3).toJSON()\n        );\n    });\n\n    test('constructor sets arguments', () => {\n        const valueA = getInstance('my_field', [15, 30]).toJSON();\n        const valueB = getInstance()\n            .field('my_field')\n            .values([15, 30])\n            .toJSON();\n        expect(valueA).toEqual(valueB);\n\n        const expected = {\n            my_agg: {\n                percentile_ranks: {\n                    field: 'my_field',\n                    values: [15, 30]\n                }\n            }\n        };\n        expect(valueA).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/percentiles-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { PercentilesAggregation } from '../../src';\n\nconst getInstance = field => new PercentilesAggregation('my_agg', field);\n\ndescribe('PercentilesAggregation', () => {\n    test('sets type as percentiles', () => {\n        const value = new PercentilesAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { percentiles: {} }\n        });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('percents()', () => {\n                expect(() => getInstance().percents(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets keyed', () => {\n            const value = getInstance().keyed(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentiles: {\n                        keyed: true\n                    }\n                }\n            });\n        });\n\n        test('sets percents', () => {\n            const value = getInstance().percents([95, 99, 99.9]).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentiles: {\n                        percents: [95, 99, 99.9]\n                    }\n                }\n            });\n        });\n\n        test('sets tdigest', () => {\n            const value = getInstance().tdigest(200).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentiles: {\n                        tdigest: { compression: 200 }\n                    }\n              
  }\n            });\n        });\n\n        test('sets hdr', () => {\n            const value = getInstance().hdr(3).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentiles: {\n                        hdr: { number_of_significant_value_digits: 3 }\n                    }\n                }\n            });\n        });\n    });\n\n    test('compression same as tdigest', () => {\n        expect(getInstance().tdigest(3).toJSON()).toEqual(\n            getInstance().compression(3).toJSON()\n        );\n    });\n\n    test('constructor sets field', () => {\n        const valueA = getInstance('my_field').toJSON();\n        const valueB = getInstance().field('my_field').toJSON();\n        expect(valueA).toEqual(valueB);\n\n        const expected = {\n            my_agg: {\n                percentiles: {\n                    field: 'my_field'\n                }\n            }\n        };\n        expect(valueA).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/percentiles-bucket-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { PercentilesBucketAggregation } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new PercentilesBucketAggregation('my_agg', bucketsPath);\n\ndescribe('PercentilesBucketAggregation', () => {\n    test('sets type as percentiles_bucket', () => {\n        const value = new PercentilesBucketAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { percentiles_bucket: {} }\n        });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('percents()', () => {\n                expect(() => getInstance().percents(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets percents', () => {\n            const value = getInstance('my_buckets_path')\n                .percents([25.0, 50.0, 75.0])\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    percentiles_bucket: {\n                        buckets_path: 'my_buckets_path',\n                        percents: [25.0, 50.0, 75.0]\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = getInstance('my_buckets_path').toJSON();\n            const expected = {\n                my_agg: {\n                    percentiles_bucket: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n   
     });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/pipeline-agg-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { PipelineAggregationBase } from '../../src/aggregations/pipeline-aggregations';\n\nconst getInstance = bucketsPath =>\n    new PipelineAggregationBase('my_agg', 'my_type', '', bucketsPath);\n\ndescribe('PipelineAggregationBase', () => {\n    describe('gapPolicy() validation', () => {\n        test.each([\n            { name: 'accepts valid value: skip', value: 'skip' },\n            {\n                name: 'accepts valid value: SKIP (case-insensitive)',\n                value: 'SKIP'\n            },\n            {\n                name: 'accepts valid value: insert_zeros',\n                value: 'insert_zeros'\n            },\n            {\n                name: 'accepts valid value: INSERT_ZEROS (case-insensitive)',\n                value: 'INSERT_ZEROS'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().gapPolicy(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_gap_policy' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().gapPolicy(value)).toThrow(\n                new Error(\n                    \"The 'gap_policy' parameter should be one of 'skip' or 'insert_zeros'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets bucketsPath', () => {\n            const value = getInstance().bucketsPath('my_buckets_path').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            });\n        });\n\n        test('sets gapPolicy', () => {\n            const value = getInstance().gapPolicy('insert_zeros').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n   
                 my_type: {\n                        gap_policy: 'insert_zeros'\n                    }\n                }\n            });\n        });\n\n        test('sets format', () => {\n            const value = getInstance().format('my_format').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        format: 'my_format'\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = getInstance('my_buckets_path').toJSON();\n            const expected = {\n                my_agg: {\n                    my_type: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/range-agg-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { RangeAggregationBase } from '../../src/aggregations/bucket-aggregations';\n\nconst getInstance = (...args) =>\n    new RangeAggregationBase('my_agg', 'my_type', ...args).range({\n        from: 10,\n        to: 20\n    });\n\ndescribe('RangeAggregationBase', () => {\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('range()', () => {\n                expect(() => getInstance().range(value)).toThrow(\n                    new TypeError('Argument must be an instance of Object')\n                );\n            });\n\n            test('ranges()', () => {\n                expect(() => getInstance().ranges(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets format', () => {\n            const value = getInstance().format('MM-yyy').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        ranges: [{ from: 10, to: 20 }],\n                        format: 'MM-yyy'\n                    }\n                }\n            });\n        });\n\n        test('sets missing', () => {\n            const value = getInstance().missing('01-1970').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        ranges: [{ from: 10, to: 20 }],\n                        missing: '01-1970'\n                    }\n                }\n            });\n        });\n\n        test('sets keyed', () => {\n            const value = 
getInstance().keyed(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        ranges: [{ from: 10, to: 20 }],\n                        keyed: true\n                    }\n                }\n            });\n        });\n    });\n\n    test('empty ranges throws', () => {\n        expect(() =>\n            new RangeAggregationBase('my_agg', 'my_type').toJSON()\n        ).toThrow(new Error('`ranges` cannot be empty.'));\n    });\n\n    describe('range() validation', () => {\n        test.each([\n            { name: 'throws for empty object', value: {} },\n            {\n                name: 'throws for object with only key property',\n                value: { key: 'invalid' }\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().range(value)).toThrow(\n                new Error(\n                    'Invalid Range! Range must have at least one of from,to'\n                )\n            );\n        });\n\n        test.each([\n            {\n                name: 'does not throw for object with to property',\n                value: { to: 50 }\n            },\n            {\n                name: 'does not throw for object with to and key properties',\n                value: { to: 50, key: 'fifty' }\n            },\n            {\n                name: 'does not throw for object with from property',\n                value: { from: 10 }\n            },\n            {\n                name: 'does not throw for object with from and key properties',\n                value: { from: 10, key: 'ten' }\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().range(value)).not.toThrow();\n        });\n    });\n\n    test('ranges are set', () => {\n        const valueA = getInstance()\n            .range({ from: 20, to: 30 })\n            .range({ from: 30, to: 40 })\n            .range({ from: 40, to: 50 })\n            
.toJSON();\n        const valueB = getInstance()\n            .ranges([\n                { from: 20, to: 30 },\n                { from: 30, to: 40 },\n                { from: 40, to: 50 }\n            ])\n            .toJSON();\n        const expected = {\n            my_agg: {\n                my_type: {\n                    ranges: [\n                        { from: 10, to: 20 },\n                        { from: 20, to: 30 },\n                        { from: 30, to: 40 },\n                        { from: 40, to: 50 }\n                    ]\n                }\n            }\n        };\n        expect(valueA).toEqual(valueB);\n        expect(valueA).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/range-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { RangeAggregation } from '../../src';\n\ndescribe('RangeAggregation', () => {\n    test('sets type as range', () => {\n        const value = new RangeAggregation('my_agg', 'my_field')\n            .range({ from: 10, to: 20 })\n            .toJSON();\n        const expected = {\n            my_agg: {\n                range: {\n                    field: 'my_field',\n                    ranges: [{ from: 10, to: 20 }]\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/rare-terms-aggregation.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { RareTermsAggregation } from '../../src';\n\nconst getInstance = field => new RareTermsAggregation('my_agg', field);\n\ndescribe('RareTermsAggregation', () => {\n    test('sets type as rare_terms', () => {\n        const value = new RareTermsAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { rare_terms: {} }\n        });\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new RareTermsAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in RareTermsAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets maxDocCount', () => {\n            const value = getInstance('my_field').maxDocCount(42).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    rare_terms: {\n                        field: 'my_field',\n                        max_doc_count: 42\n                    }\n                }\n            });\n        });\n\n        test('sets precision', () => {\n            const value = getInstance('my_field').precision(0.001).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    rare_terms: {\n                        field: 'my_field',\n                        precision: 0.001\n                    }\n                }\n            });\n        });\n\n        test('sets include', () => {\n            const value = getInstance('my_field').include('swi*').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    rare_terms: {\n                        field: 'my_field',\n                        include: 'swi*'\n                    }\n                }\n            });\n        });\n\n        test('sets exclude', () => {\n            const value = getInstance('my_field').exclude('electro*').toJSON();\n            expect(value).toEqual({\n                
my_agg: {\n                    rare_terms: {\n                        field: 'my_field',\n                        exclude: 'electro*'\n                    }\n                }\n            });\n        });\n\n        test('sets missing', () => {\n            const value = getInstance('my_field').missing('N/A').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    rare_terms: {\n                        field: 'my_field',\n                        missing: 'N/A'\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('tries to construct agg name if not given', () => {\n            const value = new RareTermsAggregation(null, 'myfield').toJSON();\n            const expected = {\n                agg_rare_terms_myfield: {\n                    rare_terms: {\n                        field: 'myfield'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('maxDocCount() validation', () => {\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for undefined value', value: undefined },\n            { name: 'throws for value below minimum (0)', value: 0 },\n            { name: 'throws for value above maximum (101)', value: 101 }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().maxDocCount(value)).toThrow(\n                new Error('`maxDocCount` can only be value from 1 to 100.')\n            );\n        });\n    });\n\n    describe('precision() validation', () => {\n        test('throws for value below minimum', () => {\n            expect(() => getInstance().precision(0.000001)).toThrow(\n                new Error('`precision` must be greater than 0.00001.')\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/reverse-nested-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ReverseNestedAggregation } from '../../src';\n\nconst getInstance = (...args) =>\n    new ReverseNestedAggregation('my_agg', ...args);\n\ndescribe('ReverseNestedAggregation', () => {\n    test('sets type as reverse_nested', () => {\n        const value = new ReverseNestedAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { reverse_nested: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new ReverseNestedAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in ReverseNestedAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new ReverseNestedAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in ReverseNestedAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets path', () => {\n            const value = getInstance().path('reverse_nested_path').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    reverse_nested: {\n                        path: 'reverse_nested_path'\n                    }\n                }\n            });\n        });\n    });\n\n    test('constructor sets arguments', () => {\n        const value = getInstance('reverse_nested_path').toJSON();\n        const expected = {\n            my_agg: {\n                reverse_nested: {\n                    path: 'reverse_nested_path'\n                }\n            }\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/sampler-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SamplerAggregation } from '../../src';\n\nconst getInstance = (...args) => new SamplerAggregation('my_agg', ...args);\n\ndescribe('SamplerAggregation', () => {\n    test('sets type as sampler', () => {\n        const value = new SamplerAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { sampler: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new SamplerAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in SamplerAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new SamplerAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in SamplerAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets shardSize', () => {\n            const value = getInstance().shardSize(200).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    sampler: {\n                        shard_size: 200\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/scripted-metric-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ScriptedMetricAggregation } from '../../src';\n\nconst getInstance = () => new ScriptedMetricAggregation('my_agg');\n\ndescribe('ScriptedMetricAggregation', () => {\n    test('sets type as scripted_metric', () => {\n        const value = new ScriptedMetricAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { scripted_metric: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new ScriptedMetricAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in ScriptedMetricAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new ScriptedMetricAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in ScriptedMetricAggregation')\n        );\n    });\n\n    test('missing cannot be set', () => {\n        expect(() => new ScriptedMetricAggregation('my_agg').missing()).toThrow(\n            new Error('missing is not supported in ScriptedMetricAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets initScript', () => {\n            const value = getInstance()\n                .initScript('params._agg.transactions = []')\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    scripted_metric: {\n                        init_script: 'params._agg.transactions = []'\n                    }\n                }\n            });\n        });\n\n        test('sets mapScript', () => {\n            const value = getInstance()\n                .mapScript(\n                    \"params._agg.transactions.add(doc.type.value == 'sale' ? 
doc.amount.value : -1 * doc.amount.value)\"\n                )\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    scripted_metric: {\n                        map_script:\n                            \"params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)\"\n                    }\n                }\n            });\n        });\n\n        test('sets combineScript', () => {\n            const value = getInstance()\n                .combineScript(\n                    'double profit = 0; for (t in params._agg.transactions) { profit += t } return profit'\n                )\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    scripted_metric: {\n                        combine_script:\n                            'double profit = 0; for (t in params._agg.transactions) { profit += t } return profit'\n                    }\n                }\n            });\n        });\n\n        test('sets reduceScript', () => {\n            const value = getInstance()\n                .reduceScript(\n                    'double profit = 0; for (a in params._aggs) { profit += a } return profit'\n                )\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    scripted_metric: {\n                        reduce_script:\n                            'double profit = 0; for (a in params._aggs) { profit += a } return profit'\n                    }\n                }\n            });\n        });\n\n        test('sets params', () => {\n            // Apparently if you specify script parameters then you must specify \"_agg\": {}.\n            const value = getInstance()\n                .params({ field: 'amount', _agg: {} })\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    scripted_metric: {\n           
             params: { field: 'amount', _agg: {} }\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/serial-differencing-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SerialDifferencingAggregation } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new SerialDifferencingAggregation('my_agg', bucketsPath);\n\ndescribe('SerialDifferencingAggregation', () => {\n    test('sets type as serial_diff', () => {\n        const value = new SerialDifferencingAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { serial_diff: {} }\n        });\n    });\n\n    describe('options', () => {\n        test('sets lag', () => {\n            const value = getInstance('my_buckets_path').lag(2).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    serial_diff: {\n                        buckets_path: 'my_buckets_path',\n                        lag: 2\n                    }\n                }\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const value = getInstance('my_buckets_path').toJSON();\n            const expected = {\n                my_agg: {\n                    serial_diff: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/significant-agg-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TermQuery, Script } from '../../src';\nimport { SignificantAggregationBase } from '../../src/aggregations/bucket-aggregations';\n\nconst getInstance = (...args) =>\n    new SignificantAggregationBase('my_agg', 'my_type', '', ...args);\n\nconst script = new Script()\n    .lang('groovy')\n    .file('calculate-score')\n    .params({ my_modifier: 2 });\n\ndescribe('SignificantAggregationBase', () => {\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('scriptHeuristic()', () => {\n                expect(() => getInstance().scriptHeuristic(value)).toThrow(\n                    new TypeError('Argument must be an instance of Script')\n                );\n            });\n\n            test('backgroundFilter()', () => {\n                expect(() => getInstance().backgroundFilter(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets jlh', () => {\n            const value = getInstance().jlh().toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        jlh: {}\n                    }\n                }\n            });\n        });\n\n        test('sets mutualInformation', () => {\n            const value = getInstance().mutualInformation().toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        mutual_information: {\n                            include_negatives: true,\n                            background_is_superset: 
true\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets mutualInformation with parameters', () => {\n            const value = getInstance().mutualInformation(true, false).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        mutual_information: {\n                            include_negatives: true,\n                            background_is_superset: false\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets chiSquare', () => {\n            const value = getInstance().chiSquare().toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        chi_square: {\n                            include_negatives: true,\n                            background_is_superset: true\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets chiSquare with parameters', () => {\n            const value = getInstance().chiSquare(true, false).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        chi_square: {\n                            include_negatives: true,\n                            background_is_superset: false\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets gnd', () => {\n            const value = getInstance().gnd().toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        gnd: { background_is_superset: true }\n                    }\n                }\n            });\n        });\n\n        test('sets gnd with parameter', () => {\n            const value = getInstance().gnd(false).toJSON();\n            
expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        gnd: { background_is_superset: false }\n                    }\n                }\n            });\n        });\n\n        test('sets percentage', () => {\n            const value = getInstance().percentage().toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        percentage: {}\n                    }\n                }\n            });\n        });\n\n        test('sets scriptHeuristic', () => {\n            const value = getInstance().scriptHeuristic(script).toJSON();\n            const expected = {\n                my_agg: {\n                    my_type: {\n                        script_heuristic: { script: script.toJSON() }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets backgroundFilter', () => {\n            const termQuery = new TermQuery('user', 'kimchy');\n            const value = getInstance().backgroundFilter(termQuery).toJSON();\n            const expected = {\n                my_agg: {\n                    my_type: {\n                        background_filter: termQuery.toJSON()\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    test('script cannot be set', () => {\n        expect(() =>\n            new SignificantAggregationBase('my_agg', 'my_type').script()\n        ).toThrow(\n            new Error('script is not supported in SignificantAggregationBase')\n        );\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/significant-terms-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SignificantTermsAggregation } from '../../src';\n\ndescribe('SignificantTermsAggregation', () => {\n    test('sets type as significant_terms', () => {\n        const value = new SignificantTermsAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { significant_terms: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = new SignificantTermsAggregation(\n                'my_agg',\n                'my_field'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    significant_terms: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    test('script cannot be set', () => {\n        expect(() =>\n            new SignificantTermsAggregation('my_agg').script()\n        ).toThrow(\n            new Error('script is not supported in SignificantTermsAggregation')\n        );\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/significant-text-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SignificantTextAggregation } from '../../src';\n\nconst getInstance = (...args) =>\n    new SignificantTextAggregation('my_agg', ...args);\n\ndescribe('SignificantTextAggregation', () => {\n    test('sets type as significant_text', () => {\n        const value = new SignificantTextAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { significant_text: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = new SignificantTextAggregation(\n                'my_agg',\n                'my_field'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    significant_text: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sets filterDuplicateText', () => {\n            const value = getInstance().filterDuplicateText(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    significant_text: {\n                        filter_duplicate_text: true\n                    }\n                }\n            });\n        });\n\n        test('sets sourceFields', () => {\n            const value = getInstance()\n                .sourceFields(['content', 'title'])\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    significant_text: {\n                        source_fields: ['content', 'title']\n                    }\n                }\n            });\n        });\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new SignificantTextAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in SignificantTextAggregation')\n        );\n  
  });\n\n    test('missing cannot be set', () => {\n        expect(() =>\n            new SignificantTextAggregation('my_agg').missing()\n        ).toThrow(\n            new Error('missing is not supported in SignificantTextAggregation')\n        );\n    });\n\n    test('executionHint cannot be set', () => {\n        expect(() =>\n            new SignificantTextAggregation('my_agg').executionHint()\n        ).toThrow(\n            new Error(\n                'executionHint is not supported in SignificantTextAggregation'\n            )\n        );\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/stats-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { StatsAggregation } from '../../src';\n\ndescribe('StatsAggregation', () => {\n    test('sets type as stats', () => {\n        const value = new StatsAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { stats: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = new StatsAggregation('my_agg', 'my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    stats: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/stats-bucket-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { StatsBucketAggregation } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new StatsBucketAggregation('my_agg', bucketsPath);\n\ndescribe('StatsBucketAggregation', () => {\n    test('sets type as stats_bucket', () => {\n        const value = new StatsBucketAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { stats_bucket: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const valueA = getInstance('my_buckets_path').toJSON();\n            const valueB = getInstance()\n                .bucketsPath('my_buckets_path')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                my_agg: {\n                    stats_bucket: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/sum-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SumAggregation } from '../../src';\n\ndescribe('SumAggregation', () => {\n    test('sets type as sum', () => {\n        const value = new SumAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { sum: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = new SumAggregation('my_agg', 'my_field').toJSON();\n            const expected = {\n                my_agg: {\n                    sum: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/sum-bucket-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SumBucketAggregation } from '../../src';\n\nconst getInstance = bucketsPath =>\n    new SumBucketAggregation('my_agg', bucketsPath);\n\ndescribe('SumBucketAggregation', () => {\n    test('sets type as sum_bucket', () => {\n        const value = new SumBucketAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { sum_bucket: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets buckets_path', () => {\n            const valueA = getInstance('my_buckets_path').toJSON();\n            const valueB = getInstance()\n                .bucketsPath('my_buckets_path')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                my_agg: {\n                    sum_bucket: {\n                        buckets_path: 'my_buckets_path'\n                    }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/terms-agg-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TermsAggregationBase } from '../../src/aggregations/bucket-aggregations';\n\nconst getInstance = (...args) =>\n    new TermsAggregationBase('my_agg', 'my_type', '', ...args);\n\ndescribe('TermsAggregationBase', () => {\n    describe('executionHint() validation', () => {\n        test.each([\n            { name: 'accepts valid value: map', value: 'map' },\n            {\n                name: 'accepts valid value: MAP (case-insensitive)',\n                value: 'MAP'\n            },\n            {\n                name: 'accepts valid value: global_ordinals',\n                value: 'global_ordinals'\n            },\n            {\n                name: 'accepts valid value: GLOBAL_ORDINALS (case-insensitive)',\n                value: 'GLOBAL_ORDINALS'\n            },\n            {\n                name: 'accepts valid value: global_ordinals_hash',\n                value: 'global_ordinals_hash'\n            },\n            {\n                name: 'accepts valid value: GLOBAL_ORDINALS_HASH (case-insensitive)',\n                value: 'GLOBAL_ORDINALS_HASH'\n            },\n            {\n                name: 'accepts valid value: global_ordinals_low_cardinality',\n                value: 'global_ordinals_low_cardinality'\n            },\n            {\n                name: 'accepts valid value: GLOBAL_ORDINALS_LOW_CARDINALITY (case-insensitive)',\n                value: 'GLOBAL_ORDINALS_LOW_CARDINALITY'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().executionHint(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid value',\n                value: 'invalid_execution_hint'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().executionHint(value)).toThrow(\n                new Error(\n 
                   \"The 'execution_hint' parameter should be one of 'global_ordinals', 'global_ordinals_hash', 'global_ordinals_low_cardinality', 'map'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets format', () => {\n            const value = getInstance().format('####.00').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        format: '####.00'\n                    }\n                }\n            });\n        });\n\n        test('sets minDocCount', () => {\n            const value = getInstance().minDocCount(5).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        min_doc_count: 5\n                    }\n                }\n            });\n        });\n\n        test('sets shardMinDocCount', () => {\n            const value = getInstance().shardMinDocCount(2).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        shard_min_doc_count: 2\n                    }\n                }\n            });\n        });\n\n        test('sets size', () => {\n            const value = getInstance().size(20).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        size: 20\n                    }\n                }\n            });\n        });\n\n        test('sets shardSize', () => {\n            const value = getInstance().shardSize(-1).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        shard_size: -1\n                    }\n                }\n            });\n        });\n\n        test('sets missing', () => {\n            const value = getInstance().missing(42).toJSON();\n            expect(value).toEqual({\n                my_agg: 
{\n                    my_type: {\n                        missing: 42\n                    }\n                }\n            });\n        });\n\n        test('sets include', () => {\n            const value = getInstance().include('.*sport.*').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        include: '.*sport.*'\n                    }\n                }\n            });\n        });\n\n        test('sets exclude', () => {\n            const value = getInstance().exclude('water_.*').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    my_type: {\n                        exclude: 'water_.*'\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/terms-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TermsAggregation } from '../../src';\n\nconst getInstance = field => new TermsAggregation('my_agg', field);\n\ndescribe('TermsAggregation', () => {\n    test('sets type as terms', () => {\n        const value = new TermsAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { terms: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('tries to construct agg name if not given', () => {\n            const value = new TermsAggregation(null, 'myfield').toJSON();\n            const expected = {\n                agg_terms_myfield: {\n                    terms: {\n                        field: 'myfield'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('collectMode() validation', () => {\n        test.each([\n            { name: 'accepts valid value: depth_first', value: 'depth_first' },\n            {\n                name: 'accepts valid value: DEPTH_FIRST (case-insensitive)',\n                value: 'DEPTH_FIRST'\n            },\n            {\n                name: 'accepts valid value: breadth_first',\n                value: 'breadth_first'\n            },\n            {\n                name: 'accepts valid value: BREADTH_FIRST (case-insensitive)',\n                value: 'BREADTH_FIRST'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().collectMode(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_collect_mode' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().collectMode(value)).toThrow(\n                new Error(\n                    \"The 'mode' parameter should be one of 'breadth_first' or 'depth_first'\"\n                )\n          
  );\n        });\n    });\n\n    describe('options', () => {\n        test('sets showTermDocCountError', () => {\n            const value = getInstance().showTermDocCountError(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    terms: {\n                        show_term_doc_count_error: true\n                    }\n                }\n            });\n        });\n\n        test('sets collectMode', () => {\n            const value = getInstance().collectMode('breadth_first').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    terms: {\n                        collect_mode: 'breadth_first'\n                    }\n                }\n            });\n        });\n\n        test('sets order with field name', () => {\n            const value = getInstance().order('my_field').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    terms: {\n                        order: { my_field: 'desc' }\n                    }\n                }\n            });\n        });\n\n        test('sets order with field name and direction', () => {\n            const value = getInstance().order('my_field', 'asc').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    terms: {\n                        order: { my_field: 'asc' }\n                    }\n                }\n            });\n        });\n\n        test('include partition is set', () => {\n            const value = getInstance('my_field')\n                .includePartition(0, 20)\n                .toJSON();\n            const expected = {\n                my_agg: {\n                    terms: {\n                        field: 'my_field',\n                        include: { partition: 0, num_partitions: 20 }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('multiple order criteria 
can be set', () => {\n            const value = getInstance('my_field').order('my_field_a', 'asc');\n            let expected = {\n                my_agg: {\n                    terms: {\n                        field: 'my_field',\n                        order: { my_field_a: 'asc' }\n                    }\n                }\n            };\n            expect(value.toJSON()).toEqual(expected);\n\n            value.order('my_field_b', 'desc');\n            expected = {\n                my_agg: {\n                    terms: {\n                        field: 'my_field',\n                        order: [{ my_field_a: 'asc' }, { my_field_b: 'desc' }]\n                    }\n                }\n            };\n            expect(value.toJSON()).toEqual(expected);\n\n            value.order('my_field_c', 'asc');\n            expected = {\n                my_agg: {\n                    terms: {\n                        field: 'my_field',\n                        order: [\n                            { my_field_a: 'asc' },\n                            { my_field_b: 'desc' },\n                            { my_field_c: 'asc' }\n                        ]\n                    }\n                }\n            };\n            expect(value.toJSON()).toEqual(expected);\n        });\n    });\n\n    describe('order direction validation', () => {\n        test.each([\n            {\n                name: 'accepts order with field only',\n                value: 'my_field',\n                direction: undefined\n            },\n            {\n                name: 'accepts order with asc',\n                value: 'my_field',\n                direction: 'asc'\n            },\n            {\n                name: 'accepts order with ASC',\n                value: 'my_field',\n                direction: 'ASC'\n            },\n            {\n                name: 'accepts order with desc',\n                value: 'my_field',\n                direction: 'desc'\n            },\n            
{\n                name: 'accepts order with DESC',\n                value: 'my_field',\n                direction: 'DESC'\n            }\n        ])('$name', ({ value, direction }) => {\n            expect(() => getInstance().order(value, direction)).not.toThrow();\n        });\n\n        test.each([\n            {\n                name: 'throws for invalid direction',\n                direction: 'invalid_direction'\n            },\n            { name: 'throws for null direction', direction: null }\n        ])('$name', ({ direction }) => {\n            expect(() => getInstance().order('my_field', direction)).toThrow(\n                new Error(\n                    \"The 'direction' parameter should be one of 'asc' or 'desc'\"\n                )\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/top-hits-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TopHitsAggregation, Sort, Highlight, Script } from '../../src';\n\nconst getInstance = () => new TopHitsAggregation('my_agg');\n\nconst sortChannel = new Sort('channel', 'desc');\nconst sortCategories = new Sort('categories', 'desc');\n\nconst scriptA = new Script('inline', \"doc['my_field_name'].value * 2\").lang(\n    'painless'\n);\nconst scriptB = new Script('inline', \"doc['my_field_name'].value * factor\")\n    .lang('painless')\n    .params({ factor: 2.0 });\n\ndescribe('TopHitsAggregation', () => {\n    test('sets type as top_hits', () => {\n        const value = new TopHitsAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { top_hits: {} }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new TopHitsAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in TopHitsAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new TopHitsAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in TopHitsAggregation')\n        );\n    });\n\n    test('missing cannot be set', () => {\n        expect(() => new TopHitsAggregation('my_agg').missing()).toThrow(\n            new Error('missing is not supported in TopHitsAggregation')\n        );\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new TopHitsAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in TopHitsAggregation')\n        );\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('sort()', () => {\n       
         expect(() => getInstance().sort(value)).toThrow(\n                    new TypeError('Argument must be an instance of Sort')\n                );\n            });\n\n            test('highlight()', () => {\n                expect(() => getInstance().highlight(value)).toThrow(\n                    new TypeError('Argument must be an instance of Highlight')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets from', () => {\n            const value = getInstance().from(10).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        from: 10\n                    }\n                }\n            });\n        });\n\n        test('sets size', () => {\n            const value = getInstance().size(10).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        size: 10\n                    }\n                }\n            });\n        });\n\n        test('sets sort', () => {\n            const value = getInstance().sort(sortChannel).toJSON();\n            const expected = {\n                my_agg: {\n                    top_hits: {\n                        sort: [sortChannel.toJSON()]\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets sorts', () => {\n            const value = getInstance()\n                .sorts([sortChannel, sortCategories])\n                .toJSON();\n            const expected = {\n                my_agg: {\n                    top_hits: {\n                        sort: [sortChannel.toJSON(), sortCategories.toJSON()]\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets trackScores', () => {\n            const value = getInstance().trackScores(true).toJSON();\n           
 expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        track_scores: true\n                    }\n                }\n            });\n        });\n\n        test('sets version', () => {\n            const value = getInstance().version(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        version: true\n                    }\n                }\n            });\n        });\n\n        test('sets explain', () => {\n            const value = getInstance().explain(true).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        explain: true\n                    }\n                }\n            });\n        });\n\n        test('sets highlight', () => {\n            const highlightInstance = new Highlight(['content']).type(\n                'plain',\n                'content'\n            );\n            const value = getInstance().highlight(highlightInstance).toJSON();\n            const expected = {\n                my_agg: {\n                    top_hits: {\n                        highlight: highlightInstance.toJSON()\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets source with string', () => {\n            const value = getInstance().source('obj.*').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        _source: 'obj.*'\n                    }\n                }\n            });\n        });\n\n        test('sets source with false', () => {\n            const value = getInstance().source(false).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        _source: false\n                    }\n                }\n            
});\n        });\n\n        test('sets source with array', () => {\n            const value = getInstance().source(['obj1.*', 'obj2.*']).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        _source: ['obj1.*', 'obj2.*']\n                    }\n                }\n            });\n        });\n\n        test('sets source with object', () => {\n            const value = getInstance()\n                .source({\n                    includes: ['obj1.*', 'obj2.*'],\n                    excludes: ['*.description']\n                })\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        _source: {\n                            includes: ['obj1.*', 'obj2.*'],\n                            excludes: ['*.description']\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets stored_fields(str) option', () => {\n            const value = getInstance().storedFields('_none_').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        stored_fields: '_none_'\n                    }\n                }\n            });\n        });\n\n        test('sets stored_fields(arr) option', () => {\n            const value = getInstance()\n                .storedFields(['user', 'postDate'])\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        stored_fields: ['user', 'postDate']\n                    }\n                }\n            });\n        });\n\n        test('sets scriptField', () => {\n            const value = getInstance().scriptField('test1', scriptA).toJSON();\n            const expected = {\n                my_agg: {\n                    top_hits: {\n                        
script_fields: { test1: { script: scriptA.toJSON() } }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets scriptFields', () => {\n            const value = getInstance()\n                .scriptFields({\n                    test1: scriptA,\n                    test2: scriptB\n                })\n                .toJSON();\n            const expected = {\n                my_agg: {\n                    top_hits: {\n                        script_fields: {\n                            test1: { script: scriptA.toJSON() },\n                            test2: { script: scriptB.toJSON() }\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets docvalueFields', () => {\n            const value = getInstance()\n                .docvalueFields(['test1', 'test2'])\n                .toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    top_hits: {\n                        docvalue_fields: ['test1', 'test2']\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/value-count-agg.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ValueCountAggregation } from '../../src';\n\ndescribe('ValueCountAggregation', () => {\n    test('sets type as value_count', () => {\n        const value = new ValueCountAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { value_count: {} }\n        });\n    });\n\n    test('format cannot be set', () => {\n        expect(() => new ValueCountAggregation('my_agg').format()).toThrow(\n            new Error('format is not supported in ValueCountAggregation')\n        );\n    });\n\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = new ValueCountAggregation(\n                'my_agg',\n                'my_field'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    value_count: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/variable-width-histogram-aggregation.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { VariableWidthHistogramAggregation } from '../../src';\n\ndescribe('VariableWidthHistogramAggregation', () => {\n    test('sets type as variable_width_histogram', () => {\n        const value = new VariableWidthHistogramAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: { variable_width_histogram: {} }\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets arguments', () => {\n            const value = new VariableWidthHistogramAggregation(\n                    'price',\n                    'lowestPrice',\n                    10\n                ).toJSON(),\n                expected = {\n                    price: {\n                        variable_width_histogram: {\n                            field: 'lowestPrice',\n                            buckets: 10\n                        }\n                    }\n                };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sets buckets', () => {\n            const value = new VariableWidthHistogramAggregation(\n                'price',\n                'lowestPrice',\n                10\n            )\n                .buckets(20)\n                .toJSON();\n            const expected = {\n                price: {\n                    variable_width_histogram: {\n                        field: 'lowestPrice',\n                        buckets: 20\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/aggregations-test/weighted-average-aggregation.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { WeightedAverageAggregation, Script } from '../../src';\n\nconst getInstance = (...args) =>\n    new WeightedAverageAggregation('my_agg', ...args);\n\ndescribe('WeightedAverageAggregation', () => {\n    test('sets type as weighted_avg', () => {\n        const value = new WeightedAverageAggregation('my_agg').toJSON();\n        expect(value).toEqual({\n            my_agg: {\n                weighted_avg: {\n                    value: {},\n                    weight: {}\n                }\n            }\n        });\n    });\n\n    test('field cannot be set', () => {\n        expect(() => new WeightedAverageAggregation('my_agg').field()).toThrow(\n            new Error('field is not supported in WeightedAverageAggregation')\n        );\n    });\n\n    test('script cannot be set', () => {\n        expect(() => new WeightedAverageAggregation('my_agg').script()).toThrow(\n            new Error('script is not supported in WeightedAverageAggregation')\n        );\n    });\n\n    test('missing cannot be set', () => {\n        expect(() =>\n            new WeightedAverageAggregation('my_agg').missing()\n        ).toThrow(\n            new Error('missing is not supported in WeightedAverageAggregation')\n        );\n    });\n\n    describe('options', () => {\n        test('sets weight with field name', () => {\n            const value = getInstance().weight('my_weight_field').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    weighted_avg: {\n                        weight: { field: 'my_weight_field' },\n                        value: {}\n                    }\n                }\n            });\n        });\n\n        test('sets value with field name', () => {\n            const value = getInstance().value('my_value_field').toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    weighted_avg: {\n                        
value: { field: 'my_value_field' },\n                        weight: {}\n                    }\n                }\n            });\n        });\n\n        test('sets weight with field and missing', () => {\n            const value = getInstance().weight('my_weight_field', 20).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    weighted_avg: {\n                        weight: { field: 'my_weight_field', missing: 20 },\n                        value: {}\n                    }\n                }\n            });\n        });\n\n        test('sets value with field and missing', () => {\n            const value = getInstance().value('my_value_field', 10).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    weighted_avg: {\n                        value: { field: 'my_value_field', missing: 10 },\n                        weight: {}\n                    }\n                }\n            });\n        });\n\n        test('sets value with script', () => {\n            const scriptInstance = new Script('inline', \"doc['field'].value\");\n            const value = getInstance().value(scriptInstance).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    weighted_avg: {\n                        value: { script: { inline: \"doc['field'].value\" } },\n                        weight: {}\n                    }\n                }\n            });\n        });\n\n        test('sets weight with script', () => {\n            const scriptInstance = new Script('inline', \"doc['field'].value\");\n            const value = getInstance().weight(scriptInstance).toJSON();\n            expect(value).toEqual({\n                my_agg: {\n                    weighted_avg: {\n                        weight: { script: { inline: \"doc['field'].value\" } },\n                        value: {}\n                    }\n                }\n            });\n        });\n    });\n\n    
describe('validation', () => {\n        test('throws if value is not a script or string', () => {\n            expect(() =>\n                new WeightedAverageAggregation('my_agg').value(10, 10)\n            ).toThrow(\n                new TypeError(\n                    'Value must be either a string or instanceof Script'\n                )\n            );\n        });\n\n        test('throws if weight is not a script or string', () => {\n            expect(() =>\n                new WeightedAverageAggregation('my_agg').weight(10, 10)\n            ).toThrow(\n                new TypeError(\n                    'Weight must be either a string or instanceof Script'\n                )\n            );\n        });\n    });\n\n    describe('field/script updates', () => {\n        test('removes previously set value field/script when updated', () => {\n            const valueOne = new WeightedAverageAggregation('my_agg').value(\n                new Script('inline', 'script')\n            );\n            const expectedOne = {\n                my_agg: {\n                    weighted_avg: {\n                        value: {\n                            script: {\n                                inline: 'script'\n                            }\n                        },\n                        weight: {}\n                    }\n                }\n            };\n            expect(valueOne.toJSON()).toEqual(expectedOne);\n\n            const valueTwo = valueOne.value('my_field');\n            const expectedTwo = {\n                my_agg: {\n                    weighted_avg: {\n                        value: {\n                            field: 'my_field'\n                        },\n                        weight: {}\n                    }\n                }\n            };\n            expect(valueTwo.toJSON()).toEqual(expectedTwo);\n\n            const valueThree = valueOne.value(new Script('inline', 'script2'));\n            const expectedThree = {\n                
my_agg: {\n                    weighted_avg: {\n                        value: {\n                            script: {\n                                inline: 'script2'\n                            }\n                        },\n                        weight: {}\n                    }\n                }\n            };\n            expect(valueThree.toJSON()).toEqual(expectedThree);\n        });\n\n        test('removes previously set weight field/script when updated', () => {\n            const valueOne = new WeightedAverageAggregation('my_agg').weight(\n                new Script('inline', 'script')\n            );\n            const expectedOne = {\n                my_agg: {\n                    weighted_avg: {\n                        weight: {\n                            script: {\n                                inline: 'script'\n                            }\n                        },\n                        value: {}\n                    }\n                }\n            };\n            expect(valueOne.toJSON()).toEqual(expectedOne);\n\n            const valueTwo = valueOne.weight('my_field');\n            const expectedTwo = {\n                my_agg: {\n                    weighted_avg: {\n                        weight: {\n                            field: 'my_field'\n                        },\n                        value: {}\n                    }\n                }\n            };\n            expect(valueTwo.toJSON()).toEqual(expectedTwo);\n\n            const valueThree = valueOne.weight(new Script('inline', 'script2'));\n            const expectedThree = {\n                my_agg: {\n                    weighted_avg: {\n                        weight: {\n                            script: {\n                                inline: 'script2'\n                            }\n                        },\n                        value: {}\n                    }\n                }\n            };\n            
expect(valueThree.toJSON()).toEqual(expectedThree);\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets value and weight', () => {\n            const value = new WeightedAverageAggregation(\n                'my_agg',\n                'my_value',\n                'my_weight'\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    weighted_avg: {\n                        value: {\n                            field: 'my_value'\n                        },\n                        weight: {\n                            field: 'my_weight'\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets value and weight as scripts', () => {\n            const getScript = arg =>\n                new Script('inline', `doc['${arg}'].value`);\n            const value = new WeightedAverageAggregation(\n                'my_agg',\n                getScript('value'),\n                getScript('weight')\n            ).toJSON();\n            const expected = {\n                my_agg: {\n                    weighted_avg: {\n                        value: {\n                            script: {\n                                inline: \"doc['value'].value\"\n                            }\n                        },\n                        weight: {\n                            script: {\n                                inline: \"doc['weight'].value\"\n                            }\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/aggregation.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { Aggregation } from '../../src/core';\nimport {\n    TermsAggregation,\n    FilterAggregation,\n    StatsAggregation,\n    TermQuery\n} from '../../src';\n\nconst getInstance = () => new Aggregation('my_agg', 'my_type');\n\ndescribe('Aggregation', () => {\n    describe('constructor', () => {\n        test('can be instantiated', () => {\n            const value = getInstance().toJSON();\n            const expected = {\n                my_agg: {\n                    my_type: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('name cannot be empty', () => {\n            expect(() => new Aggregation(null, 'my_agg').toJSON()).toThrow(\n                new Error('Aggregation name could not be determined')\n            );\n        });\n\n        test('aggType cannot be empty', () => {\n            expect(() => new Aggregation('my_agg')).toThrow(\n                new Error('Aggregation `aggType` cannot be empty')\n            );\n        });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('aggregation()', () => {\n                expect(() => getInstance().aggregation(value)).toThrow(\n                    new TypeError('Argument must be an instance of Aggregation')\n                );\n            });\n\n            test('agg()', () => {\n                expect(() => getInstance().agg(value)).toThrow(\n                    new TypeError('Argument must be an instance of Aggregation')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets name', () => {\n            const value = new 
Aggregation(null, 'my_type')\n                .name('my_agg')\n                .toJSON();\n            const expected = {\n                my_agg: {\n                    my_type: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets meta', () => {\n            const value = getInstance().meta({ color: 'blue' }).toJSON();\n            const expected = {\n                my_agg: {\n                    my_type: {},\n                    meta: { color: 'blue' }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe.each([\n        {\n            name: 'nested aggs',\n            input: new TermsAggregation('users', 'user'),\n            expected: {\n                my_agg: {\n                    my_type: {},\n                    aggs: {\n                        users: { terms: { field: 'user' } }\n                    }\n                }\n            }\n        },\n        {\n            name: 'deep nested aggs',\n            input: new TermsAggregation('countries', 'artist.country')\n                .order('rock>playback_stats.avg', 'desc')\n                .agg(\n                    new FilterAggregation(\n                        'rock',\n                        new TermQuery('genre', 'rock')\n                    ).agg(new StatsAggregation('playback_stats', 'play_count'))\n                ),\n            expected: {\n                my_agg: {\n                    my_type: {},\n                    aggs: {\n                        countries: {\n                            terms: {\n                                field: 'artist.country',\n                                order: { 'rock>playback_stats.avg': 'desc' }\n                            },\n                            aggs: {\n                                rock: {\n                                    filter: { term: { genre: 'rock' } },\n                                    aggs: {\n    
                                    playback_stats: {\n                                            stats: { field: 'play_count' }\n                                        }\n                                    }\n                                }\n                            }\n                        }\n                    }\n                }\n            }\n        }\n    ])('$name', ({ input, expected }) => {\n        test('aggregation()', () => {\n            const value = getInstance().aggregation(input).toJSON();\n            expect(value).toEqual(expected);\n        });\n\n        test('agg()', () => {\n            const value = getInstance().agg(input).toJSON();\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('multiple nested aggs', () => {\n        const input = [\n            new TermsAggregation('countries', 'country'),\n            new TermsAggregation('users', 'user')\n        ];\n        const expected = {\n            my_agg: {\n                my_type: {},\n                aggs: {\n                    countries: { terms: { field: 'country' } },\n                    users: { terms: { field: 'user' } }\n                }\n            }\n        };\n\n        test('aggregations()', () => {\n            const value = getInstance().aggregations(input).toJSON();\n            expect(value).toEqual(expected);\n        });\n\n        test('aggs()', () => {\n            const value = getInstance().aggs(input).toJSON();\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('toJSON', () => {\n        test('getDSL gets DSL', () => {\n            const value = new TermsAggregation('countries', 'artist.country')\n                .order('rock>playback_stats.avg', 'desc')\n                .agg(\n                    new FilterAggregation(\n                        'rock',\n                        new TermQuery('genre', 'rock')\n                    ).agg(new StatsAggregation('playback_stats', 
'play_count'))\n                );\n            expect(value.toJSON()).toEqual(value.getDSL());\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/geo-point.test.js",
    "content": "import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';\nimport { GeoPoint } from '../../src';\n\ndescribe('GeoPoint', () => {\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('object()', () => {\n                expect(() => new GeoPoint().object(value)).toThrow(\n                    new TypeError('Argument must be an instance of Object')\n                );\n            });\n\n            test('array()', () => {\n                expect(() => new GeoPoint().array(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('representation setters', () => {\n        test('sets lat lon', () => {\n            const value = new GeoPoint().lat(41.12).lon(-71.34).toJSON();\n            expect(value).toEqual({ lat: 41.12, lon: -71.34 });\n        });\n\n        test('sets object', () => {\n            const value = new GeoPoint()\n                .object({ lat: 41.12, lon: -71.34 })\n                .toJSON();\n            expect(value).toEqual({ lat: 41.12, lon: -71.34 });\n        });\n\n        test('sets array', () => {\n            const value = new GeoPoint().array([-71.34, 41.12]).toJSON();\n            expect(value).toEqual([-71.34, 41.12]);\n        });\n\n        test('sets string', () => {\n            const value = new GeoPoint().string('41.12,-71.34').toJSON();\n            const expected = '41.12,-71.34';\n            expect(value).toBe(expected);\n        });\n    });\n\n    describe('mixed representation', () => {\n        test('lat then array overwrites', () => {\n            const value = new GeoPoint()\n                
.lat(41.12)\n                .array([-71.34, 41.12])\n                .toJSON();\n            expect(value).toEqual([-71.34, 41.12]);\n        });\n\n        test('string then lat/lon overwrites', () => {\n            const value = new GeoPoint()\n                .string('41.12,-71.34')\n                .lat(41.12)\n                .lon(-71.34)\n                .toJSON();\n            expect(value).toEqual({ lat: 41.12, lon: -71.34 });\n        });\n\n        test('array then object overwrites', () => {\n            const value = new GeoPoint()\n                .array([-71.34, 41.12])\n                .object({ lat: 41.12, lon: -71.34 })\n                .toJSON();\n            expect(value).toEqual({ lat: 41.12, lon: -71.34 });\n        });\n\n        test('lat then string overwrites', () => {\n            const value = new GeoPoint()\n                .lat(41.12)\n                .string('41.12,-71.34')\n                .toJSON();\n            expect(value).toEqual('41.12,-71.34');\n        });\n    });\n\n    describe('mixed representation warnings', () => {\n        let spy;\n\n        beforeEach(() => {\n            spy = vi.spyOn(console, 'warn').mockImplementation(() => {});\n        });\n\n        afterEach(() => {\n            spy.mockRestore();\n        });\n\n        test('lat then array logs warnings', () => {\n            const geoPoint = new GeoPoint().lat(41.12).array([-71.34, 41.12]);\n            geoPoint.toJSON();\n            expect(spy).toHaveBeenCalledTimes(2);\n            expect(spy).toHaveBeenNthCalledWith(\n                1,\n                '[GeoPoint] Do not mix with other representation!'\n            );\n            expect(spy).toHaveBeenNthCalledWith(2, '[GeoPoint] Overwriting.');\n        });\n\n        test('array then object logs warnings', () => {\n            const geoPoint = new GeoPoint()\n                .array([-71.34, 41.12])\n                .object({ lat: 41.12, lon: -71.34 });\n            geoPoint.toJSON();\n            
expect(spy).toHaveBeenCalledTimes(2);\n            expect(spy).toHaveBeenNthCalledWith(\n                1,\n                '[GeoPoint] Do not mix with other representation!'\n            );\n            expect(spy).toHaveBeenNthCalledWith(2, '[GeoPoint] Overwriting.');\n        });\n\n        test('lat then string logs warnings', () => {\n            const geoPoint = new GeoPoint().lat(41.12).string('41.12,-71.34');\n            geoPoint.toJSON();\n            expect(spy).toHaveBeenCalledTimes(2);\n            expect(spy).toHaveBeenNthCalledWith(\n                1,\n                '[GeoPoint] Do not mix with other representation!'\n            );\n            expect(spy).toHaveBeenNthCalledWith(2, '[GeoPoint] Overwriting.');\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/geo-shape.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoShape, geoShape } from '../../src';\n\ndescribe('GeoShape', () => {\n    describe('parameter validation', () => {\n        test.each([\n            {\n                name: 'coordinates() throws TypeError for null parameter',\n                value: null\n            },\n            {\n                name: 'coordinates() throws TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            expect(() => new GeoShape().coordinates(value)).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n        });\n    });\n\n    describe('type() validation', () => {\n        test.each([\n            { name: 'accepts valid type: point', value: 'point' },\n            {\n                name: 'accepts valid type: POINT (case-insensitive)',\n                value: 'POINT'\n            },\n            { name: 'accepts valid type: linestring', value: 'linestring' },\n            {\n                name: 'accepts valid type: LINESTRING (case-insensitive)',\n                value: 'LINESTRING'\n            },\n            { name: 'accepts valid type: polygon', value: 'polygon' },\n            {\n                name: 'accepts valid type: POLYGON (case-insensitive)',\n                value: 'POLYGON'\n            },\n            { name: 'accepts valid type: multipoint', value: 'multipoint' },\n            {\n                name: 'accepts valid type: MULTIPOINT (case-insensitive)',\n                value: 'MULTIPOINT'\n            },\n            {\n                name: 'accepts valid type: multilinestring',\n                value: 'multilinestring'\n            },\n            {\n                name: 'accepts valid type: MULTILINESTRING (case-insensitive)',\n                value: 'MULTILINESTRING'\n            },\n            { name: 'accepts valid type: multipolygon', value: 
'multipolygon' },\n            {\n                name: 'accepts valid type: MULTIPOLYGON (case-insensitive)',\n                value: 'MULTIPOLYGON'\n            },\n            {\n                name: 'accepts valid type: geometrycollection',\n                value: 'geometrycollection'\n            },\n            {\n                name: 'accepts valid type: GEOMETRYCOLLECTION (case-insensitive)',\n                value: 'GEOMETRYCOLLECTION'\n            },\n            { name: 'accepts valid type: envelope', value: 'envelope' },\n            {\n                name: 'accepts valid type: ENVELOPE (case-insensitive)',\n                value: 'ENVELOPE'\n            },\n            { name: 'accepts valid type: circle', value: 'circle' },\n            {\n                name: 'accepts valid type: CIRCLE (case-insensitive)',\n                value: 'CIRCLE'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => geoShape().type(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null type', value: null },\n            { name: 'throws for invalid type', value: 'invalid_type' }\n        ])('$name', ({ value }) => {\n            expect(() => geoShape().type(value)).toThrow(\n                /The 'type' parameter should be one of/\n            );\n        });\n    });\n\n    describe('constructor', () => {\n        test('sets arguments', () => {\n            let value = new GeoShape('multipoint', [\n                [102.0, 2.0],\n                [103.0, 2.0]\n            ]).toJSON();\n            const expected = {\n                type: 'multipoint',\n                coordinates: [\n                    [102.0, 2.0],\n                    [103.0, 2.0]\n                ]\n            };\n            expect(value).toEqual(expected);\n\n            value = new GeoShape('multipoint')\n                .coordinates([\n                    [102.0, 2.0],\n                    [103.0, 2.0]\n                ])\n        
        .toJSON();\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sets type and coordinates', () => {\n            const value = new GeoShape()\n                .type('envelope')\n                .coordinates([\n                    [-45.0, 45.0],\n                    [45.0, -45.0]\n                ])\n                .toJSON();\n            const expected = {\n                type: 'envelope',\n                coordinates: [\n                    [-45.0, 45.0],\n                    [45.0, -45.0]\n                ]\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets radius', () => {\n            const value = new GeoShape()\n                .type('circle')\n                .coordinates([-45.0, 45.0])\n                .radius('100m')\n                .toJSON();\n            const expected = {\n                type: 'circle',\n                coordinates: [-45.0, 45.0],\n                radius: '100m'\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('toJSON', () => {\n        test('throws when both type and coordinates are missing', () => {\n            expect(() => new GeoShape().toJSON()).toThrow(\n                'For all types, both the inner `type` and `coordinates` fields are required.'\n            );\n        });\n\n        test('throws when coordinates is missing', () => {\n            expect(() => new GeoShape().type('point').toJSON()).toThrow(\n                'For all types, both the inner `type` and `coordinates` fields are required.'\n            );\n        });\n\n        test('throws when type is missing', () => {\n            expect(() =>\n                new GeoShape().coordinates([-45.0, 45.0]).toJSON()\n            ).toThrow(\n                'For all types, both the inner `type` and `coordinates` fields are required.'\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/highlight.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport {\n    Highlight,\n    highlight,\n    BoolQuery,\n    MatchQuery,\n    MatchPhraseQuery\n} from '../../src';\n\ndescribe('Highlight', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('fields()', () => {\n                expect(() => new Highlight().fields(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n\n            test('matchedFields()', () => {\n                expect(() => new Highlight().matchedFields(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n\n            test('highlightQuery()', () => {\n                expect(() => new Highlight().highlightQuery(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n        });\n    });\n\n    describe('encoder() validation', () => {\n        test.each([\n            { name: 'accepts valid value: default', value: 'default' },\n            {\n                name: 'accepts valid value: DEFAULT (case-insensitive)',\n                value: 'DEFAULT'\n            },\n            { name: 'accepts valid value: html', value: 'html' },\n            {\n                name: 'accepts valid value: HTML (case-insensitive)',\n                value: 'HTML'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => highlight().encoder(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', 
value: 'invalid_value' }\n        ])('$name', ({ value }) => {\n            expect(() => highlight().encoder(value)).toThrow(\n                new Error(\n                    \"The 'encoder' parameter should be one of 'default' or 'html'\"\n                )\n            );\n        });\n    });\n\n    describe('type() validation', () => {\n        test.each([\n            { name: 'accepts valid value: plain', value: 'plain' },\n            {\n                name: 'accepts valid value: PLAIN (case-insensitive)',\n                value: 'PLAIN'\n            },\n            { name: 'accepts valid value: postings', value: 'postings' },\n            {\n                name: 'accepts valid value: POSTINGS (case-insensitive)',\n                value: 'POSTINGS'\n            },\n            { name: 'accepts valid value: unified', value: 'unified' },\n            {\n                name: 'accepts valid value: UNIFIED (case-insensitive)',\n                value: 'UNIFIED'\n            },\n            { name: 'accepts valid value: fvh', value: 'fvh' },\n            {\n                name: 'accepts valid value: FVH (case-insensitive)',\n                value: 'FVH'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => highlight().type(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_value' }\n        ])('$name', ({ value }) => {\n            expect(() => highlight().type(value)).toThrow(\n                new Error(\n                    \"The 'type' parameter should be one of 'plain', 'postings', 'unified' or 'fvh'\"\n                )\n            );\n        });\n    });\n\n    describe('fragmenter() validation', () => {\n        test.each([\n            { name: 'accepts valid value: simple', value: 'simple' },\n            {\n                name: 'accepts valid value: SIMPLE (case-insensitive)',\n                
value: 'SIMPLE'\n            },\n            { name: 'accepts valid value: span', value: 'span' },\n            {\n                name: 'accepts valid value: SPAN (case-insensitive)',\n                value: 'SPAN'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => highlight().fragmenter(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_value' }\n        ])('$name', ({ value }) => {\n            expect(() => highlight().fragmenter(value)).toThrow(\n                new Error(\n                    \"The 'fragmenter' parameter should be one of 'simple' or 'span'\"\n                )\n            );\n        });\n    });\n\n    describe('preTags() option setter', () => {\n        const value = ['<tag1>', '<tag2>'];\n\n        test('sets global option', () => {\n            const result = new Highlight().preTags(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                pre_tags: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight().preTags(value, 'my_field').toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { pre_tags: value }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .preTags(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { pre_tags: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                
.preTags(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { pre_tags: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.preTags(value, 'my_field_a');\n            instance.preTags(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { pre_tags: value },\n                    my_field_b: { pre_tags: value }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.preTags(value, 'my_field_a');\n            instance.preTags(value, 'my_field_b');\n            instance.preTags(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { pre_tags: value },\n                    my_field_b: { pre_tags: value }\n                },\n                pre_tags: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                .preTags(value, 'my_field')\n                .preTags(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { pre_tags: value }\n                },\n                pre_tags: value\n            });\n        });\n\n        test('sets pre_tags option with string converted to array', () => {\n            const result = new Highlight().preTags('<tag1>').toJSON();\n            expect(result).toEqual({\n                fields: {},\n                pre_tags: ['<tag1>']\n            });\n        });\n  
  });\n\n    describe('postTags() option setter', () => {\n        const value = ['</tag1>', '</tag2>'];\n\n        test('sets global option', () => {\n            const result = new Highlight().postTags(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                post_tags: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight().postTags(value, 'my_field').toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { post_tags: value }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .postTags(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { post_tags: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .postTags(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { post_tags: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.postTags(value, 'my_field_a');\n            instance.postTags(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { post_tags: value },\n                    my_field_b: { post_tags: value }\n                }\n            });\n        });\n\n        test('sets both 
field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.postTags(value, 'my_field_a');\n            instance.postTags(value, 'my_field_b');\n            instance.postTags(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { post_tags: value },\n                    my_field_b: { post_tags: value }\n                },\n                post_tags: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                .postTags(value, 'my_field')\n                .postTags(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { post_tags: value }\n                },\n                post_tags: value\n            });\n        });\n\n        test('sets post_tags option with string converted to array', () => {\n            const result = new Highlight().postTags('</tag1>').toJSON();\n            expect(result).toEqual({\n                fields: {},\n                post_tags: ['</tag1>']\n            });\n        });\n    });\n\n    describe('fragmentSize() option setter', () => {\n        const value = 150;\n\n        test('sets global option', () => {\n            const result = new Highlight().fragmentSize(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                fragment_size: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight()\n                .fragmentSize(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { fragment_size: value }\n                }\n            });\n        });\n\n        test('sets field-specific option 
when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .fragmentSize(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { fragment_size: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .fragmentSize(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { fragment_size: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.fragmentSize(value, 'my_field_a');\n            instance.fragmentSize(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { fragment_size: value },\n                    my_field_b: { fragment_size: value }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.fragmentSize(value, 'my_field_a');\n            instance.fragmentSize(value, 'my_field_b');\n            instance.fragmentSize(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { fragment_size: value },\n                    my_field_b: { fragment_size: value }\n                },\n                fragment_size: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                
.fragmentSize(value, 'my_field')\n                .fragmentSize(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { fragment_size: value }\n                },\n                fragment_size: value\n            });\n        });\n    });\n\n    describe('numberOfFragments() option setter', () => {\n        const value = 3;\n\n        test('sets global option', () => {\n            const result = new Highlight().numberOfFragments(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                number_of_fragments: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight()\n                .numberOfFragments(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { number_of_fragments: value }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .numberOfFragments(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { number_of_fragments: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .numberOfFragments(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { number_of_fragments: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new 
Highlight(['my_field_a', 'my_field_b']);\n            instance.numberOfFragments(value, 'my_field_a');\n            instance.numberOfFragments(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { number_of_fragments: value },\n                    my_field_b: { number_of_fragments: value }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.numberOfFragments(value, 'my_field_a');\n            instance.numberOfFragments(value, 'my_field_b');\n            instance.numberOfFragments(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { number_of_fragments: value },\n                    my_field_b: { number_of_fragments: value }\n                },\n                number_of_fragments: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                .numberOfFragments(value, 'my_field')\n                .numberOfFragments(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { number_of_fragments: value }\n                },\n                number_of_fragments: value\n            });\n        });\n    });\n\n    describe('noMatchSize() option setter', () => {\n        const value = 150;\n\n        test('sets global option', () => {\n            const result = new Highlight().noMatchSize(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                no_match_size: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight()\n                .noMatchSize(value, 
'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { no_match_size: value }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .noMatchSize(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { no_match_size: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .noMatchSize(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { no_match_size: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.noMatchSize(value, 'my_field_a');\n            instance.noMatchSize(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { no_match_size: value },\n                    my_field_b: { no_match_size: value }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.noMatchSize(value, 'my_field_a');\n            instance.noMatchSize(value, 'my_field_b');\n            instance.noMatchSize(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { no_match_size: value },\n                    my_field_b: { 
no_match_size: value }\n                },\n                no_match_size: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                .noMatchSize(value, 'my_field')\n                .noMatchSize(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { no_match_size: value }\n                },\n                no_match_size: value\n            });\n        });\n    });\n\n    describe('highlightQuery() option setter', () => {\n        const value = new BoolQuery()\n            .must(new MatchQuery('content', 'foo bar'))\n            .should(\n                new MatchPhraseQuery('content', 'foo bar').slop(1).boost(10.0)\n            )\n            .minimumShouldMatch(0);\n        const expectedValue = value.toJSON();\n\n        test('sets global option', () => {\n            const result = new Highlight().highlightQuery(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                highlight_query: expectedValue\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight()\n                .highlightQuery(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { highlight_query: expectedValue }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .highlightQuery(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { highlight_query: expectedValue }\n                }\n            });\n        });\n\n        test('sets option for one field in 
multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .highlightQuery(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { highlight_query: expectedValue },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.highlightQuery(value, 'my_field_a');\n            instance.highlightQuery(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { highlight_query: expectedValue },\n                    my_field_b: { highlight_query: expectedValue }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.highlightQuery(value, 'my_field_a');\n            instance.highlightQuery(value, 'my_field_b');\n            instance.highlightQuery(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { highlight_query: expectedValue },\n                    my_field_b: { highlight_query: expectedValue }\n                },\n                highlight_query: expectedValue\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                .highlightQuery(value, 'my_field')\n                .highlightQuery(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { highlight_query: expectedValue }\n                },\n                highlight_query: expectedValue\n            });\n    
    });\n    });\n\n    describe('requireFieldMatch() option setter', () => {\n        const value = false;\n\n        test('sets global option', () => {\n            const result = new Highlight().requireFieldMatch(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                require_field_match: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight()\n                .requireFieldMatch(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { require_field_match: value }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .requireFieldMatch(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { require_field_match: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .requireFieldMatch(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { require_field_match: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.requireFieldMatch(value, 'my_field_a');\n            instance.requireFieldMatch(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { require_field_match: 
value },\n                    my_field_b: { require_field_match: value }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.requireFieldMatch(value, 'my_field_a');\n            instance.requireFieldMatch(value, 'my_field_b');\n            instance.requireFieldMatch(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { require_field_match: value },\n                    my_field_b: { require_field_match: value }\n                },\n                require_field_match: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                .requireFieldMatch(value, 'my_field')\n                .requireFieldMatch(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { require_field_match: value }\n                },\n                require_field_match: value\n            });\n        });\n    });\n\n    describe('boundaryMaxScan() option setter', () => {\n        const value = 25;\n\n        test('sets global option', () => {\n            const result = new Highlight().boundaryMaxScan(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                boundary_max_scan: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight()\n                .boundaryMaxScan(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { boundary_max_scan: value }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () 
=> {\n            const result = new Highlight('my_field')\n                .boundaryMaxScan(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { boundary_max_scan: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .boundaryMaxScan(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { boundary_max_scan: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.boundaryMaxScan(value, 'my_field_a');\n            instance.boundaryMaxScan(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { boundary_max_scan: value },\n                    my_field_b: { boundary_max_scan: value }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.boundaryMaxScan(value, 'my_field_a');\n            instance.boundaryMaxScan(value, 'my_field_b');\n            instance.boundaryMaxScan(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { boundary_max_scan: value },\n                    my_field_b: { boundary_max_scan: value }\n                },\n                boundary_max_scan: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n          
      .boundaryMaxScan(value, 'my_field')\n                .boundaryMaxScan(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { boundary_max_scan: value }\n                },\n                boundary_max_scan: value\n            });\n        });\n    });\n\n    describe('boundaryChars() option setter', () => {\n        const value = '.,!?; \\t\\n';\n\n        test('sets global option', () => {\n            const result = new Highlight().boundaryChars(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                boundary_chars: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight()\n                .boundaryChars(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { boundary_chars: value }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .boundaryChars(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { boundary_chars: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .boundaryChars(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { boundary_chars: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new 
Highlight(['my_field_a', 'my_field_b']);\n            instance.boundaryChars(value, 'my_field_a');\n            instance.boundaryChars(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { boundary_chars: value },\n                    my_field_b: { boundary_chars: value }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.boundaryChars(value, 'my_field_a');\n            instance.boundaryChars(value, 'my_field_b');\n            instance.boundaryChars(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { boundary_chars: value },\n                    my_field_b: { boundary_chars: value }\n                },\n                boundary_chars: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                .boundaryChars(value, 'my_field')\n                .boundaryChars(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { boundary_chars: value }\n                },\n                boundary_chars: value\n            });\n        });\n    });\n\n    describe('forceSource() option setter', () => {\n        const value = true;\n\n        test('sets global option', () => {\n            const result = new Highlight().forceSource(value).toJSON();\n            expect(result).toEqual({\n                fields: {},\n                force_source: value\n            });\n        });\n\n        test('sets field-specific option when field provided', () => {\n            const result = new Highlight()\n                .forceSource(value, 'my_field')\n                .toJSON();\n            
expect(result).toEqual({\n                fields: {\n                    my_field: { force_source: value }\n                }\n            });\n        });\n\n        test('sets field-specific option when field in constructor', () => {\n            const result = new Highlight('my_field')\n                .forceSource(value, 'my_field')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { force_source: value }\n                }\n            });\n        });\n\n        test('sets option for one field in multi-field highlight', () => {\n            const result = new Highlight(['my_field_a', 'my_field_b'])\n                .forceSource(value, 'my_field_a')\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field_a: { force_source: value },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets option for multiple fields separately', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.forceSource(value, 'my_field_a');\n            instance.forceSource(value, 'my_field_b');\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { force_source: value },\n                    my_field_b: { force_source: value }\n                }\n            });\n        });\n\n        test('sets both field-specific and global option', () => {\n            const instance = new Highlight(['my_field_a', 'my_field_b']);\n            instance.forceSource(value, 'my_field_a');\n            instance.forceSource(value, 'my_field_b');\n            instance.forceSource(value);\n            expect(instance.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { force_source: value },\n                    my_field_b: { force_source: value }\n                },\n                
force_source: value\n            });\n        });\n\n        test('sets field-specific then global option', () => {\n            const result = new Highlight()\n                .forceSource(value, 'my_field')\n                .forceSource(value)\n                .toJSON();\n            expect(result).toEqual({\n                fields: {\n                    my_field: { force_source: value }\n                },\n                force_source: value\n            });\n        });\n    });\n\n    describe('field management', () => {\n        test('constructor sets single field', () => {\n            const value = new Highlight('my_field').toJSON();\n            expect(value).toEqual({\n                fields: {\n                    my_field: {}\n                }\n            });\n        });\n\n        test('constructor sets multiple fields', () => {\n            const value = new Highlight(['my_field_a', 'my_field_b']).toJSON();\n            expect(value).toEqual({\n                fields: {\n                    my_field_a: {},\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets field', () => {\n            const value = new Highlight().field('my_field').toJSON();\n            expect(value).toEqual({\n                fields: {\n                    my_field: {}\n                }\n            });\n        });\n\n        test('sets multiple fields', () => {\n            const value = new Highlight()\n                .fields(['my_field_a', 'my_field_b'])\n                .toJSON();\n            expect(value).toEqual({\n                fields: {\n                    my_field_a: {},\n                    my_field_b: {}\n                }\n            });\n        });\n    });\n\n    describe('special methods', () => {\n        test('sets tags_schema as styled', () => {\n            const value = new Highlight().styledTagsSchema().toJSON();\n            expect(value).toEqual({\n                fields: {},\n               
 tags_schema: 'styled'\n            });\n        });\n\n        test('sets phrase_limit option', () => {\n            const value = new Highlight().phraseLimit(512).toJSON();\n            expect(value).toEqual({\n                fields: {},\n                phrase_limit: 512\n            });\n        });\n    });\n\n    describe('scoreOrder()', () => {\n        test('sets global score order', () => {\n            const value = new Highlight().scoreOrder().toJSON();\n            expect(value).toEqual({\n                fields: {},\n                order: 'score'\n            });\n        });\n\n        test('sets field-specific score order', () => {\n            const value = new Highlight().scoreOrder('my_field').toJSON();\n            expect(value).toEqual({\n                fields: {\n                    my_field: { order: 'score' }\n                }\n            });\n        });\n\n        test('sets score order for constructor field', () => {\n            const value = new Highlight('my_field')\n                .scoreOrder('my_field')\n                .toJSON();\n            expect(value).toEqual({\n                fields: {\n                    my_field: { order: 'score' }\n                }\n            });\n        });\n\n        test('sets score order for one field in multi-field highlight', () => {\n            const value = new Highlight(['my_field_a', 'my_field_b'])\n                .scoreOrder('my_field_a')\n                .toJSON();\n            expect(value).toEqual({\n                fields: {\n                    my_field_a: { order: 'score' },\n                    my_field_b: {}\n                }\n            });\n        });\n\n        test('sets score order for multiple fields separately', () => {\n            const value = new Highlight(['my_field_a', 'my_field_b']);\n            value.scoreOrder('my_field_a');\n            value.scoreOrder('my_field_b');\n            expect(value.toJSON()).toEqual({\n                fields: {\n               
     my_field_a: { order: 'score' },\n                    my_field_b: { order: 'score' }\n                }\n            });\n        });\n\n        test('sets both field-specific and global score order', () => {\n            const value = new Highlight(['my_field_a', 'my_field_b'])\n                .scoreOrder('my_field_a')\n                .scoreOrder('my_field_b')\n                .scoreOrder();\n            expect(value.toJSON()).toEqual({\n                fields: {\n                    my_field_a: { order: 'score' },\n                    my_field_b: { order: 'score' }\n                },\n                order: 'score'\n            });\n        });\n\n        test('sets field-specific then global score order', () => {\n            const value = new Highlight()\n                .scoreOrder('my_field')\n                .scoreOrder()\n                .toJSON();\n            expect(value).toEqual({\n                fields: {\n                    my_field: { order: 'score' }\n                },\n                order: 'score'\n            });\n        });\n    });\n\n    describe('matchedFields()', () => {\n        test('sets matched_fields option', () => {\n            const value = new Highlight()\n                .matchedFields(['content', 'content.plain'], 'content')\n                .toJSON();\n            expect(value).toEqual({\n                fields: {\n                    content: {\n                        matched_fields: ['content', 'content.plain'],\n                        type: 'fvh'\n                    }\n                }\n            });\n        });\n\n        test('throws error when no field provided', () => {\n            expect(() =>\n                new Highlight().matchedFields(['content', 'content.plain'])\n            ).toThrow(\n                new Error('`matched_fields` requires field name to be passed')\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/indexed-shape.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { IndexedShape, indexedShape } from '../../src';\n\ndescribe('IndexedShape', () => {\n    describe('constructor', () => {\n        test('sets options', () => {\n            const value = new IndexedShape('DEU', 'countries').toJSON();\n            const expected = {\n                id: 'DEU',\n                type: 'countries'\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sets id option', () => {\n            const result = indexedShape().id('DEU').toJSON();\n            const expected = { id: 'DEU' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets type option', () => {\n            const result = indexedShape().type('countries').toJSON();\n            const expected = { type: 'countries' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets index option', () => {\n            const result = indexedShape().index('shapes').toJSON();\n            const expected = { index: 'shapes' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets path option', () => {\n            const result = indexedShape().path('location').toJSON();\n            const expected = { path: 'location' };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/inner-hits.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { InnerHits, innerHits, Sort, Script, Highlight } from '../../src';\n\ndescribe('InnerHits', () => {\n    describe('constructor', () => {\n        test('sets name', () => {\n            const value = new InnerHits('my_inner_hits').toJSON();\n            const expected = {\n                name: 'my_inner_hits'\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('sort()', () => {\n                expect(() => new InnerHits().sort(value)).toThrow(\n                    new TypeError('Argument must be an instance of Sort')\n                );\n            });\n\n            test('highlight()', () => {\n                expect(() => new InnerHits().highlight(value)).toThrow(\n                    new TypeError('Argument must be an instance of Highlight')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets from option', () => {\n            const result = innerHits().from(10).toJSON();\n            const expected = { from: 10 };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets size option', () => {\n            const result = innerHits().size(10).toJSON();\n            const expected = { size: 10 };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets version option', () => {\n            const result = innerHits().version(true).toJSON();\n            const expected = { version: true };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets explain option', () => {\n            const 
result = innerHits().explain(true).toJSON();\n            const expected = { explain: true };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets sort option', () => {\n            const sortChannel = new Sort('channel', 'desc');\n            const result = innerHits().sort(sortChannel).toJSON();\n            const expected = { sort: [sortChannel.toJSON()] };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets sorts option', () => {\n            const sortChannel = new Sort('channel', 'desc');\n            const sortCategories = new Sort('categories', 'desc');\n            const result = innerHits()\n                .sorts([sortChannel, sortCategories])\n                .toJSON();\n            const expected = {\n                sort: [sortChannel.toJSON(), sortCategories.toJSON()]\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets highlight option', () => {\n            const result = innerHits()\n                .highlight(new Highlight(['content']).type('plain', 'content'))\n                .toJSON();\n            const expected = {\n                highlight: {\n                    fields: {\n                        content: { type: 'plain' }\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        describe('source option', () => {\n            test.each([\n                {\n                    name: 'sets source(str) option',\n                    value: 'obj.*',\n                    expected: { _source: 'obj.*' }\n                },\n                {\n                    name: 'sets source(bool) option',\n                    value: false,\n                    expected: { _source: false }\n                },\n                {\n                    name: 'sets source(arr) option',\n                    value: ['obj1.*', 'obj2.*'],\n                    expected: { _source: ['obj1.*', 
'obj2.*'] }\n                },\n                {\n                    name: 'sets source(obj) option',\n                    value: {\n                        includes: ['obj1.*', 'obj2.*'],\n                        excludes: ['*.description']\n                    },\n                    expected: {\n                        _source: {\n                            includes: ['obj1.*', 'obj2.*'],\n                            excludes: ['*.description']\n                        }\n                    }\n                }\n            ])('$name', ({ value, expected }) => {\n                const result = innerHits().source(value).toJSON();\n                expect(result).toEqual(expected);\n            });\n        });\n\n        test('sets storedFields option', () => {\n            const result = innerHits().storedFields(['comments.text']).toJSON();\n            const expected = { stored_fields: ['comments.text'] };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets scriptField option', () => {\n            const scriptA = new Script(\n                'inline',\n                \"doc['my_field_name'].value * 2\"\n            ).lang('painless');\n            const result = innerHits().scriptField('test1', scriptA).toJSON();\n            const expected = {\n                script_fields: { test1: { script: scriptA.toJSON() } }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets scriptFields option', () => {\n            const scriptA = new Script(\n                'inline',\n                \"doc['my_field_name'].value * 2\"\n            ).lang('painless');\n            const scriptB = new Script(\n                'inline',\n                \"doc['my_field_name'].value * factor\"\n            )\n                .lang('painless')\n                .params({ factor: 2.0 });\n            const result = innerHits()\n                .scriptFields({\n                    test1: scriptA,\n              
      test2: scriptB\n                })\n                .toJSON();\n            const expected = {\n                script_fields: {\n                    test1: { script: scriptA.toJSON() },\n                    test2: { script: scriptB.toJSON() }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets docvalueFields option', () => {\n            const result = innerHits()\n                .docvalueFields(['test1', 'test2'])\n                .toJSON();\n            const expected = { docvalue_fields: ['test1', 'test2'] };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/knn.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { KNN, TermQuery } from '../../src';\n\ndescribe('KNN', () => {\n    describe('constructor', () => {\n        test('can be instantiated', () => {\n            const knn = new KNN('my_field', 5, 10).queryVector([1, 2, 3]);\n            const json = knn.toJSON();\n            expect(json).toEqual({\n                field: 'my_field',\n                k: 5,\n                num_candidates: 10,\n                query_vector: [1, 2, 3],\n                filter: []\n            });\n        });\n\n        test('throws error if numCandidates is less than k', () => {\n            expect(() =>\n                new KNN('my_field', 10, 5).queryVector([1, 2, 3])\n            ).toThrow(new Error('KNN numCandidates cannot be less than k'));\n        });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: 'not_a_query'\n            }\n        ])('$name', ({ value }) => {\n            test('filter()', () => {\n                const knn = new KNN('my_field', 5, 10).queryVector([1, 2, 3]);\n                expect(() => knn.filter(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('queryVector sets correctly', () => {\n            const vector = [1, 2, 3];\n            const knn = new KNN('my_field', 5, 10).queryVector(vector);\n            const json = knn.toJSON();\n            expect(json.query_vector).toEqual(vector);\n        });\n\n        test('queryVectorBuilder sets correctly', () => {\n            const modelId = 'model_123';\n            const modelText = 'Sample model text';\n            const knn = new KNN('my_field', 5, 10).queryVectorBuilder(\n    
            modelId,\n                modelText\n            );\n            const json = knn.toJSON();\n            expect(json.query_vector_builder.text_embeddings).toEqual({\n                model_id: modelId,\n                model_text: modelText\n            });\n        });\n\n        test('boost sets correctly', () => {\n            const knn = new KNN('my_field', 5, 10)\n                .boost(1.5)\n                .queryVector([1, 2, 3]);\n            const json = knn.toJSON();\n            expect(json.boost).toBe(1.5);\n        });\n\n        test('similarity sets correctly', () => {\n            const knn = new KNN('my_field', 5, 10)\n                .similarity(0.8)\n                .queryVector([1, 2, 3]);\n            const json = knn.toJSON();\n            expect(json.similarity).toBe(0.8);\n        });\n    });\n\n    describe('filter method', () => {\n        test('adds single query correctly', () => {\n            const knn = new KNN('my_field', 5, 10).queryVector([1, 2, 3]);\n            const query = new TermQuery('field', 'value');\n            knn.filter(query);\n            const json = knn.toJSON();\n            expect(json.filter).toEqual([query.toJSON()]);\n        });\n\n        test('adds queries as array correctly', () => {\n            const knn = new KNN('my_field', 5, 10).queryVector([1, 2, 3]);\n            const query1 = new TermQuery('field1', 'value1');\n            const query2 = new TermQuery('field2', 'value2');\n            knn.filter([query1, query2]);\n            const json = knn.toJSON();\n            expect(json.filter).toEqual([query1.toJSON(), query2.toJSON()]);\n        });\n    });\n\n    describe('toJSON', () => {\n        test('returns correct DSL', () => {\n            const knn = new KNN('my_field', 5, 10)\n                .queryVector([1, 2, 3])\n                .filter(new TermQuery('field', 'value'));\n\n            const expectedDSL = {\n                field: 'my_field',\n                k: 5,\n             
   num_candidates: 10,\n                query_vector: [1, 2, 3],\n                filter: [{ term: { field: 'value' } }]\n            };\n\n            expect(knn.toJSON()).toEqual(expectedDSL);\n        });\n\n        test('throws error if neither query_vector nor query_vector_builder is provided', () => {\n            const knn = new KNN('my_field', 5, 10);\n            expect(() => knn.toJSON()).toThrow(\n                new Error(\n                    'either query_vector_builder or query_vector must be provided'\n                )\n            );\n        });\n    });\n\n    describe('query_vector and query_vector_builder mutual exclusivity', () => {\n        test('throws error when first queryVector and then queryVectorBuilder are set', () => {\n            const knn = new KNN('my_field', 5, 10).queryVector([1, 2, 3]);\n            expect(() => {\n                knn.queryVectorBuilder('model_123', 'Sample model text');\n            }).toThrow(\n                new Error(\n                    'cannot provide both query_vector_builder and query_vector'\n                )\n            );\n        });\n\n        test('throws error when first queryVectorBuilder and then queryVector are set', () => {\n            const knn = new KNN('my_field', 5, 10).queryVectorBuilder(\n                'model_123',\n                'Sample model text'\n            );\n            expect(() => {\n                knn.queryVector([1, 2, 3]);\n            }).toThrow(\n                new Error(\n                    'cannot provide both query_vector_builder and query_vector'\n                )\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { Query } from '../../src/core';\n\nconst getInstance = () => new Query('my_type');\n\ndescribe('Query', () => {\n    describe('options', () => {\n        test('sets boost option', () => {\n            const result = getInstance().boost(10).toJSON();\n            const expected = {\n                my_type: {\n                    boost: 10\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets name option', () => {\n            const result = getInstance().name('my_name').toJSON();\n            const expected = {\n                my_type: {\n                    _name: 'my_name'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('toJSON', () => {\n        test('getDSL gets DSL', () => {\n            const valueA = getInstance().boost(10).toJSON();\n            const valueB = getInstance().boost(10).getDSL();\n            const expected = {\n                my_type: {\n                    boost: 10\n                }\n            };\n\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/request-body-search.test.js",
    "content": "/* eslint-disable max-lines */\nimport { describe, test, expect } from 'vitest';\nimport {\n    RequestBodySearch,\n    requestBodySearch,\n    MatchQuery,\n    MatchPhraseQuery,\n    TermQuery,\n    BoolQuery,\n    FunctionScoreQuery,\n    TermsAggregation,\n    TermSuggester,\n    ScriptScoreFunction,\n    Sort,\n    Script,\n    Highlight,\n    Rescore,\n    InnerHits,\n    RuntimeField,\n    KNN\n} from '../../src';\nimport { recursiveToJSON } from '../../src/core/util';\n\nconst getInstance = () => new RequestBodySearch();\n\ndescribe('RequestBodySearch', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('query()', () => {\n                expect(() => getInstance().query(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('aggregation()', () => {\n                expect(() => getInstance().aggregation(value)).toThrow(\n                    new TypeError('Argument must be an instance of Aggregation')\n                );\n            });\n\n            test('agg()', () => {\n                expect(() => getInstance().agg(value)).toThrow(\n                    new TypeError('Argument must be an instance of Aggregation')\n                );\n            });\n\n            test('suggest()', () => {\n                expect(() => getInstance().suggest(value)).toThrow(\n                    new TypeError('Argument must be an instance of Suggester')\n                );\n            });\n\n            test('sort()', () => {\n                expect(() => getInstance().sort(value)).toThrow(\n                    new TypeError('Argument must be an instance of Sort')\n 
               );\n            });\n\n            test('scriptFields()', () => {\n                expect(() => getInstance().scriptFields(value)).toThrow(\n                    new TypeError('Argument must be an instance of Object')\n                );\n            });\n\n            test('highlight()', () => {\n                expect(() => getInstance().highlight(value)).toThrow(\n                    new TypeError('Argument must be an instance of Highlight')\n                );\n            });\n\n            test('rescore()', () => {\n                expect(() => getInstance().rescore(value)).toThrow(\n                    new TypeError('Argument must be an instance of Rescore')\n                );\n            });\n\n            test('postFilter()', () => {\n                expect(() => getInstance().postFilter(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('kNN()', () => {\n                expect(() => getInstance().kNN(value)).toThrow(\n                    new TypeError('Argument must be an instance of KNN')\n                );\n            });\n        });\n    });\n\n    describe('simple option setters', () => {\n        test('sets query option', () => {\n            const searchQry = new MatchQuery('desc', 'elastic builder');\n            const result = requestBodySearch().query(searchQry).toJSON();\n            expect(result).toEqual({\n                query: searchQry.toJSON()\n            });\n        });\n\n        test('sets timeout option', () => {\n            const result = requestBodySearch().timeout('5s').toJSON();\n            expect(result).toEqual({\n                timeout: '5s'\n            });\n        });\n\n        test('sets from option', () => {\n            const result = requestBodySearch().from(10).toJSON();\n            expect(result).toEqual({\n                from: 10\n            });\n        });\n\n        test('sets size option', 
() => {\n            const result = requestBodySearch().size(10).toJSON();\n            expect(result).toEqual({\n                size: 10\n            });\n        });\n\n        test('sets terminate_after option', () => {\n            const result = requestBodySearch().terminateAfter(100).toJSON();\n            expect(result).toEqual({\n                terminate_after: 100\n            });\n        });\n\n        test('sets track_scores option', () => {\n            const result = requestBodySearch().trackScores(true).toJSON();\n            expect(result).toEqual({\n                track_scores: true\n            });\n        });\n\n        test('sets track_total_hits option', () => {\n            const result = requestBodySearch().trackTotalHits(true).toJSON();\n            expect(result).toEqual({\n                track_total_hits: true\n            });\n        });\n\n        test('sets version option', () => {\n            const result = requestBodySearch().version(true).toJSON();\n            expect(result).toEqual({\n                version: true\n            });\n        });\n\n        test('sets explain option', () => {\n            const result = requestBodySearch().explain(true).toJSON();\n            expect(result).toEqual({\n                explain: true\n            });\n        });\n\n        test('sets highlight option', () => {\n            const result = requestBodySearch()\n                .highlight(new Highlight(['content']).type('plain', 'content'))\n                .toJSON();\n            expect(result).toEqual({\n                highlight: new Highlight(['content'])\n                    .type('plain', 'content')\n                    .toJSON()\n            });\n        });\n\n        test('sets post_filter option', () => {\n            const filterQry = new BoolQuery()\n                .filter(new TermQuery('user', 'Idan'))\n                .filter(new TermQuery('level', 'INFO'));\n            const result = 
requestBodySearch().postFilter(filterQry).toJSON();\n            expect(result).toEqual({\n                post_filter: filterQry.toJSON()\n            });\n        });\n\n        test('sets rescore option', () => {\n            const rescoreA = new Rescore(\n                50,\n                new MatchPhraseQuery('message', 'the quick brown').slop(2)\n            )\n                .queryWeight(0.7)\n                .rescoreQueryWeight(1.2);\n            const result = requestBodySearch().rescore(rescoreA).toJSON();\n            expect(result).toEqual({\n                rescore: rescoreA.toJSON()\n            });\n        });\n\n        test('sets min_score option', () => {\n            const result = requestBodySearch().minScore(0.5).toJSON();\n            expect(result).toEqual({\n                min_score: 0.5\n            });\n        });\n    });\n\n    describe('aggregation setters', () => {\n        test('sets aggregation option (aggregation method)', () => {\n            const aggA = new TermsAggregation('user_term_agg', 'user');\n            const result = requestBodySearch().aggregation(aggA).toJSON();\n            expect(result).toEqual({\n                aggs: {\n                    user_term_agg: {\n                        terms: { field: 'user' }\n                    }\n                }\n            });\n        });\n\n        test('sets aggregation option (agg method)', () => {\n            const aggA = new TermsAggregation('user_term_agg', 'user');\n            const result = requestBodySearch().agg(aggA).toJSON();\n            expect(result).toEqual({\n                aggs: {\n                    user_term_agg: {\n                        terms: { field: 'user' }\n                    }\n                }\n            });\n        });\n\n        test('sets multiple aggs', () => {\n            const aggA = new TermsAggregation('user_term_agg', 'user');\n            const aggB = new TermsAggregation('keyword_term_agg', 'keyword');\n            
const value = new RequestBodySearch().agg(aggA).agg(aggB).toJSON();\n            expect(value).toEqual({\n                aggs: {\n                    user_term_agg: {\n                        terms: { field: 'user' }\n                    },\n                    keyword_term_agg: {\n                        terms: { field: 'keyword' }\n                    }\n                }\n            });\n        });\n\n        test('sets multiple aggs in a single call', () => {\n            const aggA = new TermsAggregation('user_term_agg', 'user');\n            const aggB = new TermsAggregation('keyword_term_agg', 'keyword');\n            const value = new RequestBodySearch().aggs([aggA, aggB]).toJSON();\n            expect(value).toEqual({\n                aggs: {\n                    user_term_agg: {\n                        terms: { field: 'user' }\n                    },\n                    keyword_term_agg: {\n                        terms: { field: 'keyword' }\n                    }\n                }\n            });\n        });\n\n        test('sets aggs with nested', () => {\n            const aggB = new TermsAggregation('keyword_term_agg', 'keyword');\n            const nestedAgg = new TermsAggregation('user_term_agg', 'user').agg(\n                aggB\n            );\n            const value = new RequestBodySearch().agg(nestedAgg).toJSON();\n            expect(value).toEqual({\n                aggs: {\n                    user_term_agg: {\n                        terms: { field: 'user' },\n                        aggs: {\n                            keyword_term_agg: {\n                                terms: { field: 'keyword' }\n                            }\n                        }\n                    }\n                }\n            });\n        });\n    });\n\n    describe('suggest setters', () => {\n        test('sets suggest option', () => {\n            const suggest = new TermSuggester(\n                'my-suggestion',\n                'message',\n 
               'tring out Elasticsearch'\n            );\n            const result = requestBodySearch().suggest(suggest).toJSON();\n            expect(result).toEqual({\n                suggest: suggest.toJSON()\n            });\n        });\n\n        test('sets suggestText option', () => {\n            const result = requestBodySearch()\n                .suggestText('suggest-text')\n                .toJSON();\n            expect(result).toEqual({\n                suggest: { text: 'suggest-text' }\n            });\n        });\n    });\n\n    describe('sort setters', () => {\n        test('sets sort option', () => {\n            const sortChannel = new Sort('channel', 'desc');\n            const result = requestBodySearch().sort(sortChannel).toJSON();\n            expect(result).toEqual({\n                sort: [recursiveToJSON(sortChannel)]\n            });\n        });\n\n        test('sets sorts option', () => {\n            const sortChannel = new Sort('channel', 'desc');\n            const sortCategories = new Sort('categories', 'desc');\n            const result = requestBodySearch()\n                .sorts([sortChannel, sortCategories])\n                .toJSON();\n            expect(result).toEqual({\n                sort: [\n                    recursiveToJSON(sortChannel),\n                    recursiveToJSON(sortCategories)\n                ]\n            });\n        });\n    });\n\n    describe('kNN setters', () => {\n        test('sets kNN option with query vector builder', () => {\n            const kNNVectorBuilder = new KNN('my_field', 5, 10)\n                .similarity(0.6)\n                .filter(new TermQuery('field', 'value'))\n                .queryVectorBuilder('model_123', 'Sample model text');\n            const result = requestBodySearch().kNN(kNNVectorBuilder).toJSON();\n            expect(result).toEqual({\n                knn: kNNVectorBuilder.toJSON()\n            });\n        });\n\n        test('kNN setup query vector builder', 
() => {\n            const kNNVectorBuilder = new KNN('my_field', 5, 10)\n                .similarity(0.6)\n                .filter(new TermQuery('field', 'value'))\n                .queryVectorBuilder('model_123', 'Sample model text');\n            const value = new RequestBodySearch()\n                .kNN(kNNVectorBuilder)\n                .toJSON();\n            expect(value).toEqual({\n                knn: {\n                    field: 'my_field',\n                    k: 5,\n                    filter: [\n                        {\n                            term: {\n                                field: 'value'\n                            }\n                        }\n                    ],\n                    num_candidates: 10,\n                    query_vector_builder: {\n                        text_embeddings: {\n                            model_id: 'model_123',\n                            model_text: 'Sample model text'\n                        }\n                    },\n                    similarity: 0.6\n                }\n            });\n        });\n\n        test('kNN setup query vector', () => {\n            const kNNVector = new KNN('my_field', 5, 10).queryVector([1, 2, 3]);\n            const value = new RequestBodySearch().kNN(kNNVector).toJSON();\n            expect(value).toEqual({\n                knn: {\n                    field: 'my_field',\n                    k: 5,\n                    filter: [],\n                    num_candidates: 10,\n                    query_vector: [1, 2, 3]\n                }\n            });\n        });\n\n        test('kNN setup query vector array', () => {\n            const kNNVector = new KNN('my_field', 5, 10).queryVector([1, 2, 3]);\n            const kNNVectorBuilder = new KNN('my_field', 5, 10)\n                .similarity(0.6)\n                .filter(new TermQuery('field', 'value'))\n                .queryVectorBuilder('model_123', 'Sample model text');\n            const value = new 
RequestBodySearch()\n                .kNN([kNNVector, kNNVectorBuilder])\n                .toJSON();\n            expect(value).toEqual({\n                knn: [\n                    {\n                        field: 'my_field',\n                        k: 5,\n                        filter: [],\n                        num_candidates: 10,\n                        query_vector: [1, 2, 3]\n                    },\n                    {\n                        field: 'my_field',\n                        filter: [\n                            {\n                                term: {\n                                    field: 'value'\n                                }\n                            }\n                        ],\n                        k: 5,\n                        num_candidates: 10,\n                        query_vector_builder: {\n                            text_embeddings: {\n                                model_id: 'model_123',\n                                model_text: 'Sample model text'\n                            }\n                        },\n                        similarity: 0.6\n                    }\n                ]\n            });\n        });\n    });\n\n    describe('source option variations', () => {\n        test.each([\n            {\n                name: 'sets source(str) option',\n                value: 'obj.*',\n                expected: 'obj.*'\n            },\n            {\n                name: 'sets source(bool) option',\n                value: false,\n                expected: false\n            },\n            {\n                name: 'sets source(arr) option',\n                value: ['obj1.*', 'obj2.*'],\n                expected: ['obj1.*', 'obj2.*']\n            },\n            {\n                name: 'sets source(obj) option',\n                value: {\n                    includes: ['obj1.*', 'obj2.*'],\n                    excludes: ['*.description']\n                },\n                expected: {\n    
                includes: ['obj1.*', 'obj2.*'],\n                    excludes: ['*.description']\n                }\n            }\n        ])('$name', ({ value, expected }) => {\n            const result = requestBodySearch().source(value).toJSON();\n            expect(result).toEqual({\n                _source: expected\n            });\n        });\n    });\n\n    describe('stored_fields option variations', () => {\n        test.each([\n            {\n                name: 'sets stored_fields(str) option',\n                value: '_none_',\n                expected: '_none_'\n            },\n            {\n                name: 'sets stored_fields(arr) option',\n                value: ['user', 'postDate'],\n                expected: ['user', 'postDate']\n            }\n        ])('$name', ({ value, expected }) => {\n            const result = requestBodySearch().storedFields(value).toJSON();\n            expect(result).toEqual({\n                stored_fields: expected\n            });\n        });\n    });\n\n    describe('runtime mapping setters', () => {\n        test('sets runtimeMapping option', () => {\n            const runtimeFieldA = new RuntimeField(\n                'keyword',\n                \"emit(doc['name'].value)\"\n            );\n            const result = requestBodySearch()\n                .runtimeMapping('test1', runtimeFieldA)\n                .toJSON();\n            expect(result).toEqual({\n                runtime_mappings: {\n                    test1: {\n                        type: 'keyword',\n                        script: {\n                            source: \"emit(doc['name'].value)\"\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets runtimeMappings option', () => {\n            const runtimeFieldA = new RuntimeField(\n                'keyword',\n                \"emit(doc['name'].value)\"\n            );\n            const runtimeFieldB = new 
RuntimeField(\n                'boolean',\n                \"emit(doc['qty'].value > 10)\"\n            );\n            const result = requestBodySearch()\n                .runtimeMappings({ test1: runtimeFieldA, test2: runtimeFieldB })\n                .toJSON();\n            expect(result).toEqual({\n                runtime_mappings: {\n                    test1: {\n                        type: 'keyword',\n                        script: {\n                            source: \"emit(doc['name'].value)\"\n                        }\n                    },\n                    test2: {\n                        type: 'boolean',\n                        script: {\n                            source: \"emit(doc['qty'].value > 10)\"\n                        }\n                    }\n                }\n            });\n        });\n\n        test('runtime mapping with lang and params', () => {\n            const runtimeFieldC = new RuntimeField(\n                'keyword',\n                \"emit(doc['my_field_name'].value * params.factor)\"\n            )\n                .lang('painless')\n                .params({ factor: 2.0 });\n            const result = requestBodySearch()\n                .runtimeMapping('test1', runtimeFieldC)\n                .toJSON();\n            expect(result).toEqual({\n                runtime_mappings: {\n                    test1: {\n                        type: 'keyword',\n                        script: {\n                            lang: 'painless',\n                            source: \"emit(doc['my_field_name'].value * params.factor)\",\n                            params: {\n                                factor: 2.0\n                            }\n                        }\n                    }\n                }\n            });\n        });\n    });\n\n    describe('script field setters', () => {\n        test('sets scriptField option', () => {\n            const scriptA = new Script(\n                'inline',\n            
    \"doc['my_field_name'].value * 2\"\n            ).lang('painless');\n            const result = requestBodySearch()\n                .scriptField('test1', scriptA)\n                .toJSON();\n            expect(result).toEqual({\n                script_fields: {\n                    test1: { script: scriptA.toJSON() }\n                }\n            });\n        });\n\n        test('sets scriptFields option', () => {\n            const scriptA = new Script(\n                'inline',\n                \"doc['my_field_name'].value * 2\"\n            ).lang('painless');\n            const scriptB = new Script(\n                'inline',\n                \"doc['my_field_name'].value * factor\"\n            )\n                .lang('painless')\n                .params({ factor: 2.0 });\n            const result = requestBodySearch()\n                .scriptFields({\n                    test1: scriptA,\n                    test2: scriptB\n                })\n                .toJSON();\n            expect(result).toEqual({\n                script_fields: {\n                    test1: { script: scriptA.toJSON() },\n                    test2: { script: scriptB.toJSON() }\n                }\n            });\n        });\n    });\n\n    describe('docvalue fields setter', () => {\n        test('sets docvalueFields option', () => {\n            const result = requestBodySearch()\n                .docvalueFields(['test1', 'test2'])\n                .toJSON();\n            expect(result).toEqual({\n                docvalue_fields: ['test1', 'test2']\n            });\n        });\n    });\n\n    describe('indices boost setters', () => {\n        test('sets indicesBoost option', () => {\n            const result = requestBodySearch()\n                .indicesBoost('alias1', 1.4)\n                .toJSON();\n            expect(result).toEqual({\n                indices_boost: [{ alias1: 1.4 }]\n            });\n        });\n\n        test('sets indexBoost option', () => {\n     
       const result = requestBodySearch()\n                .indexBoost('alias1', 1.4)\n                .toJSON();\n            expect(result).toEqual({\n                indices_boost: [{ alias1: 1.4 }]\n            });\n        });\n\n        test('sets multiple indices_boost', () => {\n            const value = new RequestBodySearch()\n                .indicesBoost('alias1', 1.4)\n                .indicesBoost('index*', 1.3)\n                .toJSON();\n            expect(value).toEqual({\n                indices_boost: [{ alias1: 1.4 }, { 'index*': 1.3 }]\n            });\n        });\n    });\n\n    describe('collapse setter', () => {\n        test('sets collapse option with field', () => {\n            const result = requestBodySearch().collapse('user').toJSON();\n            expect(result).toEqual({\n                collapse: { field: 'user' }\n            });\n        });\n\n        test('sets collapse with inner_hits option', () => {\n            const innerHits = new InnerHits()\n                .name('last_tweets')\n                .size(5)\n                .sort(new Sort('date', 'desc'));\n            const result = requestBodySearch()\n                .collapse(\n                    'user',\n                    new InnerHits()\n                        .name('last_tweets')\n                        .size(5)\n                        .sort(new Sort('date', 'desc')),\n                    4\n                )\n                .toJSON();\n            expect(result).toEqual({\n                collapse: {\n                    field: 'user',\n                    inner_hits: innerHits.toJSON(),\n                    max_concurrent_group_searches: 4\n                }\n            });\n        });\n    });\n\n    describe('searchAfter setter', () => {\n        test('sets searchAfter option', () => {\n            const result = requestBodySearch()\n                .searchAfter([1463538857, 'tweet#654323'])\n                .toJSON();\n            
expect(result).toEqual({\n                search_after: [1463538857, 'tweet#654323']\n            });\n        });\n    });\n\n    describe('rescore handling', () => {\n        test('sets two rescores as array', () => {\n            const scoreScript = new Script(\n                'inline',\n                'Math.log10(doc.likes.value + 2)'\n            );\n            const rescoreA = new Rescore(\n                50,\n                new MatchPhraseQuery('message', 'the quick brown').slop(2)\n            )\n                .queryWeight(0.7)\n                .rescoreQueryWeight(1.2);\n            const rescoreB = new Rescore(\n                10,\n                new FunctionScoreQuery().function(\n                    new ScriptScoreFunction(scoreScript)\n                )\n            ).scoreMode('multiply');\n\n            const value = new RequestBodySearch()\n                .rescore(rescoreA)\n                .rescore(rescoreB)\n                .toJSON();\n\n            expect(value).toEqual({\n                rescore: [\n                    {\n                        query: {\n                            rescore_query: {\n                                match_phrase: {\n                                    message: {\n                                        query: 'the quick brown',\n                                        slop: 2\n                                    }\n                                }\n                            },\n                            query_weight: 0.7,\n                            rescore_query_weight: 1.2\n                        },\n                        window_size: 50\n                    },\n                    {\n                        query: {\n                            rescore_query: {\n                                function_score: {\n                                    functions: [\n                                        {\n                                            script_score: {\n                              
                  script: {\n                                                    inline: 'Math.log10(doc.likes.value + 2)'\n                                                }\n                                            }\n                                        }\n                                    ]\n                                }\n                            },\n                            score_mode: 'multiply'\n                        },\n                        window_size: 10\n                    }\n                ]\n            });\n        });\n\n        test('adds third rescore to existing array', () => {\n            const scoreScript = new Script(\n                'inline',\n                'Math.log10(doc.likes.value + 2)'\n            );\n            const rescoreA = new Rescore(\n                50,\n                new MatchPhraseQuery('message', 'the quick brown').slop(2)\n            )\n                .queryWeight(0.7)\n                .rescoreQueryWeight(1.2);\n            const rescoreB = new Rescore(\n                10,\n                new FunctionScoreQuery().function(\n                    new ScriptScoreFunction(scoreScript)\n                )\n            ).scoreMode('multiply');\n            const rescoreC = new Rescore(\n                25,\n                new MatchPhraseQuery('message', 'fox').slop(2)\n            )\n                .queryWeight(0.9)\n                .rescoreQueryWeight(1.5);\n\n            const value = new RequestBodySearch()\n                .rescore(rescoreA)\n                .rescore(rescoreB)\n                .rescore(rescoreC)\n                .toJSON();\n\n            expect(value).toEqual({\n                rescore: [\n                    {\n                        query: {\n                            rescore_query: {\n                                match_phrase: {\n                                    message: {\n                                        query: 'the quick brown',\n                         
               slop: 2\n                                    }\n                                }\n                            },\n                            query_weight: 0.7,\n                            rescore_query_weight: 1.2\n                        },\n                        window_size: 50\n                    },\n                    {\n                        query: {\n                            rescore_query: {\n                                function_score: {\n                                    functions: [\n                                        {\n                                            script_score: {\n                                                script: {\n                                                    inline: 'Math.log10(doc.likes.value + 2)'\n                                                }\n                                            }\n                                        }\n                                    ]\n                                }\n                            },\n                            score_mode: 'multiply'\n                        },\n                        window_size: 10\n                    },\n                    {\n                        query: {\n                            rescore_query: {\n                                match_phrase: {\n                                    message: {\n                                        query: 'fox',\n                                        slop: 2\n                                    }\n                                }\n                            },\n                            query_weight: 0.9,\n                            rescore_query_weight: 1.5\n                        },\n                        window_size: 25\n                    }\n                ]\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/rescore.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { Rescore, rescore, MatchPhraseQuery } from '../../src';\n\ndescribe('Rescore', () => {\n    describe('constructor', () => {\n        test('sets arguments', () => {\n            const rescoreQry = new MatchPhraseQuery(\n                'message',\n                'the quick brown'\n            );\n            const valueA = new Rescore(50, rescoreQry).toJSON();\n            const valueB = new Rescore()\n                .windowSize(50)\n                .rescoreQuery(rescoreQry)\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                query: {\n                    rescore_query: {\n                        match_phrase: { message: 'the quick brown' }\n                    }\n                },\n                window_size: 50\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n\n    describe('parameter validation', () => {\n        test.each([\n            {\n                name: 'rescoreQuery() throws TypeError for null parameter',\n                value: null\n            },\n            {\n                name: 'rescoreQuery() throws TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            expect(() => new Rescore().rescoreQuery(value)).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n    });\n\n    describe('scoreMode() validation', () => {\n        test.each([\n            { name: 'accepts valid scoreMode: total', value: 'total' },\n            {\n                name: 'accepts valid scoreMode: TOTAL (case-insensitive)',\n                value: 'TOTAL'\n            },\n            { name: 'accepts valid scoreMode: multiply', value: 'multiply' },\n            {\n                name: 'accepts valid scoreMode: MULTIPLY (case-insensitive)',\n        
        value: 'MULTIPLY'\n            },\n            { name: 'accepts valid scoreMode: avg', value: 'avg' },\n            {\n                name: 'accepts valid scoreMode: AVG (case-insensitive)',\n                value: 'AVG'\n            },\n            { name: 'accepts valid scoreMode: max', value: 'max' },\n            {\n                name: 'accepts valid scoreMode: MAX (case-insensitive)',\n                value: 'MAX'\n            },\n            { name: 'accepts valid scoreMode: min', value: 'min' },\n            {\n                name: 'accepts valid scoreMode: MIN (case-insensitive)',\n                value: 'MIN'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => rescore().scoreMode(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null scoreMode', value: null },\n            {\n                name: 'throws for invalid scoreMode',\n                value: 'invalid_score_mode'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => rescore().scoreMode(value)).toThrow(\n                /The 'score_mode' parameter should be one of/\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets rescoreQuery option', () => {\n            const rescoreQry = new MatchPhraseQuery(\n                'message',\n                'the quick brown'\n            );\n            const result = rescore().rescoreQuery(rescoreQry).toJSON();\n            const expected = {\n                query: { rescore_query: rescoreQry.toJSON() }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets queryWeight option', () => {\n            const result = rescore().queryWeight(0.7).toJSON();\n            const expected = {\n                query: { query_weight: 0.7 }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets rescoreQueryWeight option', () => {\n            
const result = rescore().rescoreQueryWeight(1.7).toJSON();\n            const expected = {\n                query: { rescore_query_weight: 1.7 }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets window_size option', () => {\n            const value = new Rescore().windowSize(50).toJSON();\n            const expected = {\n                query: {},\n                window_size: 50\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/runtime-field.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport RuntimeField from '../../src/core/runtime-field';\n\ndescribe('RuntimeField', () => {\n    describe('constructor', () => {\n        test('sets arguments', () => {\n            const valueA = new RuntimeField(\n                'keyword',\n                \"emit(doc['name'].value)\"\n            ).toJSON();\n\n            const expected = {\n                type: 'keyword',\n                script: {\n                    source: \"emit(doc['name'].value)\"\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n\n        test('throws error when type is not set', () => {\n            const field = new RuntimeField();\n            expect(() => field.toJSON()).toThrow(\n                new Error('`type` should be set')\n            );\n        });\n\n        test('throws error when script is not set', () => {\n            const field = new RuntimeField('keyword');\n            expect(() => field.toJSON()).toThrow(\n                new Error('`script` should be set')\n            );\n        });\n    });\n\n    describe('type() validation', () => {\n        test.each([\n            { name: 'accepts valid value: boolean', value: 'boolean' },\n            {\n                name: 'accepts valid value: BOOLEAN (case-insensitive)',\n                value: 'BOOLEAN'\n            },\n            { name: 'accepts valid value: composite', value: 'composite' },\n            {\n                name: 'accepts valid value: COMPOSITE (case-insensitive)',\n                value: 'COMPOSITE'\n            },\n            { name: 'accepts valid value: date', value: 'date' },\n            {\n                name: 'accepts valid value: DATE (case-insensitive)',\n                value: 'DATE'\n            },\n            { name: 'accepts valid value: double', value: 'double' },\n            {\n                name: 'accepts valid value: DOUBLE (case-insensitive)',\n               
 value: 'DOUBLE'\n            },\n            { name: 'accepts valid value: geo_point', value: 'geo_point' },\n            {\n                name: 'accepts valid value: GEO_POINT (case-insensitive)',\n                value: 'GEO_POINT'\n            },\n            { name: 'accepts valid value: ip', value: 'ip' },\n            { name: 'accepts valid value: IP (case-insensitive)', value: 'IP' },\n            { name: 'accepts valid value: keyword', value: 'keyword' },\n            {\n                name: 'accepts valid value: KEYWORD (case-insensitive)',\n                value: 'KEYWORD'\n            },\n            { name: 'accepts valid value: long', value: 'long' },\n            {\n                name: 'accepts valid value: LONG (case-insensitive)',\n                value: 'LONG'\n            },\n            { name: 'accepts valid value: lookup', value: 'lookup' },\n            {\n                name: 'accepts valid value: LOOKUP (case-insensitive)',\n                value: 'LOOKUP'\n            }\n        ])('$name', ({ value }) => {\n            expect(() =>\n                new RuntimeField('keyword', \"emit(doc['name'].value)\").type(\n                    value\n                )\n            ).not.toThrow();\n        });\n\n        test('throws for null value', () => {\n            expect(() =>\n                new RuntimeField('keyword', \"emit(doc['name'].value)\").type(\n                    null\n                )\n            ).toThrow(\n                expect.objectContaining({\n                    name: 'TypeError',\n                    message: expect.stringContaining('toLowerCase')\n                })\n            );\n        });\n\n        test('throws for invalid value', () => {\n            expect(() =>\n                new RuntimeField('keyword', \"emit(doc['name'].value)\").type(\n                    'invalid'\n                )\n            ).toThrow(\n                new Error(\n                    '`type` must be one of boolean, composite, 
date, double, geo_point, ip, keyword, long, lookup'\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('script method sets script source', () => {\n            const fieldA = new RuntimeField('keyword');\n            fieldA.script(\"emit(doc['name'].value)\");\n            const expected = {\n                type: 'keyword',\n                script: {\n                    source: \"emit(doc['name'].value)\"\n                }\n            };\n            expect(fieldA.toJSON()).toEqual(expected);\n        });\n\n        test('sets script, lang and params', () => {\n            const fieldA = new RuntimeField('keyword');\n            fieldA.script(\"emit(doc['my_field_name'].value * params.factor)\");\n            fieldA.lang('painless');\n            fieldA.params({ factor: 2.0 });\n            const expected = {\n                type: 'keyword',\n                script: {\n                    lang: 'painless',\n                    source: \"emit(doc['my_field_name'].value * params.factor)\",\n                    params: {\n                        factor: 2.0\n                    }\n                }\n            };\n            expect(fieldA.toJSON()).toEqual(expected);\n        });\n\n        test(\"doesn't set lang and params if script is not set\", () => {\n            const fieldA = new RuntimeField('keyword');\n            fieldA.lang('painless');\n            fieldA.params({ factor: 2.0 });\n            expect(() => fieldA.toJSON()).toThrow(\n                new Error('`script` should be set')\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/script.test.js",
    "content": "import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';\nimport { Script, script } from '../../src';\n\ndescribe('Script', () => {\n    describe('options', () => {\n        test('sets inline option', () => {\n            const result = script()\n                .inline(\"doc['my_field'] * multiplier\")\n                .toJSON();\n            const expected = { inline: \"doc['my_field'] * multiplier\" };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets source option', () => {\n            const result = script()\n                .source(\"doc['my_field'] * multiplier\")\n                .toJSON();\n            const expected = { source: \"doc['my_field'] * multiplier\" };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets file option', () => {\n            const result = script().file('calculate-score').toJSON();\n            const expected = { file: 'calculate-score' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets stored option', () => {\n            const result = script().stored('calculate-score').toJSON();\n            const expected = { stored: 'calculate-score' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets id option', () => {\n            const result = script().id('calculate-score').toJSON();\n            const expected = { id: 'calculate-score' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets lang option', () => {\n            const result = script().lang('painless').toJSON();\n            const expected = { lang: 'painless' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets params option', () => {\n            const result = script().params({ my_modifier: 2 }).toJSON();\n            const expected = { params: { my_modifier: 2 } };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    
describe('constructor', () => {\n        test('sets inline via constructor', () => {\n            const valueA = new Script(\n                'inline',\n                'params.my_var1 / params.my_var2'\n            ).toJSON();\n            const valueB = new Script()\n                .inline('params.my_var1 / params.my_var2')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual({\n                inline: 'params.my_var1 / params.my_var2'\n            });\n        });\n\n        test('sets source via constructor', () => {\n            const valueA = new Script(\n                'source',\n                'params.my_var1 / params.my_var2'\n            ).toJSON();\n            const valueB = new Script()\n                .source('params.my_var1 / params.my_var2')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual({\n                source: 'params.my_var1 / params.my_var2'\n            });\n        });\n\n        test('sets file via constructor', () => {\n            const valueA = new Script('file', 'calculate-score').toJSON();\n            const valueB = new Script().file('calculate-score').toJSON();\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual({ file: 'calculate-score' });\n        });\n\n        test('sets stored via constructor', () => {\n            const valueA = new Script('stored', 'calculate-score').toJSON();\n            const valueB = new Script().stored('calculate-score').toJSON();\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual({ stored: 'calculate-score' });\n        });\n\n        test('sets id via constructor', () => {\n            const valueA = new Script('id', 'calculate-score').toJSON();\n            const valueB = new Script().id('calculate-score').toJSON();\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual({ id: 'calculate-score' 
});\n        });\n\n        test('throws error for invalid script type', () => {\n            expect(() => new Script('invalid_script_type', 'src')).toThrow(\n                new Error('`type` must be one of `inline`, `stored`, `file`')\n            );\n        });\n    });\n\n    describe('mixed representation', () => {\n        let spy;\n\n        beforeEach(() => {\n            spy = vi.spyOn(console, 'warn').mockImplementation(() => {});\n        });\n\n        afterEach(() => {\n            spy.mockRestore();\n        });\n\n        test('logs warnings when overwriting script source', () => {\n            const value = new Script()\n                .file('calculate-score')\n                .stored('calculate-score')\n                .toJSON();\n            const expected = {\n                stored: 'calculate-score'\n            };\n            expect(value).toEqual(expected);\n\n            expect(spy).toHaveBeenCalledTimes(2);\n            expect(spy).toHaveBeenNthCalledWith(\n                1,\n                '[Script] Script source(`inline`/`source`/`stored`/`id`/`file`) was already specified!'\n            );\n            expect(spy).toHaveBeenNthCalledWith(2, '[Script] Overwriting.');\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/search-template.test.js",
    "content": "import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';\nimport {\n    SearchTemplate,\n    searchTemplate,\n    matchQuery,\n    termQuery\n} from '../../src';\n\ndescribe('SearchTemplate', () => {\n    describe('constructor', () => {\n        test('sets inline via constructor', () => {\n            const valueA = new SearchTemplate(\n                'inline',\n                '{ \"query\": { \"terms\": {{#toJson}}statuses{{/toJson}} }}'\n            ).toJSON();\n            const valueB = new SearchTemplate()\n                .inline(\n                    '{ \"query\": { \"terms\": {{#toJson}}statuses{{/toJson}} }}'\n                )\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual({\n                inline: '{ \"query\": { \"terms\": {{#toJson}}statuses{{/toJson}} }}'\n            });\n        });\n\n        test('sets file via constructor', () => {\n            const valueA = new SearchTemplate(\n                'file',\n                'storedTemplate'\n            ).toJSON();\n            const valueB = new SearchTemplate().file('storedTemplate').toJSON();\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual({ file: 'storedTemplate' });\n        });\n\n        test('sets id via constructor', () => {\n            const valueA = new SearchTemplate('id', 'indexedTemplate').toJSON();\n            const valueB = new SearchTemplate().id('indexedTemplate').toJSON();\n            expect(valueA).toEqual(valueB);\n            expect(valueA).toEqual({ id: 'indexedTemplate' });\n        });\n\n        test('throws error for invalid script type', () => {\n            expect(\n                () => new SearchTemplate('invalid_script_type', 'src')\n            ).toThrow(\n                new Error(\n                    '`type` must be one of `inline`, `id`, `indexed`, `file`'\n                )\n            );\n        });\n    });\n\n    
describe('options', () => {\n        test('sets inline option', () => {\n            const result = searchTemplate()\n                .inline({\n                    query: matchQuery('{{my_field}}', '{{my_value}}'),\n                    size: '{{my_size}}'\n                })\n                .toJSON();\n            const expected = {\n                inline: {\n                    query: { match: { '{{my_field}}': '{{my_value}}' } },\n                    size: '{{my_size}}'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets file option', () => {\n            const result = searchTemplate().file('storedTemplate').toJSON();\n            const expected = { file: 'storedTemplate' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets id option', () => {\n            const result = searchTemplate().id('indexedTemplate').toJSON();\n            const expected = { id: 'indexedTemplate' };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets params option', () => {\n            const result = searchTemplate()\n                .params({\n                    my_field: 'message',\n                    my_value: 'some message',\n                    my_size: 5\n                })\n                .toJSON();\n            const expected = {\n                params: {\n                    my_field: 'message',\n                    my_value: 'some message',\n                    my_size: 5\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets indexed option (maps to id)', () => {\n            const result = searchTemplate().indexed('indexedTemplate').toJSON();\n            const expected = { id: 'indexedTemplate' };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('mixed representation', () => {\n        let spy;\n\n        beforeEach(() => {\n            spy = 
vi.spyOn(console, 'warn').mockImplementation(() => {});\n        });\n\n        afterEach(() => {\n            spy.mockRestore();\n        });\n\n        test('logs warnings when overwriting template source', () => {\n            const value = new SearchTemplate()\n                .file('storedTemplate')\n                .id('indexedTemplate')\n                .toJSON();\n            const expected = {\n                id: 'indexedTemplate'\n            };\n            expect(value).toEqual(expected);\n\n            expect(spy).toHaveBeenCalledTimes(2);\n            expect(spy).toHaveBeenNthCalledWith(\n                1,\n                '[SearchTemplate] Search template source(`inline`/`id`/`file`) was already specified!'\n            );\n            expect(spy).toHaveBeenNthCalledWith(\n                2,\n                '[SearchTemplate] Overwriting.'\n            );\n        });\n    });\n\n    describe('toJSON', () => {\n        test('can handle elastic-builder objs', () => {\n            const value = new SearchTemplate(\n                'inline',\n                '{ \"query\": { \"bool\": { \"must\": {{#toJson}}clauses{{/toJson}} } } }'\n            )\n                .params({\n                    clauses: [\n                        termQuery('user', 'foo'),\n                        termQuery('user', 'bar')\n                    ]\n                })\n                .toJSON();\n            const expected = {\n                inline: '{ \"query\": { \"bool\": { \"must\": {{#toJson}}clauses{{/toJson}} } } }',\n                params: {\n                    clauses: [\n                        { term: { user: 'foo' } },\n                        { term: { user: 'bar' } }\n                    ]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/sort.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { Sort, BoolQuery, TermQuery, Script } from '../../src';\n\nconst getInstance = order => new Sort('my_field', order);\n\ndescribe('Sort', () => {\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('nestedFilter()', () => {\n                expect(() => new Sort().nestedFilter(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('script()', () => {\n                expect(() => new Sort().script(value)).toThrow(\n                    new TypeError('Argument must be an instance of Script')\n                );\n            });\n        });\n    });\n\n    describe('order() validation', () => {\n        test.each([\n            { name: 'accepts valid order: asc', value: 'asc' },\n            {\n                name: 'accepts valid order: ASC (case-insensitive)',\n                value: 'ASC'\n            },\n            { name: 'accepts valid order: desc', value: 'desc' },\n            {\n                name: 'accepts valid order: DESC (case-insensitive)',\n                value: 'DESC'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().order(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null order', value: null },\n            { name: 'throws for invalid order', value: 'invalid_order' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().order(value)).toThrow(\n                new Error(\n                    \"The 'order' parameter should be one of 'asc' or 'desc'\"\n                )\n            );\n      
  });\n    });\n\n    describe('mode() validation', () => {\n        test.each([\n            { name: 'accepts valid mode: avg', value: 'avg' },\n            {\n                name: 'accepts valid mode: AVG (case-insensitive)',\n                value: 'AVG'\n            },\n            { name: 'accepts valid mode: min', value: 'min' },\n            {\n                name: 'accepts valid mode: MIN (case-insensitive)',\n                value: 'MIN'\n            },\n            { name: 'accepts valid mode: max', value: 'max' },\n            {\n                name: 'accepts valid mode: MAX (case-insensitive)',\n                value: 'MAX'\n            },\n            { name: 'accepts valid mode: sum', value: 'sum' },\n            {\n                name: 'accepts valid mode: SUM (case-insensitive)',\n                value: 'SUM'\n            },\n            { name: 'accepts valid mode: median', value: 'median' },\n            {\n                name: 'accepts valid mode: MEDIAN (case-insensitive)',\n                value: 'MEDIAN'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().mode(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null mode', value: null },\n            { name: 'throws for invalid mode', value: 'invalid_mode' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().mode(value)).toThrow(\n                /The 'mode' parameter should be one of/\n            );\n        });\n    });\n\n    describe('distanceType() validation', () => {\n        test.each([\n            { name: 'accepts valid distanceType: plane', value: 'plane' },\n            {\n                name: 'accepts valid distanceType: PLANE (case-insensitive)',\n                value: 'PLANE'\n            },\n            { name: 'accepts valid distanceType: arc', value: 'arc' },\n            {\n                name: 'accepts valid distanceType: ARC (case-insensitive)',\n            
    value: 'ARC'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().distanceType(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null distanceType', value: null },\n            {\n                name: 'throws for invalid distanceType',\n                value: 'invalid_distance_type'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().distanceType(value)).toThrow(\n                new Error(\n                    \"The 'distance_type' parameter should be one of 'plane' or 'arc'\"\n                )\n            );\n        });\n    });\n\n    describe('unit() validation', () => {\n        test.each([\n            { name: 'accepts valid unit: in', value: 'in' },\n            { name: 'accepts valid unit: inch', value: 'inch' },\n            { name: 'accepts valid unit: yd', value: 'yd' },\n            { name: 'accepts valid unit: yards', value: 'yards' },\n            { name: 'accepts valid unit: ft', value: 'ft' },\n            { name: 'accepts valid unit: feet', value: 'feet' },\n            { name: 'accepts valid unit: km', value: 'km' },\n            { name: 'accepts valid unit: kilometers', value: 'kilometers' },\n            { name: 'accepts valid unit: NM', value: 'NM' },\n            { name: 'accepts valid unit: nmi', value: 'nmi' },\n            {\n                name: 'accepts valid unit: nauticalmiles',\n                value: 'nauticalmiles'\n            },\n            { name: 'accepts valid unit: mm', value: 'mm' },\n            { name: 'accepts valid unit: millimeters', value: 'millimeters' },\n            { name: 'accepts valid unit: cm', value: 'cm' },\n            { name: 'accepts valid unit: centimeters', value: 'centimeters' },\n            { name: 'accepts valid unit: mi', value: 'mi' },\n            { name: 'accepts valid unit: miles', value: 'miles' },\n            { name: 'accepts valid unit: m', value: 'm' },\n        
    { name: 'accepts valid unit: meters', value: 'meters' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().unit(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null unit', value: null },\n            { name: 'throws for invalid unit', value: 'invalid_unit' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().unit(value)).toThrow(\n                /The 'unit' parameter should be one of/\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets nestedPath option', () => {\n            const result = getInstance().nestedPath('offer').toJSON();\n            const expected = {\n                my_field: { nested_path: 'offer' }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets nestedFilter option', () => {\n            const filterQry = new BoolQuery()\n                .filter(new TermQuery('user', 'Idan'))\n                .filter(new TermQuery('level', 'INFO'));\n            const result = getInstance().nestedFilter(filterQry).toJSON();\n            const expected = {\n                my_field: { nested_filter: filterQry.toJSON() }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets missing option', () => {\n            const result = getInstance().missing('_last').toJSON();\n            const expected = {\n                my_field: { missing: '_last' }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets unmappedType option', () => {\n            const result = getInstance().unmappedType('long').toJSON();\n            const expected = {\n                my_field: { unmapped_type: 'long' }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets reverse option', () => {\n            const result = getInstance().reverse(true).toJSON();\n            const 
expected = {\n                my_field: { reverse: true }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets nested option with filter and path', () => {\n            const filterQry = new BoolQuery()\n                .filter(new TermQuery('user', 'Idan'))\n                .filter(new TermQuery('level', 'INFO'));\n            const result = getInstance()\n                .nested({\n                    filter: filterQry,\n                    path: 'offer'\n                })\n                .toJSON();\n            const expected = {\n                my_field: {\n                    nested: {\n                        filter: filterQry.toJSON(),\n                        path: 'offer'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets nested option with path only', () => {\n            const result = getInstance()\n                .nested({\n                    path: 'offer'\n                })\n                .toJSON();\n            const expected = {\n                my_field: {\n                    nested: {\n                        path: 'offer'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('toJSON', () => {\n        test('super simple sort', () => {\n            expect(getInstance().toJSON()).toBe('my_field');\n        });\n\n        test('sets ordered', () => {\n            const value = getInstance('desc').toJSON();\n            const expected = { my_field: 'desc' };\n            expect(value).toEqual(expected);\n        });\n\n        test('_geo_distance sort', () => {\n            const value = getInstance('asc')\n                .geoDistance([-70, 40])\n                .unit('km')\n                .mode('min')\n                .distanceType('arc')\n                .toJSON();\n            const expected = {\n                
_geo_distance: {\n                    my_field: [-70, 40],\n                    order: 'asc',\n                    unit: 'km',\n                    mode: 'min',\n                    distance_type: 'arc'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('_script sort', () => {\n            const value = new Sort()\n                .order('asc')\n                .script(\n                    new Script(\n                        'inline',\n                        \"doc['field_name'].value * params.factor\"\n                    )\n                        .lang('painless')\n                        .params({ factor: 1.1 })\n                )\n                .type('number')\n                .toJSON();\n            const expected = {\n                _script: {\n                    type: 'number',\n                    script: {\n                        lang: 'painless',\n                        inline: \"doc['field_name'].value * params.factor\",\n                        params: {\n                            factor: 1.1\n                        }\n                    },\n                    order: 'asc'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('_format sort', () => {\n            const value = new Sort('date_field')\n                .order('asc')\n                .format('epoch_millis')\n                .toJSON();\n            const expected = {\n                date_field: {\n                    order: 'asc',\n                    format: 'epoch_millis'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/suggester.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { Suggester } from '../../src/core';\n\nconst getInstance = field => new Suggester('my_type', 'my_suggester', field);\n\ndescribe('Suggester', () => {\n    describe('constructor', () => {\n        test('suggesterType cannot be empty', () => {\n            expect(() => new Suggester()).toThrow(\n                new Error('Suggester `suggesterType` cannot be empty')\n            );\n        });\n\n        test('name cannot be empty', () => {\n            expect(() => new Suggester('my_type')).toThrow(\n                new Error('Suggester `name` cannot be empty')\n            );\n        });\n\n        test('can be instantiated', () => {\n            const value = getInstance().toJSON();\n            const expected = {\n                my_suggester: {\n                    my_type: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets field', () => {\n            const value = getInstance('my_field').toJSON();\n            const expected = {\n                my_suggester: {\n                    my_type: { field: 'my_field' }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sets field option', () => {\n            const result = getInstance().field('my_field').toJSON();\n            const expected = {\n                my_suggester: {\n                    my_type: { field: 'my_field' }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets size option', () => {\n            const result = getInstance().size(5).toJSON();\n            const expected = {\n                my_suggester: {\n                    my_type: { size: 5 }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/core-test/util.test.js",
    "content": "import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';\nimport { util } from '../../src/core';\n\ndescribe('util', () => {\n    describe('checkType', () => {\n        let spy;\n\n        beforeEach(() => {\n            spy = vi.spyOn(console, 'warn').mockImplementation(() => {});\n        });\n\n        afterEach(() => {\n            spy.mockRestore();\n        });\n\n        test('does not throw for valid instance', () => {\n            class TestClass {}\n            const instance = new TestClass();\n            expect(() => util.checkType(instance, TestClass)).not.toThrow();\n        });\n\n        test('throws TypeError for null value', () => {\n            class TestClass {}\n            expect(() => util.checkType(null, TestClass)).toThrow(\n                new TypeError('Argument must be an instance of TestClass')\n            );\n            expect(spy).toHaveBeenCalledTimes(1);\n            expect(spy).toHaveBeenCalledWith(\n                'Was expecting instance of TestClass but got null!'\n            );\n        });\n\n        test('throws TypeError for undefined value', () => {\n            class TestClass {}\n            expect(() => util.checkType(undefined, TestClass)).toThrow(\n                new TypeError('Argument must be an instance of TestClass')\n            );\n            expect(spy).toHaveBeenCalledTimes(1);\n            expect(spy).toHaveBeenCalledWith(\n                'Was expecting instance of TestClass but got undefined!'\n            );\n        });\n\n        test('throws TypeError for wrong instance type', () => {\n            class TestClass {}\n            class OtherClass {}\n            const instance = new OtherClass();\n            expect(() => util.checkType(instance, TestClass)).toThrow(\n                new TypeError('Argument must be an instance of TestClass')\n            );\n            expect(spy).toHaveBeenCalledTimes(1);\n        });\n\n        test('throws TypeError for 
primitive value', () => {\n            class TestClass {}\n            expect(() => util.checkType('string', TestClass)).toThrow(\n                new TypeError('Argument must be an instance of TestClass')\n            );\n            expect(spy).toHaveBeenCalledTimes(1);\n        });\n    });\n\n    describe('constructorWrapper', () => {\n        test('creates wrapper function for class constructor', () => {\n            class TestClass {\n                constructor(arg1, arg2) {\n                    this.arg1 = arg1;\n                    this.arg2 = arg2;\n                }\n            }\n\n            const wrapper = util.constructorWrapper(TestClass);\n            const instance = wrapper('value1', 'value2');\n\n            expect(instance).toBeInstanceOf(TestClass);\n            expect(instance.arg1).toBe('value1');\n            expect(instance.arg2).toBe('value2');\n        });\n\n        test('wrapper works with no arguments', () => {\n            class TestClass {\n                constructor() {\n                    this.initialized = true;\n                }\n            }\n\n            const wrapper = util.constructorWrapper(TestClass);\n            const instance = wrapper();\n\n            expect(instance).toBeInstanceOf(TestClass);\n            expect(instance.initialized).toBe(true);\n        });\n    });\n\n    describe('firstDigitPos', () => {\n        test.each([\n            { name: 'returns -1 for empty string', input: '', expected: -1 },\n            {\n                name: 'returns -1 for string with no digits',\n                input: 'no-digits-in-string',\n                expected: -1\n            },\n            {\n                name: 'returns 0 for string starting with digit',\n                input: '123abc',\n                expected: 0\n            },\n            {\n                name: 'returns correct index for digit in middle',\n                input: 'abc123def',\n                expected: 3\n            },\n            {\n 
               name: 'returns correct index for digit at end',\n                input: 'abc1',\n                expected: 3\n            },\n            {\n                name: 'finds first digit when multiple digits present',\n                input: 'field_name_1_test_2',\n                expected: 11\n            }\n        ])('$name', ({ input, expected }) => {\n            expect(util.firstDigitPos(input)).toBe(expected);\n        });\n    });\n\n    describe('invalidParam', () => {\n        let consoleLogSpy;\n        let consoleWarnSpy;\n\n        beforeEach(() => {\n            consoleLogSpy = vi\n                .spyOn(console, 'log')\n                .mockImplementation(() => {});\n            consoleWarnSpy = vi\n                .spyOn(console, 'warn')\n                .mockImplementation(() => {});\n        });\n\n        afterEach(() => {\n            consoleLogSpy.mockRestore();\n            consoleWarnSpy.mockRestore();\n        });\n\n        test('creates error function with string valid values', () => {\n            const errorFn = util.invalidParam(\n                'http://example.com',\n                'testParam',\n                'value1, value2'\n            );\n\n            expect(() => errorFn('invalidValue')).toThrow(\n                new Error(\n                    \"The 'testParam' parameter should be one of value1, value2\"\n                )\n            );\n            expect(consoleLogSpy).toHaveBeenCalledWith(\n                'See http://example.com'\n            );\n            expect(consoleWarnSpy).toHaveBeenCalledWith(\n                \"Got 'testParam' - 'invalidValue'\"\n            );\n        });\n\n        test('creates error function with non-string valid values', () => {\n            const validValues = new Set(['value1', 'value2']);\n            const errorFn = util.invalidParam(\n                'http://example.com',\n                'testParam',\n                validValues\n            );\n\n            expect(() 
=> errorFn('invalidValue')).toThrow(\n                expect.objectContaining({\n                    message: expect.stringContaining(\n                        \"The 'testParam' parameter should be one of\"\n                    )\n                })\n            );\n        });\n\n        test('supports custom reference URL', () => {\n            const errorFn = util.invalidParam(\n                'http://default.com',\n                'testParam',\n                'value1'\n            );\n\n            expect(() =>\n                errorFn('invalidValue', 'http://custom.com')\n            ).toThrow();\n            expect(consoleLogSpy).toHaveBeenCalledWith('See http://custom.com');\n        });\n\n        test('handles null reference URL', () => {\n            const errorFn = util.invalidParam(null, 'testParam', 'value1');\n\n            expect(() => errorFn('invalidValue')).toThrow(\n                new Error(\"The 'testParam' parameter should be one of value1\")\n            );\n            expect(consoleLogSpy).not.toHaveBeenCalled();\n        });\n    });\n\n    describe('setDefault', () => {\n        test('sets default value when key does not exist', () => {\n            const obj = { existing: 'value' };\n            const result = util.setDefault(obj, 'newKey', 'newValue');\n\n            expect(result).toBe(true);\n            expect(obj.newKey).toBe('newValue');\n        });\n\n        test('does not set value when key already exists', () => {\n            const obj = { existing: 'originalValue' };\n            const result = util.setDefault(obj, 'existing', 'newValue');\n\n            expect(result).toBe(false);\n            expect(obj.existing).toBe('originalValue');\n        });\n\n        test('does not overwrite existing undefined value', () => {\n            const obj = { existing: undefined };\n            const result = util.setDefault(obj, 'existing', 'newValue');\n\n            expect(result).toBe(false);\n            
expect(obj.existing).toBe(undefined);\n        });\n\n        test('does not overwrite existing null value', () => {\n            const obj = { existing: null };\n            const result = util.setDefault(obj, 'existing', 'newValue');\n\n            expect(result).toBe(false);\n            expect(obj.existing).toBe(null);\n        });\n    });\n});\n"
  },
  {
    "path": "test/index.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport * as esb from '../src';\n\ndescribe('RequestBodySearch', () => {\n    const exports = [\n        { name: 'RequestBodySearch', value: esb.RequestBodySearch },\n        { name: 'requestBodySearch', value: esb.requestBodySearch }\n    ];\n\n    exports.forEach(tc => {\n        test(`${tc.name} is exported`, () => {\n            expect(tc.value).toBeTruthy();\n        });\n    });\n});\n\ndescribe('Queries', () => {\n    const queryExports = [\n        // Basic Queries\n        { Class: 'MatchAllQuery', factory: 'matchAllQuery' },\n        { Class: 'MatchNoneQuery', factory: 'matchNoneQuery' },\n        // Full Text Queries\n        { Class: 'MatchQuery', factory: 'matchQuery' },\n        { Class: 'MatchPhraseQuery', factory: 'matchPhraseQuery' },\n        { Class: 'MatchPhrasePrefixQuery', factory: 'matchPhrasePrefixQuery' },\n        { Class: 'MultiMatchQuery', factory: 'multiMatchQuery' },\n        { Class: 'CommonTermsQuery', factory: 'commonTermsQuery' },\n        { Class: 'QueryStringQuery', factory: 'queryStringQuery' },\n        { Class: 'SimpleQueryStringQuery', factory: 'simpleQueryStringQuery' },\n        // Term Level Queries\n        { Class: 'TermQuery', factory: 'termQuery' },\n        { Class: 'TermsQuery', factory: 'termsQuery' },\n        { Class: 'TermsSetQuery', factory: 'termsSetQuery' },\n        { Class: 'RangeQuery', factory: 'rangeQuery' },\n        { Class: 'ExistsQuery', factory: 'existsQuery' },\n        { Class: 'PrefixQuery', factory: 'prefixQuery' },\n        { Class: 'WildcardQuery', factory: 'wildcardQuery' },\n        { Class: 'RegexpQuery', factory: 'regexpQuery' },\n        { Class: 'FuzzyQuery', factory: 'fuzzyQuery' },\n        { Class: 'TypeQuery', factory: 'typeQuery' },\n        { Class: 'IdsQuery', factory: 'idsQuery' },\n        // Compound Queries\n        { Class: 'ConstantScoreQuery', factory: 'constantScoreQuery' },\n        { Class: 'BoolQuery', 
factory: 'boolQuery' },\n        { Class: 'DisMaxQuery', factory: 'disMaxQuery' },\n        { Class: 'FunctionScoreQuery', factory: 'functionScoreQuery' },\n        { Class: 'BoostingQuery', factory: 'boostingQuery' },\n        // Joining Queries\n        { Class: 'NestedQuery', factory: 'nestedQuery' },\n        { Class: 'HasChildQuery', factory: 'hasChildQuery' },\n        { Class: 'HasParentQuery', factory: 'hasParentQuery' },\n        { Class: 'ParentIdQuery', factory: 'parentIdQuery' },\n        // Geo Queries\n        { Class: 'GeoShapeQuery', factory: 'geoShapeQuery' },\n        { Class: 'GeoBoundingBoxQuery', factory: 'geoBoundingBoxQuery' },\n        { Class: 'GeoDistanceQuery', factory: 'geoDistanceQuery' },\n        { Class: 'GeoPolygonQuery', factory: 'geoPolygonQuery' },\n        // Specialized Queries\n        { Class: 'MoreLikeThisQuery', factory: 'moreLikeThisQuery' },\n        { Class: 'ScriptQuery', factory: 'scriptQuery' },\n        { Class: 'ScriptScoreQuery', factory: 'scriptScoreQuery' },\n        { Class: 'PercolateQuery', factory: 'percolateQuery' },\n        { Class: 'DistanceFeatureQuery', factory: 'distanceFeatureQuery' },\n        // Span Queries\n        { Class: 'SpanTermQuery', factory: 'spanTermQuery' },\n        { Class: 'SpanMultiTermQuery', factory: 'spanMultiTermQuery' },\n        { Class: 'SpanFirstQuery', factory: 'spanFirstQuery' },\n        { Class: 'SpanNearQuery', factory: 'spanNearQuery' },\n        { Class: 'SpanOrQuery', factory: 'spanOrQuery' },\n        { Class: 'SpanNotQuery', factory: 'spanNotQuery' },\n        { Class: 'SpanContainingQuery', factory: 'spanContainingQuery' },\n        { Class: 'SpanWithinQuery', factory: 'spanWithinQuery' },\n        { Class: 'SpanFieldMaskingQuery', factory: 'spanFieldMaskingQuery' },\n        // Vector Queries\n        { Class: 'SparseVectorQuery', factory: 'sparseVectorQuery' },\n        { Class: 'SemanticQuery', factory: 'semanticQuery' }\n    ];\n\n    queryExports.forEach(tc => 
{\n        test(`${tc.Class} is exported`, () => {\n            expect(esb[tc.Class]).toBeTruthy();\n        });\n\n        test(`${tc.factory} factory is exported`, () => {\n            const factoryResult = esb[tc.factory]();\n            expect(factoryResult).toBeTruthy();\n        });\n    });\n});\n\ndescribe('Aggregations', () => {\n    const aggregationExports = [\n        // Metrics Aggregations\n        { Class: 'AvgAggregation', factory: 'avgAggregation' },\n        { Class: 'CardinalityAggregation', factory: 'cardinalityAggregation' },\n        {\n            Class: 'ExtendedStatsAggregation',\n            factory: 'extendedStatsAggregation'\n        },\n        { Class: 'GeoBoundsAggregation', factory: 'geoBoundsAggregation' },\n        { Class: 'GeoCentroidAggregation', factory: 'geoCentroidAggregation' },\n        { Class: 'MaxAggregation', factory: 'maxAggregation' },\n        { Class: 'MinAggregation', factory: 'minAggregation' },\n        { Class: 'PercentilesAggregation', factory: 'percentilesAggregation' },\n        {\n            Class: 'PercentileRanksAggregation',\n            factory: 'percentileRanksAggregation'\n        },\n        {\n            Class: 'ScriptedMetricAggregation',\n            factory: 'scriptedMetricAggregation'\n        },\n        { Class: 'StatsAggregation', factory: 'statsAggregation' },\n        { Class: 'SumAggregation', factory: 'sumAggregation' },\n        { Class: 'TopHitsAggregation', factory: 'topHitsAggregation' },\n        { Class: 'ValueCountAggregation', factory: 'valueCountAggregation' },\n        // Bucket Aggregations\n        {\n            Class: 'AdjacencyMatrixAggregation',\n            factory: 'adjacencyMatrixAggregation'\n        },\n        { Class: 'ChildrenAggregation', factory: 'childrenAggregation' },\n        { Class: 'CompositeAggregation', factory: 'compositeAggregation' },\n        {\n            Class: 'DateHistogramAggregation',\n            factory: 'dateHistogramAggregation'\n        
},\n        {\n            Class: 'AutoDateHistogramAggregation',\n            factory: 'autoDateHistogramAggregation'\n        },\n        {\n            Class: 'VariableWidthHistogramAggregation',\n            factory: 'variableWidthHistogramAggregation'\n        },\n        { Class: 'DateRangeAggregation', factory: 'dateRangeAggregation' },\n        {\n            Class: 'DiversifiedSamplerAggregation',\n            factory: 'diversifiedSamplerAggregation'\n        },\n        { Class: 'FilterAggregation', factory: 'filterAggregation' },\n        { Class: 'FiltersAggregation', factory: 'filtersAggregation' },\n        { Class: 'GeoDistanceAggregation', factory: 'geoDistanceAggregation' },\n        { Class: 'GeoHashGridAggregation', factory: 'geoHashGridAggregation' },\n        { Class: 'GeoHexGridAggregation', factory: 'geoHexGridAggregation' },\n        { Class: 'GeoTileGridAggregation', factory: 'geoTileGridAggregation' },\n        { Class: 'GlobalAggregation', factory: 'globalAggregation' },\n        { Class: 'HistogramAggregation', factory: 'histogramAggregation' },\n        { Class: 'IpRangeAggregation', factory: 'ipRangeAggregation' },\n        { Class: 'MissingAggregation', factory: 'missingAggregation' },\n        { Class: 'NestedAggregation', factory: 'nestedAggregation' },\n        { Class: 'ParentAggregation', factory: 'parentAggregation' },\n        { Class: 'RangeAggregation', factory: 'rangeAggregation' },\n        { Class: 'RareTermsAggregation', factory: 'rareTermsAggregation' },\n        {\n            Class: 'ReverseNestedAggregation',\n            factory: 'reverseNestedAggregation'\n        },\n        { Class: 'SamplerAggregation', factory: 'samplerAggregation' },\n        {\n            Class: 'SignificantTermsAggregation',\n            factory: 'significantTermsAggregation'\n        },\n        {\n            Class: 'SignificantTextAggregation',\n            factory: 'significantTextAggregation'\n        },\n        { Class: 
'TermsAggregation', factory: 'termsAggregation' },\n        // Pipeline Aggregations\n        { Class: 'AvgBucketAggregation', factory: 'avgBucketAggregation' },\n        { Class: 'DerivativeAggregation', factory: 'derivativeAggregation' },\n        { Class: 'MaxBucketAggregation', factory: 'maxBucketAggregation' },\n        { Class: 'MinBucketAggregation', factory: 'minBucketAggregation' },\n        { Class: 'SumBucketAggregation', factory: 'sumBucketAggregation' },\n        { Class: 'StatsBucketAggregation', factory: 'statsBucketAggregation' },\n        {\n            Class: 'ExtendedStatsBucketAggregation',\n            factory: 'extendedStatsBucketAggregation'\n        },\n        {\n            Class: 'PercentilesBucketAggregation',\n            factory: 'percentilesBucketAggregation'\n        },\n        {\n            Class: 'MovingAverageAggregation',\n            factory: 'movingAverageAggregation'\n        },\n        {\n            Class: 'MovingFunctionAggregation',\n            factory: 'movingFunctionAggregation'\n        },\n        {\n            Class: 'CumulativeSumAggregation',\n            factory: 'cumulativeSumAggregation'\n        },\n        {\n            Class: 'BucketScriptAggregation',\n            factory: 'bucketScriptAggregation'\n        },\n        {\n            Class: 'BucketSelectorAggregation',\n            factory: 'bucketSelectorAggregation'\n        },\n        { Class: 'BucketSortAggregation', factory: 'bucketSortAggregation' },\n        {\n            Class: 'SerialDifferencingAggregation',\n            factory: 'serialDifferencingAggregation'\n        },\n        // Matrix Aggregations\n        { Class: 'MatrixStatsAggregation', factory: 'matrixStatsAggregation' }\n    ];\n\n    aggregationExports.forEach(tc => {\n        test(`${tc.Class} is exported`, () => {\n            expect(esb[tc.Class]).toBeTruthy();\n        });\n\n        test(`${tc.factory} factory is exported`, () => {\n            
expect(esb[tc.factory]).toBeTruthy();\n        });\n    });\n});\n\ndescribe('Composite Aggregation values sources', () => {\n    const valuesSources = [\n        { Class: 'TermsValuesSource', factory: 'termsValuesSource' },\n        { Class: 'HistogramValuesSource', factory: 'histogramValuesSource' },\n        {\n            Class: 'DateHistogramValuesSource',\n            factory: 'dateHistogramValuesSource'\n        }\n    ];\n\n    const { CompositeAggregation } = esb;\n\n    valuesSources.forEach(tc => {\n        test(`${tc.Class} is exported`, () => {\n            expect(CompositeAggregation[tc.Class]).toBeTruthy();\n        });\n\n        test(`${tc.factory} factory is exported`, () => {\n            expect(CompositeAggregation[tc.factory]).toBeTruthy();\n        });\n    });\n});\n\ndescribe('Suggesters', () => {\n    const suggesterExports = [\n        { Class: 'TermSuggester', factory: 'termSuggester' },\n        { Class: 'DirectGenerator', factory: 'directGenerator' },\n        { Class: 'PhraseSuggester', factory: 'phraseSuggester' },\n        { Class: 'CompletionSuggester', factory: 'completionSuggester' }\n    ];\n\n    suggesterExports.forEach(tc => {\n        test(`${tc.Class} is exported`, () => {\n            expect(esb[tc.Class]).toBeTruthy();\n        });\n\n        test(`${tc.factory} factory is exported`, () => {\n            expect(esb[tc.factory]).toBeTruthy();\n        });\n    });\n});\n\ndescribe('Score Functions', () => {\n    const scoreFunctionExports = [\n        { Class: 'ScriptScoreFunction', factory: 'scriptScoreFunction' },\n        { Class: 'WeightScoreFunction', factory: 'weightScoreFunction' },\n        { Class: 'RandomScoreFunction', factory: 'randomScoreFunction' },\n        {\n            Class: 'FieldValueFactorFunction',\n            factory: 'fieldValueFactorFunction'\n        },\n        { Class: 'DecayScoreFunction', factory: 'decayScoreFunction' }\n    ];\n\n    scoreFunctionExports.forEach(tc => {\n        
test(`${tc.Class} is exported`, () => {\n            expect(esb[tc.Class]).toBeTruthy();\n        });\n\n        test(`${tc.factory} factory is exported`, () => {\n            expect(esb[tc.factory]).toBeTruthy();\n        });\n    });\n});\n\ndescribe('Miscellaneous', () => {\n    const miscExports = [\n        { Class: 'Highlight', factory: 'highlight' },\n        { Class: 'Script', factory: 'script' },\n        { Class: 'GeoPoint', factory: 'geoPoint' },\n        { Class: 'GeoShape', factory: 'geoShape' },\n        { Class: 'IndexedShape', factory: 'indexedShape' },\n        { Class: 'Sort', factory: 'sort' },\n        { Class: 'Rescore', factory: 'rescore' },\n        { Class: 'InnerHits', factory: 'innerHits' },\n        { Class: 'SearchTemplate', factory: 'searchTemplate' }\n    ];\n\n    miscExports.forEach(tc => {\n        test(`${tc.Class} is exported`, () => {\n            expect(esb[tc.Class]).toBeTruthy();\n        });\n\n        test(`${tc.factory} factory is exported`, () => {\n            expect(esb[tc.factory]).toBeTruthy();\n        });\n    });\n\n    test('recipes is exported', () => {\n        expect(esb.recipes).toBeTruthy();\n    });\n\n    const recipeExports = [\n        { method: 'missingQuery', cook: 'cookMissingQuery' },\n        { method: 'randomSortQuery', cook: 'cookRandomSortQuery' },\n        { method: 'filterQuery', cook: 'cookFilterQuery' }\n    ];\n\n    recipeExports.forEach(tc => {\n        test(`recipes.${tc.method} is exported`, () => {\n            expect(esb.recipes[tc.method]).toBeTruthy();\n        });\n\n        test(`${tc.cook} is exported`, () => {\n            expect(esb[tc.cook]).toBeTruthy();\n        });\n    });\n\n    test('prettyPrint is exported', () => {\n        expect(esb.prettyPrint).toBeTruthy();\n    });\n\n    test('prettyPrint calls toJSON', () => {\n        let toJSONCalled = false;\n        esb.prettyPrint({\n            toJSON() {\n                toJSONCalled = true;\n                return true;\n   
         }\n        });\n        expect(toJSONCalled).toBe(true);\n    });\n});\n"
  },
  {
    "path": "test/queries-test/bool-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { BoolQuery, boolQuery, TermQuery, MatchQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst termQryA = new TermQuery('user', 'kimchy');\nconst termQryB = new TermQuery('user', 'clint');\nconst matchQryA = new MatchQuery('message', 'this is a test');\nconst matchQryB = new MatchQuery('message', 'this is also a test');\n\ndescribe('BoolQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('must()', () => {\n                expect(() => boolQuery().must(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('filter()', () => {\n                expect(() => boolQuery().filter(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('mustNot()', () => {\n                expect(() => boolQuery().mustNot(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('should()', () => {\n                expect(() => boolQuery().should(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n        });\n    });\n\n    describe('array item validation', () => {\n        test('must() accepts valid array', () => {\n            expect(() =>\n                new BoolQuery().must([termQryA, matchQryA])\n            ).not.toThrow();\n        });\n\n        test('must() throws for array with invalid item', () => {\n            
expect(() => new BoolQuery().must([termQryA, {}])).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n\n        test('filter() accepts valid array', () => {\n            expect(() =>\n                new BoolQuery().filter([termQryA, matchQryA])\n            ).not.toThrow();\n        });\n\n        test('filter() throws for array with invalid item', () => {\n            expect(() => new BoolQuery().filter([termQryA, {}])).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n\n        test('mustNot() accepts valid array', () => {\n            expect(() =>\n                new BoolQuery().mustNot([termQryA, matchQryA])\n            ).not.toThrow();\n        });\n\n        test('mustNot() throws for array with invalid item', () => {\n            expect(() => new BoolQuery().mustNot([termQryA, {}])).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n\n        test('should() accepts valid array', () => {\n            expect(() =>\n                new BoolQuery().should([matchQryA, matchQryB])\n            ).not.toThrow();\n        });\n\n        test('should() throws for array with invalid item', () => {\n            expect(() => new BoolQuery().should([matchQryA, {}])).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n    });\n\n    describe('single query options', () => {\n        test('sets must option', () => {\n            const result = boolQuery().must(termQryA).toJSON();\n            const expected = {\n                bool: {\n                    must: recursiveToJSON(termQryA.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets filter option', () => {\n            const result = boolQuery().filter(termQryA).toJSON();\n            const expected = {\n    
            bool: {\n                    filter: recursiveToJSON(termQryA.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets must_not option', () => {\n            const result = boolQuery().mustNot(termQryA).toJSON();\n            const expected = {\n                bool: {\n                    must_not: recursiveToJSON(termQryA.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets should option', () => {\n            const result = boolQuery().should(matchQryA).toJSON();\n            const expected = {\n                bool: {\n                    should: recursiveToJSON(matchQryA.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('array query options', () => {\n        test('sets must option(arr)', () => {\n            const result = boolQuery().must([termQryA, termQryB]).toJSON();\n            const expected = {\n                bool: {\n                    must: [\n                        recursiveToJSON(termQryA.toJSON()),\n                        recursiveToJSON(termQryB.toJSON())\n                    ]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets filter option(arr)', () => {\n            const result = boolQuery().filter([termQryA, termQryB]).toJSON();\n            const expected = {\n                bool: {\n                    filter: [\n                        recursiveToJSON(termQryA.toJSON()),\n                        recursiveToJSON(termQryB.toJSON())\n                    ]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets must_not option(arr)', () => {\n            const result = boolQuery().mustNot([termQryA, termQryB]).toJSON();\n            const expected = {\n                bool: {\n   
                 must_not: [\n                        recursiveToJSON(termQryA.toJSON()),\n                        recursiveToJSON(termQryB.toJSON())\n                    ]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets should option(arr)', () => {\n            const result = boolQuery().should([matchQryA, matchQryB]).toJSON();\n            const expected = {\n                bool: {\n                    should: [\n                        recursiveToJSON(matchQryA.toJSON()),\n                        recursiveToJSON(matchQryB.toJSON())\n                    ]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('boolean options', () => {\n        test('sets disable_coord option', () => {\n            const result = boolQuery().disableCoord(true).toJSON();\n            const expected = { bool: { disable_coord: true } };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets minimum_should_match option', () => {\n            const result = boolQuery().minimumShouldMatch(1).toJSON();\n            const expected = { bool: { minimum_should_match: 1 } };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets adjust_pure_negative option', () => {\n            const result = boolQuery().adjustPureNegative(true).toJSON();\n            const expected = { bool: { adjust_pure_negative: true } };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/boosting-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { BoostingQuery, boostingQuery, MatchQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst matchQryA = new MatchQuery('message', 'this is a test');\nconst matchQryB = new MatchQuery('message', 'this is also a test');\n\ndescribe('BoostingQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('positive()', () => {\n                const instance = boostingQuery();\n                expect(() => instance.positive(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('negative()', () => {\n                const instance = boostingQuery();\n                expect(() => instance.negative(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets positive option', () => {\n            const result = boostingQuery().positive(matchQryA).toJSON();\n            const expected = {\n                boosting: {\n                    positive: recursiveToJSON(matchQryA.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets negative option', () => {\n            const result = boostingQuery().negative(matchQryA).toJSON();\n            const expected = {\n                boosting: {\n                    negative: recursiveToJSON(matchQryA.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets 
negative_boost option', () => {\n            const result = boostingQuery().negativeBoost(0.4).toJSON();\n            const expected = {\n                boosting: {\n                    negative_boost: 0.4\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = new BoostingQuery(\n                matchQryA,\n                matchQryB,\n                0.4\n            ).toJSON();\n            const valueB = new BoostingQuery()\n                .positive(matchQryA)\n                .negative(matchQryB)\n                .negativeBoost(0.4)\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                boosting: {\n                    positive: {\n                        match: { message: 'this is a test' }\n                    },\n                    negative: {\n                        match: { message: 'this is also a test' }\n                    },\n                    negative_boost: 0.4\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/combined-fields-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { CombinedFieldsQuery } from '../../src';\n\nconst getInstance = (fields, queryStr) =>\n    new CombinedFieldsQuery(fields, queryStr);\n\ndescribe('CombinedFieldsQuery', () => {\n    describe('operator() validation', () => {\n        test.each([\n            { name: 'accepts valid value: and', value: 'and' },\n            {\n                name: 'accepts valid value: AND (case-insensitive)',\n                value: 'AND'\n            },\n            { name: 'accepts valid value: or', value: 'or' },\n            { name: 'accepts valid value: OR (case-insensitive)', value: 'OR' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().operator(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_operator' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().operator(value)).toThrow(\n                new Error(\n                    \"The 'operator' parameter should be one of 'and' or 'or'\"\n                )\n            );\n        });\n    });\n\n    describe('zeroTermsQuery() validation', () => {\n        test.each([\n            { name: 'accepts valid value: all', value: 'all' },\n            {\n                name: 'accepts valid value: ALL (case-insensitive)',\n                value: 'ALL'\n            },\n            { name: 'accepts valid value: none', value: 'none' },\n            {\n                name: 'accepts valid value: NONE (case-insensitive)',\n                value: 'NONE'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().zeroTermsQuery(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid value',\n                value: 
'invalid_zero_terms_query'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().zeroTermsQuery(value)).toThrow(\n                new Error(\n                    \"The 'zero_terms_query' parameter should be one of 'all' or 'none'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets field option', () => {\n            const result = getInstance().field('my_field').toJSON();\n            const expected = {\n                combined_fields: {\n                    fields: ['my_field']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fields option', () => {\n            const result = getInstance()\n                .fields(['my_field_a', 'my_field_b'])\n                .toJSON();\n            const expected = {\n                combined_fields: {\n                    fields: ['my_field_a', 'my_field_b']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets auto_generate_synonyms_phrase_query option', () => {\n            const result = getInstance()\n                .autoGenerateSynonymsPhraseQuery(true)\n                .toJSON();\n            const expected = {\n                combined_fields: {\n                    fields: [],\n                    auto_generate_synonyms_phrase_query: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments with string field', () => {\n            const valueA = getInstance('my_field', 'query str').toJSON();\n            const valueB = getInstance()\n                .field('my_field')\n                .query('query str')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                combined_fields: {\n               
     fields: ['my_field'],\n                    query: 'query str'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n\n        test('constructor sets arguments with array fields', () => {\n            const valueA = getInstance(\n                ['my_field_a', 'my_field_b'],\n                'query str'\n            ).toJSON();\n            const valueB = getInstance()\n                .fields(['my_field_a', 'my_field_b'])\n                .query('query str')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const valueC = getInstance()\n                .field('my_field_a')\n                .field('my_field_b')\n                .query('query str')\n                .toJSON();\n            expect(valueA).toEqual(valueC);\n\n            const expected = {\n                combined_fields: {\n                    fields: ['my_field_a', 'my_field_b'],\n                    query: 'query str'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/common-terms-query.test.js",
    "content": "import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';\nimport { CommonTermsQuery } from '../../src';\n\nconst getInstance = () => new CommonTermsQuery('my_field', 'query str');\n\ndescribe('CommonTermsQuery', () => {\n    describe('lowFreqOperator() validation', () => {\n        test.each([\n            { name: 'accepts valid value: and', value: 'and' },\n            {\n                name: 'accepts valid value: AND (case-insensitive)',\n                value: 'AND'\n            },\n            { name: 'accepts valid value: or', value: 'or' },\n            { name: 'accepts valid value: OR (case-insensitive)', value: 'OR' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().lowFreqOperator(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid value',\n                value: 'invalid_low_freq_operator'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().lowFreqOperator(value)).toThrow(\n                new Error(\n                    \"The 'low_freq_operator' parameter should be one of 'and' or 'or'\"\n                )\n            );\n        });\n    });\n\n    describe('highFreqOperator() validation', () => {\n        test.each([\n            { name: 'accepts valid value: and', value: 'and' },\n            {\n                name: 'accepts valid value: AND (case-insensitive)',\n                value: 'AND'\n            },\n            { name: 'accepts valid value: or', value: 'or' },\n            { name: 'accepts valid value: OR (case-insensitive)', value: 'OR' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().highFreqOperator(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid 
value',\n                value: 'invalid_high_freq_operator'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().highFreqOperator(value)).toThrow(\n                new Error(\n                    \"The 'high_freq_operator' parameter should be one of 'and' or 'or'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets cutoff_frequency option', () => {\n            const result = getInstance().cutoffFrequency(10).toJSON();\n            const expected = {\n                common: {\n                    my_field: {\n                        query: 'query str',\n                        cutoff_frequency: 10\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets low_freq_operator option', () => {\n            const result = getInstance().lowFreqOperator('and').toJSON();\n            const expected = {\n                common: {\n                    my_field: {\n                        query: 'query str',\n                        low_freq_operator: 'and'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets high_freq_operator option', () => {\n            const result = getInstance().highFreqOperator('and').toJSON();\n            const expected = {\n                common: {\n                    my_field: {\n                        query: 'query str',\n                        high_freq_operator: 'and'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets low_freq via minimum_should_match', () => {\n            const result = getInstance().lowFreq('30%').toJSON();\n            const expected = {\n                common: {\n                    my_field: {\n                        query: 'query str',\n                        
minimum_should_match: { low_freq: '30%' }\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets high_freq via minimum_should_match', () => {\n            const result = getInstance().highFreq('30%').toJSON();\n            const expected = {\n                common: {\n                    my_field: {\n                        query: 'query str',\n                        minimum_should_match: { high_freq: '30%' }\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets disable_coord option', () => {\n            const result = getInstance().disableCoord(true).toJSON();\n            const expected = {\n                common: {\n                    my_field: {\n                        query: 'query str',\n                        disable_coord: true\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = getInstance().toJSON();\n            const valueB = new CommonTermsQuery()\n                .field('my_field')\n                .query('query str')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                common: {\n                    my_field: 'query str'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n\n    describe('mixed representation', () => {\n        let spy;\n\n        beforeEach(() => {\n            spy = vi.spyOn(console, 'warn').mockImplementation(() => {});\n        });\n\n        afterEach(() => {\n            spy.mockRestore();\n        });\n\n        test('mixed minimum_should_match repr', () => {\n            const value = getInstance()\n                
.minimumShouldMatch('30%')\n                .lowFreq('50%')\n                .highFreq('10%')\n                .toJSON();\n            const expected = {\n                common: {\n                    my_field: {\n                        query: 'query str',\n                        minimum_should_match: {\n                            low_freq: '50%',\n                            high_freq: '10%'\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('mixed representation logs warning', () => {\n            getInstance()\n                .minimumShouldMatch('30%')\n                .lowFreq('50%')\n                .highFreq('10%')\n                .toJSON();\n\n            expect(spy).toHaveBeenCalledTimes(2);\n            expect(spy).toHaveBeenNthCalledWith(\n                1,\n                '[CommonTermsQuery] Do not mix with other representation!'\n            );\n            expect(spy).toHaveBeenNthCalledWith(\n                2,\n                '[CommonTermsQuery] Overwriting.'\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/constant-score-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ConstantScoreQuery, constantScoreQuery, TermQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst termQry = new TermQuery('user', 'kimchy');\n\ndescribe('ConstantScoreQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('filter()', () => {\n                const instance = constantScoreQuery();\n                expect(() => instance.filter(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('query()', () => {\n                const instance = constantScoreQuery();\n                expect(() => instance.query(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets filter option', () => {\n            const result = constantScoreQuery().filter(termQry).toJSON();\n            const expected = {\n                constant_score: {\n                    filter: recursiveToJSON(termQry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets filter option via query method', () => {\n            const result = constantScoreQuery().query(termQry).toJSON();\n            const expected = {\n                constant_score: {\n                    filter: recursiveToJSON(termQry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor 
sets filter', () => {\n            const valueA = new ConstantScoreQuery(termQry).toJSON();\n            const valueB = new ConstantScoreQuery().filter(termQry).toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                constant_score: {\n                    filter: { term: { user: 'kimchy' } }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/decay-score-func.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { DecayScoreFunction } from '../../src';\n\nconst getInstance = (mode, field = 'my_field') =>\n    new DecayScoreFunction(mode, field);\n\ndescribe('DecayScoreFunction', () => {\n    describe('mode() validation', () => {\n        test.each([\n            { name: 'accepts valid value: linear', value: 'linear' },\n            {\n                name: 'accepts valid value: LINEAR (case-insensitive)',\n                value: 'LINEAR'\n            },\n            { name: 'accepts valid value: exp', value: 'exp' },\n            {\n                name: 'accepts valid value: EXP (case-insensitive)',\n                value: 'EXP'\n            },\n            { name: 'accepts valid value: gauss', value: 'gauss' },\n            {\n                name: 'accepts valid value: GAUSS (case-insensitive)',\n                value: 'GAUSS'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().mode(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_mode' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().mode(value)).toThrow(\n                new Error(\n                    \"The 'mode' parameter should be one of 'linear', 'exp' or 'gauss'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets origin option', () => {\n            const result = getInstance().origin('now-1h').toJSON();\n            const expected = {\n                gauss: { my_field: { origin: 'now-1h' } }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets scale option', () => {\n            const result = getInstance().scale('10d').toJSON();\n            const expected = {\n                gauss: { my_field: { scale: '10d' } }\n            
};\n            expect(result).toEqual(expected);\n        });\n\n        test('sets offset option', () => {\n            const result = getInstance().offset('5d').toJSON();\n            const expected = {\n                gauss: { my_field: { offset: '5d' } }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets decay option', () => {\n            const result = getInstance().decay(0.6).toJSON();\n            const expected = {\n                gauss: { my_field: { decay: 0.6 } }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('mode methods', () => {\n        test('sets linear mode', () => {\n            const value = getInstance().linear().toJSON();\n            const expected = { linear: { my_field: {} } };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets exp mode', () => {\n            const value = getInstance().exp().toJSON();\n            const expected = { exp: { my_field: {} } };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets gauss mode', () => {\n            const value = getInstance().gauss().toJSON();\n            const expected = { gauss: { my_field: {} } };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('create with gauss mode', () => {\n            const value = getInstance('gauss').toJSON();\n            const expected = { gauss: { my_field: {} } };\n            expect(value).toEqual(expected);\n        });\n\n        test('create with linear mode', () => {\n            const value = getInstance('linear').toJSON();\n            const expected = { linear: { my_field: {} } };\n            expect(value).toEqual(expected);\n        });\n\n        test('create with exp mode', () => {\n            const value = getInstance('exp').toJSON();\n            const expected = { exp: { my_field: {} } };\n            
expect(value).toEqual(expected);\n        });\n    });\n\n    describe('field', () => {\n        test('sets field', () => {\n            const value = new DecayScoreFunction().field('my_field').toJSON();\n            const expected = { gauss: { my_field: {} } };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/dis-max-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { DisMaxQuery, disMaxQuery, TermQuery, MatchQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst termQry = new TermQuery('user', 'kimchy');\nconst matchQry = new MatchQuery('message', 'this is a test');\n\ndescribe('DisMaxQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks Query class for queries', () => {\n            const instance = disMaxQuery();\n            expect(() => instance.queries(null)).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n            expect(() => instance.queries(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n    });\n\n    describe('array item validation', () => {\n        test('checks array items', () => {\n            expect(() =>\n                new DisMaxQuery().queries([termQry, matchQry])\n            ).not.toThrow();\n            expect(() => new DisMaxQuery().queries([termQry, {}])).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets tie_breaker option', () => {\n            const result = disMaxQuery().tieBreaker(1.42).toJSON();\n            const expected = {\n                dis_max: {\n                    tie_breaker: 1.42\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets queries option', () => {\n            const result = disMaxQuery().queries(termQry).toJSON();\n            const expected = {\n                dis_max: {\n                    queries: [recursiveToJSON(termQry.toJSON())]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets queries option(arr)', () => {\n            const result = 
disMaxQuery().queries([termQry, matchQry]).toJSON();\n            const expected = {\n                dis_max: {\n                    queries: [\n                        recursiveToJSON(termQry.toJSON()),\n                        recursiveToJSON(matchQry.toJSON())\n                    ]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/distance-feature.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { DistanceFeatureQuery } from '../../src';\n\ndescribe('DistanceFeatureQuery', () => {\n    describe('options', () => {\n        test('sets time-based distance feature', () => {\n            const value = new DistanceFeatureQuery('time')\n                .origin('now')\n                .pivot('1h')\n                .toJSON();\n            const expected = {\n                distance_feature: {\n                    field: 'time',\n                    pivot: '1h',\n                    origin: 'now'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets position-based distance feature', () => {\n            const value = new DistanceFeatureQuery('location')\n                .origin([-71.3, 41.15])\n                .pivot('1000m')\n                .toJSON();\n            const expected = {\n                distance_feature: {\n                    field: 'location',\n                    pivot: '1000m',\n                    origin: [-71.3, 41.15]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets field and time for distance feature', () => {\n            const value = new DistanceFeatureQuery()\n                .field('time')\n                .origin('2016/02/01')\n                .pivot('7d')\n                .toJSON();\n            const expected = {\n                distance_feature: {\n                    field: 'time',\n                    pivot: '7d',\n                    origin: '2016/02/01'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets position and field for distance feature', () => {\n            const value = new DistanceFeatureQuery()\n                .origin({\n                    lat: 41.12,\n                    lon: -71.34\n                })\n                .pivot('250m')\n                
.field('location')\n                .toJSON();\n            const expected = {\n                distance_feature: {\n                    field: 'location',\n                    pivot: '250m',\n                    origin: {\n                        lat: 41.12,\n                        lon: -71.34\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/exists-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ExistsQuery } from '../../src';\n\ndescribe('ExistsQuery', () => {\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = new ExistsQuery('my_field').toJSON();\n            const valueB = new ExistsQuery().field('my_field').toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                exists: {\n                    field: 'my_field'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/field-value-factor-func.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { FieldValueFactorFunction } from '../../src';\n\nconst getInstance = field => new FieldValueFactorFunction(field);\n\ndescribe('FieldValueFactorFunction', () => {\n    describe('modifier() validation', () => {\n        test.each([\n            { name: 'accepts valid value: none', value: 'none' },\n            {\n                name: 'accepts valid value: NONE (case-insensitive)',\n                value: 'NONE'\n            },\n            { name: 'accepts valid value: log', value: 'log' },\n            {\n                name: 'accepts valid value: LOG (case-insensitive)',\n                value: 'LOG'\n            },\n            { name: 'accepts valid value: log1p', value: 'log1p' },\n            {\n                name: 'accepts valid value: LOG1P (case-insensitive)',\n                value: 'LOG1P'\n            },\n            { name: 'accepts valid value: log2p', value: 'log2p' },\n            {\n                name: 'accepts valid value: LOG2P (case-insensitive)',\n                value: 'LOG2P'\n            },\n            { name: 'accepts valid value: ln', value: 'ln' },\n            { name: 'accepts valid value: LN (case-insensitive)', value: 'LN' },\n            { name: 'accepts valid value: ln1p', value: 'ln1p' },\n            {\n                name: 'accepts valid value: LN1P (case-insensitive)',\n                value: 'LN1P'\n            },\n            { name: 'accepts valid value: ln2p', value: 'ln2p' },\n            {\n                name: 'accepts valid value: LN2P (case-insensitive)',\n                value: 'LN2P'\n            },\n            { name: 'accepts valid value: square', value: 'square' },\n            {\n                name: 'accepts valid value: SQUARE (case-insensitive)',\n                value: 'SQUARE'\n            },\n            { name: 'accepts valid value: sqrt', value: 'sqrt' },\n            {\n                name: 'accepts valid value: SQRT 
(case-insensitive)',\n                value: 'SQRT'\n            },\n            { name: 'accepts valid value: reciprocal', value: 'reciprocal' },\n            {\n                name: 'accepts valid value: RECIPROCAL (case-insensitive)',\n                value: 'RECIPROCAL'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().modifier(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_modifier' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().modifier(value)).toThrow(\n                /The 'modifier' parameter should be one of/\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets field option', () => {\n            const result = getInstance().field('my_field').toJSON();\n            const expected = {\n                field_value_factor: {\n                    field: 'my_field'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets factor option', () => {\n            const result = getInstance().factor(1.5).toJSON();\n            const expected = {\n                field_value_factor: {\n                    factor: 1.5\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets modifier option', () => {\n            const result = getInstance().modifier('log1p').toJSON();\n            const expected = {\n                field_value_factor: {\n                    modifier: 'log1p'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets missing option', () => {\n            const result = getInstance().missing(1).toJSON();\n            const expected = {\n                field_value_factor: {\n                    missing: 1\n                }\n    
        };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets field', () => {\n            const value = getInstance('my_field').toJSON();\n            const expected = { field_value_factor: { field: 'my_field' } };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/full-text-query-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { FullTextQueryBase } from '../../src/queries/full-text-queries';\n\nconst getInstance = queryString =>\n    new FullTextQueryBase('my_qry_type', queryString);\n\ndescribe('FullTextQueryBase', () => {\n    describe('options', () => {\n        test('sets analyzer option', () => {\n            const result = getInstance().analyzer('snowball').toJSON();\n            const expected = {\n                my_qry_type: {\n                    analyzer: 'snowball'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets minimum_should_match option', () => {\n            const result = getInstance().minimumShouldMatch(2).toJSON();\n            const expected = {\n                my_qry_type: {\n                    minimum_should_match: 2\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets query option', () => {\n            const result = getInstance().query('query str').toJSON();\n            const expected = {\n                my_qry_type: {\n                    query: 'query str'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets query str', () => {\n            const valueA = getInstance('query str').toJSON();\n            const valueB = getInstance().query('query str').toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                my_qry_type: {\n                    query: 'query str'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/function-score-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport {\n    FunctionScoreQuery,\n    functionScoreQuery,\n    TermQuery,\n    ScriptScoreFunction,\n    RandomScoreFunction,\n    Script\n} from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst termQry = new TermQuery('user', 'kimchy');\nconst scriptScoreFunc = new ScriptScoreFunction(\n    new Script('inline', 'Math.log10(doc.likes.value + 2)')\n);\nconst randScoreFunc = new RandomScoreFunction();\n\ndescribe('FunctionScoreQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('query()', () => {\n                expect(() => functionScoreQuery().query(value)).toThrow(\n                    new TypeError('Argument must be an instance of Query')\n                );\n            });\n\n            test('function()', () => {\n                expect(() => functionScoreQuery().function(value)).toThrow(\n                    new TypeError(\n                        'Argument must be an instance of ScoreFunction'\n                    )\n                );\n            });\n\n            test('functions()', () => {\n                expect(() => functionScoreQuery().functions(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('array item validation', () => {\n        test('checks array items', () => {\n            expect(() =>\n                new FunctionScoreQuery().functions([\n                    scriptScoreFunc,\n                    randScoreFunc\n                ])\n            ).not.toThrow();\n            expect(() =>\n                new 
FunctionScoreQuery().functions([scriptScoreFunc, {}])\n            ).toThrow(\n                new TypeError('Argument must be an instance of ScoreFunction')\n            );\n        });\n    });\n\n    describe('scoreMode() validation', () => {\n        test.each([\n            { name: 'accepts valid value: multiply', value: 'multiply' },\n            {\n                name: 'accepts valid value: MULTIPLY (case-insensitive)',\n                value: 'MULTIPLY'\n            },\n            { name: 'accepts valid value: sum', value: 'sum' },\n            {\n                name: 'accepts valid value: SUM (case-insensitive)',\n                value: 'SUM'\n            },\n            { name: 'accepts valid value: first', value: 'first' },\n            {\n                name: 'accepts valid value: FIRST (case-insensitive)',\n                value: 'FIRST'\n            },\n            { name: 'accepts valid value: min', value: 'min' },\n            {\n                name: 'accepts valid value: MIN (case-insensitive)',\n                value: 'MIN'\n            },\n            { name: 'accepts valid value: max', value: 'max' },\n            {\n                name: 'accepts valid value: MAX (case-insensitive)',\n                value: 'MAX'\n            },\n            { name: 'accepts valid value: avg', value: 'avg' },\n            {\n                name: 'accepts valid value: AVG (case-insensitive)',\n                value: 'AVG'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => functionScoreQuery().scoreMode(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_score_mode' }\n        ])('$name', ({ value }) => {\n            expect(() => functionScoreQuery().scoreMode(value)).toThrow(\n                /The 'score_mode' parameter should be one of/\n            );\n        });\n    });\n\n    
describe('boostMode() validation', () => {\n        test.each([\n            { name: 'accepts valid value: multiply', value: 'multiply' },\n            {\n                name: 'accepts valid value: MULTIPLY (case-insensitive)',\n                value: 'MULTIPLY'\n            },\n            { name: 'accepts valid value: replace', value: 'replace' },\n            {\n                name: 'accepts valid value: REPLACE (case-insensitive)',\n                value: 'REPLACE'\n            },\n            { name: 'accepts valid value: sum', value: 'sum' },\n            {\n                name: 'accepts valid value: SUM (case-insensitive)',\n                value: 'SUM'\n            },\n            { name: 'accepts valid value: avg', value: 'avg' },\n            {\n                name: 'accepts valid value: AVG (case-insensitive)',\n                value: 'AVG'\n            },\n            { name: 'accepts valid value: max', value: 'max' },\n            {\n                name: 'accepts valid value: MAX (case-insensitive)',\n                value: 'MAX'\n            },\n            { name: 'accepts valid value: min', value: 'min' },\n            {\n                name: 'accepts valid value: MIN (case-insensitive)',\n                value: 'MIN'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => functionScoreQuery().boostMode(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_boost_mode' }\n        ])('$name', ({ value }) => {\n            expect(() => functionScoreQuery().boostMode(value)).toThrow(\n                /The 'boost_mode' parameter should be one of/\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets query option', () => {\n            const result = functionScoreQuery().query(termQry).toJSON();\n            const expected = {\n                function_score: 
{\n                    functions: [],\n                    query: recursiveToJSON(termQry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets score_mode option', () => {\n            const result = functionScoreQuery().scoreMode('multiply').toJSON();\n            const expected = {\n                function_score: {\n                    functions: [],\n                    score_mode: 'multiply'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets boost_mode option', () => {\n            const result = functionScoreQuery().boostMode('multiply').toJSON();\n            const expected = {\n                function_score: {\n                    functions: [],\n                    boost_mode: 'multiply'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_boost option', () => {\n            const result = functionScoreQuery().maxBoost(999.9).toJSON();\n            const expected = {\n                function_score: {\n                    functions: [],\n                    max_boost: 999.9\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets min_score option', () => {\n            const result = functionScoreQuery().minScore(9.999).toJSON();\n            const expected = {\n                function_score: {\n                    functions: [],\n                    min_score: 9.999\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets functions option', () => {\n            const result = functionScoreQuery()\n                .functions([scriptScoreFunc, randScoreFunc])\n                .toJSON();\n            const expected = {\n                function_score: {\n                    functions: [\n                        
recursiveToJSON(scriptScoreFunc.toJSON()),\n                        recursiveToJSON(randScoreFunc.toJSON())\n                    ]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/fuzzy-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { FuzzyQuery } from '../../src';\n\nconst getInstance = () => new FuzzyQuery('my_field', 'my-value');\n\ndescribe('FuzzyQuery', () => {\n    describe('options', () => {\n        test('sets fuzziness option', () => {\n            const result = getInstance().fuzziness('AUTO').toJSON();\n            const expected = {\n                fuzzy: {\n                    my_field: {\n                        value: 'my-value',\n                        fuzziness: 'AUTO'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets prefix_length option', () => {\n            const result = getInstance().prefixLength(3).toJSON();\n            const expected = {\n                fuzzy: {\n                    my_field: {\n                        value: 'my-value',\n                        prefix_length: 3\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_expansions option', () => {\n            const result = getInstance().maxExpansions(25).toJSON();\n            const expected = {\n                fuzzy: {\n                    my_field: {\n                        value: 'my-value',\n                        max_expansions: 25\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets transpositions option', () => {\n            const result = getInstance().transpositions(true).toJSON();\n            const expected = {\n                fuzzy: {\n                    my_field: {\n                        value: 'my-value',\n                        transpositions: true\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets 
arguments', () => {\n            const valueA = getInstance().toJSON();\n            const valueB = new FuzzyQuery()\n                .field('my_field')\n                .value('my-value')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                fuzzy: {\n                    my_field: 'my-value'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/geo-bounding-box-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoBoundingBoxQuery, GeoPoint } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst getInstance = () => new GeoBoundingBoxQuery('my_field');\nconst pt1 = new GeoPoint().lat(40.73).lon(-74.1);\nconst pt2 = new GeoPoint().lat(40.1).lon(-71.12);\n\ndescribe('GeoBoundingBoxQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('topLeft()', () => {\n                const instance = getInstance();\n                expect(() => instance.topLeft(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n\n            test('bottomRight()', () => {\n                const instance = getInstance();\n                expect(() => instance.bottomRight(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n\n            test('topRight()', () => {\n                const instance = getInstance();\n                expect(() => instance.topRight(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n\n            test('bottomLeft()', () => {\n                const instance = getInstance();\n                expect(() => instance.bottomLeft(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n        });\n    });\n\n    describe('type() validation', () => {\n        test.each([\n            { name: 'accepts valid value: memory', value: 'memory' },\n            {\n            
    name: 'accepts valid value: MEMORY (case-insensitive)',\n                value: 'MEMORY'\n            },\n            { name: 'accepts valid value: indexed', value: 'indexed' },\n            {\n                name: 'accepts valid value: INDEXED (case-insensitive)',\n                value: 'INDEXED'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().type(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_type' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().type(value)).toThrow(\n                new Error(\n                    \"The 'type' parameter should be one of 'memory' or 'indexed'\"\n                )\n            );\n        });\n    });\n\n    describe('field options', () => {\n        test('sets top_left option', () => {\n            const result = getInstance().topLeft(pt1).toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {\n                        top_left: recursiveToJSON(pt1.toJSON())\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets bottom_right option', () => {\n            const result = getInstance().bottomRight(pt2).toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {\n                        bottom_right: recursiveToJSON(pt2.toJSON())\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets top_right option', () => {\n            const result = getInstance().topRight(pt1).toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {\n                        top_right: recursiveToJSON(pt1.toJSON())\n   
                 }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets bottom_left option', () => {\n            const result = getInstance().bottomLeft(pt2).toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {\n                        bottom_left: recursiveToJSON(pt2.toJSON())\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets top option', () => {\n            const result = getInstance().top(40.73).toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {\n                        top: 40.73\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets left option', () => {\n            const result = getInstance().left(-74.1).toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {\n                        left: -74.1\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets bottom option', () => {\n            const result = getInstance().bottom(40.1).toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {\n                        bottom: 40.1\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets right option', () => {\n            const result = getInstance().right(-71.12).toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {\n                        right: -71.12\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n 
   describe('query options', () => {\n        test('sets type option', () => {\n            const result = getInstance().type('indexed').toJSON();\n            const expected = {\n                geo_bounding_box: {\n                    my_field: {},\n                    type: 'indexed'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/geo-distance-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoDistanceQuery, GeoPoint } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst getInstance = pt => new GeoDistanceQuery('my_field', pt);\nconst pt = new GeoPoint().lat(40.73).lon(-74.1);\n\ndescribe('GeoDistanceQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('geoPoint()', () => {\n                const instance = getInstance();\n                expect(() => instance.geoPoint(value)).toThrow(\n                    new TypeError('Argument must be an instance of GeoPoint')\n                );\n            });\n        });\n    });\n\n    describe('distanceType() validation', () => {\n        test.each([\n            { name: 'accepts valid value: arc', value: 'arc' },\n            {\n                name: 'accepts valid value: ARC (case-insensitive)',\n                value: 'ARC'\n            },\n            { name: 'accepts valid value: plane', value: 'plane' },\n            {\n                name: 'accepts valid value: PLANE (case-insensitive)',\n                value: 'PLANE'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().distanceType(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_distance_type' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().distanceType(value)).toThrow(\n                new Error(\n                    \"The 'distance_type' parameter should be one of 'plane' or 'arc'\"\n                )\n            );\n        });\n    
});\n\n    describe('options', () => {\n        test('sets distance option', () => {\n            const result = getInstance().distance('10m').toJSON();\n            const expected = {\n                geo_distance: {\n                    my_field: {},\n                    distance: '10m'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets distance_type option', () => {\n            const result = getInstance().distanceType('arc').toJSON();\n            const expected = {\n                geo_distance: {\n                    my_field: {},\n                    distance_type: 'arc'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets geo_point option', () => {\n            const result = getInstance().geoPoint(pt).toJSON();\n            const expected = {\n                geo_distance: {\n                    my_field: recursiveToJSON(pt.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets point', () => {\n            const valueA = getInstance(pt).toJSON();\n            const valueB = getInstance().geoPoint(pt).toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                geo_distance: {\n                    my_field: { lat: 40.73, lon: -74.1 }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/geo-polygon-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoPolygonQuery } from '../../src';\n\nconst getInstance = () => new GeoPolygonQuery('my_field');\n\ndescribe('GeoPolygonQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks Array class for points', () => {\n            const instance = getInstance();\n            expect(() => instance.points(null)).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n            expect(() => instance.points(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets points option', () => {\n            const points = [\n                { lat: 40, lon: -70 },\n                { lat: 30, lon: -80 },\n                { lat: 20, lon: -90 }\n            ];\n            const result = getInstance().points(points).toJSON();\n            const expected = {\n                geo_polygon: {\n                    my_field: {\n                        points: points\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/geo-query-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoQueryBase } from '../../src/queries/geo-queries';\n\nconst getInstance = (field = 'my_field') =>\n    new GeoQueryBase('my_qry_type', field);\n\ndescribe('GeoQueryBase', () => {\n    describe('validationMethod() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: IGNORE_MALFORMED',\n                value: 'IGNORE_MALFORMED'\n            },\n            {\n                name: 'accepts valid value: ignore_malformed (case-insensitive)',\n                value: 'ignore_malformed'\n            },\n            { name: 'accepts valid value: COERCE', value: 'COERCE' },\n            {\n                name: 'accepts valid value: coerce (case-insensitive)',\n                value: 'coerce'\n            },\n            { name: 'accepts valid value: STRICT', value: 'STRICT' },\n            {\n                name: 'accepts valid value: strict (case-insensitive)',\n                value: 'strict'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().validationMethod(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid value',\n                value: 'invalid_validation_method'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().validationMethod(value)).toThrow(\n                new Error(\n                    \"The 'validation_method' parameter should be one of 'IGNORE_MALFORMED', 'COERCE' or 'STRICT'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets validation_method option', () => {\n            const result = getInstance().validationMethod('COERCE').toJSON();\n            const expected = {\n                my_qry_type: {\n                    my_field: {},\n                    
validation_method: 'COERCE'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets field', () => {\n            const valueA = new GeoQueryBase('my_qry_type', 'my_field').toJSON();\n            const valueB = new GeoQueryBase('my_qry_type')\n                .field('my_field')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                my_qry_type: {\n                    my_field: {}\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/geo-shape-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { GeoShapeQuery, GeoShape, IndexedShape } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst getInstance = () => new GeoShapeQuery('my_field');\n\ndescribe('GeoShapeQuery', () => {\n    describe('illegal method call', () => {\n        test('validation_method cannot be set', () => {\n            expect(() => new GeoShapeQuery().validationMethod()).toThrow(\n                new Error('validationMethod is not supported in GeoShapeQuery')\n            );\n        });\n    });\n\n    describe('parameter type validation', () => {\n        test('checks GeoShape class for shape', () => {\n            const instance = getInstance();\n            expect(() => instance.shape(null)).toThrow(\n                new TypeError('Argument must be an instance of GeoShape')\n            );\n            expect(() => instance.shape(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of GeoShape')\n            );\n        });\n\n        test('checks IndexedShape class for indexedShape', () => {\n            const instance = getInstance();\n            expect(() => instance.indexedShape(null)).toThrow(\n                new TypeError('Argument must be an instance of IndexedShape')\n            );\n            expect(() => instance.indexedShape(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of IndexedShape')\n            );\n        });\n    });\n\n    describe('relation() validation', () => {\n        test.each([\n            { name: 'accepts valid value: WITHIN', value: 'WITHIN' },\n            {\n                name: 'accepts valid value: within (case-insensitive)',\n                value: 'within'\n            },\n            { name: 'accepts valid value: CONTAINS', value: 'CONTAINS' },\n            {\n                name: 'accepts valid value: contains (case-insensitive)',\n                value: 
'contains'\n            },\n            { name: 'accepts valid value: DISJOINT', value: 'DISJOINT' },\n            {\n                name: 'accepts valid value: disjoint (case-insensitive)',\n                value: 'disjoint'\n            },\n            { name: 'accepts valid value: INTERSECTS', value: 'INTERSECTS' },\n            {\n                name: 'accepts valid value: intersects (case-insensitive)',\n                value: 'intersects'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().relation(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_relation' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().relation(value)).toThrow(\n                /The 'relation' parameter should be one of/\n            );\n        });\n    });\n\n    describe('field options', () => {\n        test('sets shape option', () => {\n            const shape = new GeoShape().type('envelope').coordinates([\n                [13.0, 53.0],\n                [14.0, 52.0]\n            ]);\n            const result = getInstance().shape(shape).toJSON();\n            const expected = {\n                geo_shape: {\n                    my_field: {\n                        shape: recursiveToJSON(shape.toJSON())\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets indexed_shape option', () => {\n            const indexedShape = new IndexedShape()\n                .id('DEU')\n                .type('countries')\n                .index('shapes')\n                .path('location');\n            const result = getInstance().indexedShape(indexedShape).toJSON();\n            const expected = {\n                geo_shape: {\n                    my_field: {\n                        indexed_shape: 
recursiveToJSON(indexedShape.toJSON())\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets relation option', () => {\n            const result = getInstance().relation('WITHIN').toJSON();\n            const expected = {\n                geo_shape: {\n                    my_field: {\n                        relation: 'WITHIN'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('query options', () => {\n        test('sets ignore_unmapped option', () => {\n            const result = getInstance().ignoreUnmapped(true).toJSON();\n            const expected = {\n                geo_shape: {\n                    my_field: {},\n                    ignore_unmapped: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/has-child-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { HasChildQuery, hasChildQuery, TermQuery } from '../../src';\n\nconst qry = new TermQuery('user', 'kimchy');\n\ndescribe('HasChildQuery', () => {\n    describe('options', () => {\n        describe.each([\n            {\n                name: 'sets type option',\n                type: 'blog_tag',\n                expected: {\n                    has_child: {\n                        type: 'blog_tag'\n                    }\n                }\n            }\n        ])('$name', ({ type, expected }) => {\n            test('type()', () => {\n                const result = hasChildQuery().type(type).toJSON();\n                expect(result).toEqual(expected);\n            });\n\n            test('childType()', () => {\n                const result = hasChildQuery().childType(type).toJSON();\n                expect(result).toEqual(expected);\n            });\n        });\n\n        test('sets min_children option', () => {\n            const result = hasChildQuery().minChildren(2).toJSON();\n            const expected = {\n                has_child: {\n                    min_children: 2\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_children option', () => {\n            const result = hasChildQuery().maxChildren(10).toJSON();\n            const expected = {\n                has_child: {\n                    max_children: 10\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = new HasChildQuery(qry, 'my_type').toJSON();\n            const valueB = new HasChildQuery()\n                .childType('my_type')\n                .query(qry)\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                
has_child: {\n                    query: { term: { user: 'kimchy' } },\n                    type: 'my_type'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/has-parent-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { HasParentQuery, hasParentQuery, TermQuery } from '../../src';\n\nconst qry = new TermQuery('user', 'kimchy');\n\ndescribe('HasParentQuery', () => {\n    describe('illegal method call', () => {\n        test('score_mode cannot be set', () => {\n            expect(() => new HasParentQuery().scoreMode()).toThrow(\n                new Error('scoreMode is not supported in HasParentQuery')\n            );\n        });\n    });\n\n    describe('options', () => {\n        describe.each([\n            {\n                name: 'sets parent_type option',\n                type: 'blog',\n                expected: {\n                    has_parent: {\n                        parent_type: 'blog'\n                    }\n                }\n            }\n        ])('$name', ({ type, expected }) => {\n            test('type()', () => {\n                const result = hasParentQuery().type(type).toJSON();\n                expect(result).toEqual(expected);\n            });\n\n            test('parentType()', () => {\n                const result = hasParentQuery().parentType(type).toJSON();\n                expect(result).toEqual(expected);\n            });\n        });\n\n        test('sets score option', () => {\n            const result = hasParentQuery().score(true).toJSON();\n            const expected = {\n                has_parent: {\n                    score: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = new HasParentQuery(qry, 'my_type').toJSON();\n            const valueB = new HasParentQuery()\n                .parentType('my_type')\n                .query(qry)\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                has_parent: {\n                    
query: { term: { user: 'kimchy' } },\n                    parent_type: 'my_type'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/ids-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { IdsQuery, idsQuery } from '../../src';\n\ndescribe('IdsQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks Array class for values', () => {\n            const instance = new IdsQuery();\n            expect(() => instance.values(null)).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n            expect(() => instance.values(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets type option', () => {\n            const result = idsQuery().type('my_type').toJSON();\n            const expected = {\n                ids: {\n                    type: 'my_type'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        describe.each([\n            {\n                name: 'sets values option',\n                values: ['1', '4', '100'],\n                expected: {\n                    ids: {\n                        values: ['1', '4', '100']\n                    }\n                }\n            }\n        ])('$name', ({ values, expected }) => {\n            test('values()', () => {\n                const result = idsQuery().values(values).toJSON();\n                expect(result).toEqual(expected);\n            });\n\n            test('ids()', () => {\n                const result = idsQuery().ids(values).toJSON();\n                expect(result).toEqual(expected);\n            });\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const value = new IdsQuery('my_type', ['1', '4', '100']).toJSON();\n            const expected = {\n                ids: {\n                    type: 'my_type',\n                    values: ['1', '4', '100']\n                
}\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/joining-query-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { JoiningQueryBase } from '../../src/queries/joining-queries';\nimport { TermQuery, InnerHits } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst getInstance = qry => new JoiningQueryBase('my_qry_type', '', qry);\nconst qry = new TermQuery('user', 'kimchy');\n\ndescribe('JoiningQueryBase', () => {\n    describe('parameter type validation', () => {\n        test('checks Query class for query', () => {\n            const instance = getInstance();\n            expect(() => instance.query(null)).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n            expect(() => instance.query(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n\n        test('checks InnerHits class for innerHits', () => {\n            const instance = getInstance();\n            expect(() => instance.innerHits(null)).toThrow(\n                new TypeError('Argument must be an instance of InnerHits')\n            );\n            expect(() => instance.innerHits(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of InnerHits')\n            );\n        });\n    });\n\n    describe('scoreMode() validation', () => {\n        test.each([\n            { name: 'accepts valid value: none', value: 'none' },\n            {\n                name: 'accepts valid value: NONE (case-insensitive)',\n                value: 'NONE'\n            },\n            { name: 'accepts valid value: sum', value: 'sum' },\n            {\n                name: 'accepts valid value: SUM (case-insensitive)',\n                value: 'SUM'\n            },\n            { name: 'accepts valid value: min', value: 'min' },\n            {\n                name: 'accepts valid value: MIN (case-insensitive)',\n                value: 'MIN'\n            },\n          
  { name: 'accepts valid value: max', value: 'max' },\n            {\n                name: 'accepts valid value: MAX (case-insensitive)',\n                value: 'MAX'\n            },\n            { name: 'accepts valid value: avg', value: 'avg' },\n            {\n                name: 'accepts valid value: AVG (case-insensitive)',\n                value: 'AVG'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().scoreMode(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_score_mode' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().scoreMode(value)).toThrow(\n                /The 'score_mode' parameter should be one of/\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets query option', () => {\n            const result = getInstance().query(qry).toJSON();\n            const expected = {\n                my_qry_type: {\n                    query: recursiveToJSON(qry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets score_mode option', () => {\n            const result = getInstance().scoreMode('sum').toJSON();\n            const expected = {\n                my_qry_type: {\n                    score_mode: 'sum'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets ignore_unmapped option', () => {\n            const result = getInstance().ignoreUnmapped(true).toJSON();\n            const expected = {\n                my_qry_type: {\n                    ignore_unmapped: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets inner_hits option', () => {\n            const innerHits = new InnerHits('my_inner_hits');\n        
    const result = getInstance().innerHits(innerHits).toJSON();\n            const expected = {\n                my_qry_type: {\n                    inner_hits: recursiveToJSON(innerHits.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = getInstance(qry).toJSON();\n            const valueB = getInstance().query(qry).toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                my_qry_type: {\n                    query: { term: { user: 'kimchy' } }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/match-all-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MatchAllQuery } from '../../src';\n\ndescribe('MatchAllQuery', () => {\n    describe('constructor', () => {\n        test('can be instantiated', () => {\n            const value = new MatchAllQuery().toJSON();\n            const expected = { match_all: {} };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/match-none-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MatchNoneQuery } from '../../src';\n\ndescribe('MatchNoneQuery', () => {\n    test('can be instantiated', () => {\n        const value = new MatchNoneQuery().toJSON();\n        const expected = { match_none: {} };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/queries-test/match-phrase-prefix-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MatchPhrasePrefixQuery } from '../../src';\n\nconst getInstance = () => new MatchPhrasePrefixQuery('my_field', 'query str');\n\ndescribe('MatchPhrasePrefixQuery', () => {\n    describe('options', () => {\n        test('sets max_expansions option', () => {\n            const result = getInstance().maxExpansions(50).toJSON();\n            const expected = {\n                match_phrase_prefix: {\n                    my_field: {\n                        query: 'query str',\n                        max_expansions: 50\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/match-phrase-query-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MatchPhraseQueryBase } from '../../src/queries/full-text-queries';\n\nconst getInstance = () =>\n    new MatchPhraseQueryBase('my_qry_type', '', 'my_field', 'query str');\n\ndescribe('MatchPhraseQueryBase', () => {\n    describe('illegal method call', () => {\n        test('minimum_should_match cannot be set', () => {\n            expect(() =>\n                new MatchPhraseQueryBase().minimumShouldMatch()\n            ).toThrow(\n                new Error(\n                    'minimumShouldMatch is not supported in MatchPhraseQueryBase'\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets slop option', () => {\n            const result = getInstance().slop(2).toJSON();\n            const expected = {\n                my_qry_type: {\n                    my_field: {\n                        query: 'query str',\n                        slop: 2\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/match-phrase-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MatchPhraseQuery } from '../../src';\n\ndescribe('MatchPhraseQuery', () => {\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const value = new MatchPhraseQuery(\n                'my_field',\n                'query str'\n            ).toJSON();\n            const expected = {\n                match_phrase: {\n                    my_field: 'query str'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/match-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MatchQuery } from '../../src';\n\nconst getInstance = () => new MatchQuery('my_field', 'query str');\n\ndescribe('MatchQuery', () => {\n    describe('operator() validation', () => {\n        test.each([\n            { name: 'accepts valid value: and', value: 'and' },\n            {\n                name: 'accepts valid value: AND (case-insensitive)',\n                value: 'AND'\n            },\n            { name: 'accepts valid value: or', value: 'or' },\n            { name: 'accepts valid value: OR (case-insensitive)', value: 'OR' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().operator(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_operator' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().operator(value)).toThrow(\n                new Error(\n                    \"The 'operator' parameter should be one of 'and' or 'or'\"\n                )\n            );\n        });\n    });\n\n    describe('zeroTermsQuery() validation', () => {\n        test.each([\n            { name: 'accepts valid value: all', value: 'all' },\n            {\n                name: 'accepts valid value: ALL (case-insensitive)',\n                value: 'ALL'\n            },\n            { name: 'accepts valid value: none', value: 'none' },\n            {\n                name: 'accepts valid value: NONE (case-insensitive)',\n                value: 'NONE'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().zeroTermsQuery(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid value',\n                value: 'invalid_zero_terms_query'\n            }\n        
])('$name', ({ value }) => {\n            expect(() => getInstance().zeroTermsQuery(value)).toThrow(\n                new Error(\n                    \"The 'zero_terms_query' parameter should be one of 'all' or 'none'\"\n                )\n            );\n        });\n    });\n\n    describe('rewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 'constant_score_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n                value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).toThrow();\n        });\n    });\n\n    describe('fuzzyRewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 
'constant_score_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n                value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().fuzzyRewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_fuzzy_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().fuzzyRewrite(value)).toThrow();\n        });\n    });\n\n    describe('options', () => {\n        test('sets operator option', () => {\n            const result = getInstance().operator('and').toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        operator: 'and'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets lenient option', () => {\n            const result = getInstance().lenient(true).toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        lenient: true\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzziness option', () => {\n            const result = getInstance().fuzziness('AUTO').toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        fuzziness: 'AUTO'\n             
       }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets prefix_length option', () => {\n            const result = getInstance().prefixLength(5).toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        prefix_length: 5\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_expansions option', () => {\n            const result = getInstance().maxExpansions(3).toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        max_expansions: 3\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets rewrite option', () => {\n            const result = getInstance().rewrite('constant_score').toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        rewrite: 'constant_score'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzzy_rewrite option', () => {\n            const result = getInstance()\n                .fuzzyRewrite('constant_score_boolean')\n                .toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        fuzzy_rewrite: 'constant_score_boolean'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzzy_transpositions option', () => {\n            const result = 
getInstance().fuzzyTranspositions(true).toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        fuzzy_transpositions: true\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets zero_terms_query option', () => {\n            const result = getInstance().zeroTermsQuery('all').toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        zero_terms_query: 'all'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets cutoff_frequency option', () => {\n            const result = getInstance().cutoffFrequency(10).toJSON();\n            const expected = {\n                match: {\n                    my_field: {\n                        query: 'query str',\n                        cutoff_frequency: 10\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/mono-field-query-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MonoFieldQueryBase } from '../../src/queries/full-text-queries';\n\nconst getInstance = (field, queryStr) =>\n    new MonoFieldQueryBase('my_qry_type', field, queryStr);\n\ndescribe('MonoFieldQueryBase', () => {\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = getInstance('my_field', 'query str').toJSON();\n            const valueB = getInstance()\n                .field('my_field')\n                .query('query str')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                my_qry_type: {\n                    my_field: 'query str'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n\n    describe('validation', () => {\n        test('query is required', () => {\n            expect(() => getInstance().toJSON()).toThrow(\n                new Error('Query string is required for full text query!')\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/more-like-this-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MoreLikeThisQuery, moreLikeThisQuery } from '../../src';\n\nconst doc1 = { _index: 'imdb', _type: 'movies', _id: '1' };\nconst doc2 = { _index: 'imdb', _type: 'movies', _id: '2' };\nconst doc3 = 'and potentially some more text here as well';\n\ndescribe('MoreLikeThisQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('fields()', () => {\n                expect(() => new MoreLikeThisQuery().fields(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n\n            test('ids()', () => {\n                expect(() => new MoreLikeThisQuery().ids(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n\n            test('docs()', () => {\n                expect(() => new MoreLikeThisQuery().docs(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets fields option', () => {\n            const result = moreLikeThisQuery()\n                .fields(['title', 'description'])\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    fields: ['title', 'description']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets like option', () => {\n            const result = moreLikeThisQuery().like(doc1).toJSON();\n            const expected = {\n                more_like_this: {\n          
          like: doc1\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets like(arr) option', () => {\n            const result = moreLikeThisQuery()\n                .like([doc1, doc2, doc3])\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    like: [doc1, doc2, doc3]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets unlike option', () => {\n            const result = moreLikeThisQuery().unlike(doc1).toJSON();\n            const expected = {\n                more_like_this: {\n                    unlike: doc1\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets unlike(arr) option', () => {\n            const result = moreLikeThisQuery()\n                .unlike([doc1, doc2, doc3])\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    unlike: [doc1, doc2, doc3]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets like_text option', () => {\n            const result = moreLikeThisQuery().likeText('my text').toJSON();\n            const expected = {\n                more_like_this: {\n                    like_text: 'my text'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets ids option', () => {\n            const result = moreLikeThisQuery().ids(['1', '2']).toJSON();\n            const expected = {\n                more_like_this: {\n                    ids: ['1', '2']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets docs option', () => {\n            const docs = [\n                { _type: 'type', _id: '1' },\n                { _type: 'type', _id: '2' }\n  
          ];\n            const result = moreLikeThisQuery().docs(docs).toJSON();\n            const expected = {\n                more_like_this: {\n                    docs: docs\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_query_terms option', () => {\n            const result = moreLikeThisQuery().maxQueryTerms(12).toJSON();\n            const expected = {\n                more_like_this: {\n                    max_query_terms: 12\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets min_term_freq option', () => {\n            const result = moreLikeThisQuery().minTermFreq(1).toJSON();\n            const expected = {\n                more_like_this: {\n                    min_term_freq: 1\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets min_doc_freq option', () => {\n            const result = moreLikeThisQuery().minDocFreq(6).toJSON();\n            const expected = {\n                more_like_this: {\n                    min_doc_freq: 6\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_doc_freq option', () => {\n            const result = moreLikeThisQuery().maxDocFreq(30).toJSON();\n            const expected = {\n                more_like_this: {\n                    max_doc_freq: 30\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets min_word_length option', () => {\n            const result = moreLikeThisQuery().minWordLength(3).toJSON();\n            const expected = {\n                more_like_this: {\n                    min_word_length: 3\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_word_length option', () => {\n            const result = 
moreLikeThisQuery().maxWordLength(20).toJSON();\n            const expected = {\n                more_like_this: {\n                    max_word_length: 20\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets stop_words option', () => {\n            const result = moreLikeThisQuery()\n                .stopWords(['the', 'a', 'trump'])\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    stop_words: ['the', 'a', 'trump']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets analyzer option', () => {\n            const result = moreLikeThisQuery().analyzer('snowball').toJSON();\n            const expected = {\n                more_like_this: {\n                    analyzer: 'snowball'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets minimum_should_match option', () => {\n            const result = moreLikeThisQuery()\n                .minimumShouldMatch('30%')\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    minimum_should_match: '30%'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets boost_terms option', () => {\n            const result = moreLikeThisQuery().boostTerms(1.4).toJSON();\n            const expected = {\n                more_like_this: {\n                    boost_terms: 1.4\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets include option', () => {\n            const result = moreLikeThisQuery().include(true).toJSON();\n            const expected = {\n                more_like_this: {\n                    include: true\n                }\n            };\n            expect(result).toEqual(expected);\n    
    });\n    });\n\n    describe('like/unlike clause behavior', () => {\n        test('sets like with string', () => {\n            const value = new MoreLikeThisQuery().like(doc3).toJSON();\n            const expected = {\n                more_like_this: {\n                    like: doc3\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets like with object', () => {\n            const value = new MoreLikeThisQuery().like(doc1).toJSON();\n            const expected = {\n                more_like_this: {\n                    like: doc1\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets like with multiple calls', () => {\n            const value = new MoreLikeThisQuery()\n                .like(doc1)\n                .like(doc2)\n                .like(doc3)\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    like: [doc1, doc2, doc3]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets like with array', () => {\n            const value = new MoreLikeThisQuery()\n                .like([doc1, doc2, doc3])\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    like: [doc1, doc2, doc3]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets like with mix of calls and arrays', () => {\n            const value = new MoreLikeThisQuery()\n                .like(doc1)\n                .like([doc1, doc2, doc3])\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    like: [doc1, doc2, doc3]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets unlike with string', () => {\n            const 
value = new MoreLikeThisQuery().unlike(doc3).toJSON();\n            const expected = {\n                more_like_this: {\n                    unlike: doc3\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets unlike with object', () => {\n            const value = new MoreLikeThisQuery().unlike(doc1).toJSON();\n            const expected = {\n                more_like_this: {\n                    unlike: doc1\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets unlike with multiple calls', () => {\n            const value = new MoreLikeThisQuery()\n                .unlike(doc1)\n                .unlike(doc2)\n                .unlike(doc3)\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    unlike: [doc1, doc2, doc3]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets unlike with array', () => {\n            const value = new MoreLikeThisQuery()\n                .unlike([doc1, doc2, doc3])\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    unlike: [doc1, doc2, doc3]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets unlike with mix of calls and arrays', () => {\n            const value = new MoreLikeThisQuery()\n                .unlike(doc1)\n                .unlike([doc1, doc2, doc3])\n                .toJSON();\n            const expected = {\n                more_like_this: {\n                    unlike: [doc1, doc2, doc3]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/multi-match-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { MultiMatchQuery } from '../../src';\n\nconst getInstance = (fields, queryStr) => new MultiMatchQuery(fields, queryStr);\n\ndescribe('MultiMatchQuery', () => {\n    describe('operator() validation', () => {\n        test.each([\n            { name: 'accepts valid value: and', value: 'and' },\n            {\n                name: 'accepts valid value: AND (case-insensitive)',\n                value: 'AND'\n            },\n            { name: 'accepts valid value: or', value: 'or' },\n            { name: 'accepts valid value: OR (case-insensitive)', value: 'OR' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().operator(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_operator' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().operator(value)).toThrow(\n                new Error(\n                    \"The 'operator' parameter should be one of 'and' or 'or'\"\n                )\n            );\n        });\n    });\n\n    describe('zeroTermsQuery() validation', () => {\n        test.each([\n            { name: 'accepts valid value: all', value: 'all' },\n            {\n                name: 'accepts valid value: ALL (case-insensitive)',\n                value: 'ALL'\n            },\n            { name: 'accepts valid value: none', value: 'none' },\n            {\n                name: 'accepts valid value: NONE (case-insensitive)',\n                value: 'NONE'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().zeroTermsQuery(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid value',\n                value: 'invalid_zero_terms_query'\n  
          }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().zeroTermsQuery(value)).toThrow(\n                /The 'behavior' parameter should be one of 'all' or 'none'/\n            );\n        });\n    });\n\n    describe('type() validation', () => {\n        test.each([\n            { name: 'accepts valid value: best_fields', value: 'best_fields' },\n            {\n                name: 'accepts valid value: BEST_FIELDS (case-insensitive)',\n                value: 'BEST_FIELDS'\n            },\n            { name: 'accepts valid value: most_fields', value: 'most_fields' },\n            {\n                name: 'accepts valid value: MOST_FIELDS (case-insensitive)',\n                value: 'MOST_FIELDS'\n            },\n            {\n                name: 'accepts valid value: cross_fields',\n                value: 'cross_fields'\n            },\n            {\n                name: 'accepts valid value: CROSS_FIELDS (case-insensitive)',\n                value: 'CROSS_FIELDS'\n            },\n            { name: 'accepts valid value: phrase', value: 'phrase' },\n            {\n                name: 'accepts valid value: PHRASE (case-insensitive)',\n                value: 'PHRASE'\n            },\n            {\n                name: 'accepts valid value: phrase_prefix',\n                value: 'phrase_prefix'\n            },\n            {\n                name: 'accepts valid value: PHRASE_PREFIX (case-insensitive)',\n                value: 'PHRASE_PREFIX'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().type(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_type' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().type(value)).toThrow(\n                /The 'type' parameter should be one of/\n            );\n        });\n    
});\n\n    describe('rewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 'constant_score_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n                value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).toThrow();\n        });\n    });\n\n    describe('fuzzyRewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 'constant_score_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n  
              value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().fuzzyRewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_fuzzy_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().fuzzyRewrite(value)).toThrow();\n        });\n    });\n\n    describe('options', () => {\n        test('sets field option', () => {\n            const result = getInstance().field('my_field').toJSON();\n            const expected = {\n                multi_match: {\n                    fields: ['my_field']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fields option', () => {\n            const result = getInstance()\n                .fields(['my_field_a', 'my_field_b'])\n                .toJSON();\n            const expected = {\n                multi_match: {\n                    fields: ['my_field_a', 'my_field_b']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets type option', () => {\n            const result = getInstance().type('best_fields').toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    type: 'best_fields'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets tie_breaker option', () => {\n            const result = getInstance().tieBreaker(0.3).toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    tie_breaker: 0.3\n                }\n            };\n            expect(result).toEqual(expected);\n        
});\n\n        test('sets operator option', () => {\n            const result = getInstance().operator('and').toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    operator: 'and'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets lenient option', () => {\n            const result = getInstance().lenient(true).toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    lenient: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets slop option', () => {\n            const result = getInstance().slop(2).toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    slop: 2\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzziness option', () => {\n            const result = getInstance().fuzziness('AUTO').toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    fuzziness: 'AUTO'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets prefix_length option', () => {\n            const result = getInstance().prefixLength(5).toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    prefix_length: 5\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_expansions option', () => {\n            const result = getInstance().maxExpansions(3).toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    max_expansions: 3\n                }\n      
      };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets rewrite option', () => {\n            const result = getInstance().rewrite('constant_score').toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    rewrite: 'constant_score'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzzy_rewrite option', () => {\n            const result = getInstance()\n                .fuzzyRewrite('constant_score_boolean')\n                .toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    fuzzy_rewrite: 'constant_score_boolean'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets zero_terms_query option', () => {\n            const result = getInstance().zeroTermsQuery('all').toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    zero_terms_query: 'all'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets cutoff_frequency option', () => {\n            const result = getInstance().cutoffFrequency(10).toJSON();\n            const expected = {\n                multi_match: {\n                    fields: [],\n                    cutoff_frequency: 10\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments with string field', () => {\n            const valueA = getInstance('my_field', 'query str').toJSON();\n            const valueB = getInstance()\n                .field('my_field')\n                .query('query str')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const 
expected = {\n                multi_match: {\n                    fields: ['my_field'],\n                    query: 'query str'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n\n        test('constructor sets arguments with array fields', () => {\n            const valueA = getInstance(\n                ['my_field_a', 'my_field_b'],\n                'query str'\n            ).toJSON();\n            const valueB = getInstance()\n                .fields(['my_field_a', 'my_field_b'])\n                .query('query str')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const valueC = getInstance()\n                .field('my_field_a')\n                .field('my_field_b')\n                .query('query str')\n                .toJSON();\n            expect(valueA).toEqual(valueC);\n\n            const expected = {\n                multi_match: {\n                    fields: ['my_field_a', 'my_field_b'],\n                    query: 'query str'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/nested-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { NestedQuery, nestedQuery, TermQuery } from '../../src';\n\nconst qry = new TermQuery('user', 'kimchy');\n\ndescribe('NestedQuery', () => {\n    describe('options', () => {\n        test('sets path option', () => {\n            const result = nestedQuery().path('obj1').toJSON();\n            const expected = {\n                nested: {\n                    path: 'obj1'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = new NestedQuery(qry, 'obj1').toJSON();\n            const valueB = new NestedQuery().path('obj1').query(qry).toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                nested: {\n                    query: { term: { user: 'kimchy' } },\n                    path: 'obj1'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/parent-id-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ParentIdQuery, parentIdQuery } from '../../src';\n\ndescribe('ParentIdQuery', () => {\n    describe('options', () => {\n        test('sets type option', () => {\n            const result = parentIdQuery().type('blog_tag').toJSON();\n            const expected = {\n                parent_id: {\n                    type: 'blog_tag'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets id option', () => {\n            const result = parentIdQuery().id('1').toJSON();\n            const expected = {\n                parent_id: {\n                    id: '1'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets ignore_unmapped option', () => {\n            const result = parentIdQuery().ignoreUnmapped(true).toJSON();\n            const expected = {\n                parent_id: {\n                    ignore_unmapped: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = new ParentIdQuery('blog_tag', '1').toJSON();\n            const valueB = new ParentIdQuery()\n                .type('blog_tag')\n                .id('1')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                parent_id: {\n                    type: 'blog_tag',\n                    id: '1'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/percolate-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { PercolateQuery, percolateQuery } from '../../src';\n\ndescribe('PercolateQuery', () => {\n    describe('options', () => {\n        test('sets field option', () => {\n            const result = percolateQuery().field('query').toJSON();\n            const expected = {\n                percolate: {\n                    field: 'query',\n                    documents: []\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets document_type option', () => {\n            const result = percolateQuery().documentType('doctype').toJSON();\n            const expected = {\n                percolate: {\n                    document_type: 'doctype',\n                    documents: []\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets document option', () => {\n            const doc = { message: 'A new bonsai tree in the office' };\n            const result = percolateQuery().document(doc).toJSON();\n            const expected = {\n                percolate: {\n                    documents: [doc]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets documents option', () => {\n            const docs = [{ message: 'A new bonsai tree in the office' }];\n            const result = percolateQuery().documents(docs).toJSON();\n            const expected = {\n                percolate: {\n                    documents: docs\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets index option', () => {\n            const result = percolateQuery().index('my-index').toJSON();\n            const expected = {\n                percolate: {\n                    documents: [],\n                    index: 'my-index'\n                }\n            };\n            
expect(result).toEqual(expected);\n        });\n\n        test('sets type option', () => {\n            const result = percolateQuery().type('message').toJSON();\n            const expected = {\n                percolate: {\n                    documents: [],\n                    type: 'message'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets id option', () => {\n            const result = percolateQuery().id('1').toJSON();\n            const expected = {\n                percolate: {\n                    documents: [],\n                    id: '1'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets routing option', () => {\n            const result = percolateQuery().routing('routing').toJSON();\n            const expected = {\n                percolate: {\n                    documents: [],\n                    routing: 'routing'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets preference option', () => {\n            const result = percolateQuery().preference('preference').toJSON();\n            const expected = {\n                percolate: {\n                    documents: [],\n                    preference: 'preference'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets version option', () => {\n            const result = percolateQuery().version(1).toJSON();\n            const expected = {\n                percolate: {\n                    documents: [],\n                    version: 1\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const value = new PercolateQuery('query', 'doctype').toJSON();\n            const expected = {\n           
     percolate: {\n                    field: 'query',\n                    document_type: 'doctype',\n                    documents: []\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('document behavior', () => {\n        test('set document after documents', () => {\n            const docA = { param: { message: 'a bonsai' } };\n            const docB = { param: { message: 'another bonsai' } };\n            const field = 'query';\n            const docType = 'docType';\n            const query = new PercolateQuery(field, docType);\n            const value = query.document(docA).documents([docB]).toJSON();\n\n            const expected = {\n                percolate: {\n                    field,\n                    document_type: docType,\n                    documents: [docA, docB]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/prefix-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { PrefixQuery } from '../../src';\n\nconst getInstance = () => new PrefixQuery('my_field', 'my-value');\n\ndescribe('PrefixQuery', () => {\n    describe('rewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 'constant_score_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n                value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).toThrow();\n        });\n    });\n\n    describe('options', () => {\n        test('sets rewrite option', () => {\n            const result = getInstance().rewrite('constant_score').toJSON();\n            const expected = {\n                prefix: {\n                    my_field: {\n                        value: 'my-value',\n                        rewrite: 'constant_score'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        
test('constructor sets arguments', () => {\n            const valueA = getInstance().toJSON();\n            const valueB = new PrefixQuery()\n                .field('my_field')\n                .value('my-value')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                prefix: {\n                    my_field: 'my-value'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/query-string-query-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { QueryStringQueryBase } from '../../src/queries/full-text-queries';\n\nconst getInstance = queryStr =>\n    new QueryStringQueryBase('my_qry_type', '', queryStr);\n\ndescribe('QueryStringQueryBase', () => {\n    describe('defaultOperator() validation', () => {\n        test.each([\n            { name: 'accepts valid value: and', value: 'and' },\n            {\n                name: 'accepts valid value: AND (case-insensitive)',\n                value: 'AND'\n            },\n            { name: 'accepts valid value: or', value: 'or' },\n            { name: 'accepts valid value: OR (case-insensitive)', value: 'OR' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().defaultOperator(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            {\n                name: 'throws for invalid value',\n                value: 'invalid_default_operator'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().defaultOperator(value)).toThrow(\n                new Error(\n                    \"The 'operator' parameter should be one of 'AND' or 'OR'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets field option', () => {\n            const result = getInstance().field('my_field').toJSON();\n            const expected = {\n                my_qry_type: {\n                    fields: ['my_field']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fields option', () => {\n            const result = getInstance()\n                .fields(['my_field_a', 'my_field_b'])\n                .toJSON();\n            const expected = {\n                my_qry_type: {\n                    fields: ['my_field_a', 'my_field_b']\n                }\n            };\n   
         expect(result).toEqual(expected);\n        });\n\n        test('sets default_operator option', () => {\n            const result = getInstance().defaultOperator('AND').toJSON();\n            const expected = {\n                my_qry_type: {\n                    default_operator: 'AND'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets lenient option', () => {\n            const result = getInstance().lenient(true).toJSON();\n            const expected = {\n                my_qry_type: {\n                    lenient: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets analyze_wildcard option', () => {\n            const result = getInstance().analyzeWildcard(true).toJSON();\n            const expected = {\n                my_qry_type: {\n                    analyze_wildcard: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets quote_field_suffix option', () => {\n            const result = getInstance().quoteFieldSuffix('.exact').toJSON();\n            const expected = {\n                my_qry_type: {\n                    quote_field_suffix: '.exact'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets all_fields option', () => {\n            const result = getInstance().allFields(true).toJSON();\n            const expected = {\n                my_qry_type: {\n                    all_fields: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('chained fields', () => {\n        test('sets chained fields', () => {\n            const value = getInstance()\n                .field('my_field_a')\n                .field('my_field_b')\n                .fields(['my_field_c', 'my_field_c'])\n                .query('query str')\n  
              .toJSON();\n            const expected = {\n                my_qry_type: {\n                    fields: [\n                        'my_field_a',\n                        'my_field_b',\n                        'my_field_c',\n                        'my_field_c'\n                    ],\n                    query: 'query str'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/query-string-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { QueryStringQuery } from '../../src';\n\nconst getInstance = queryStr => new QueryStringQuery(queryStr);\n\ndescribe('QueryStringQuery', () => {\n    describe('rewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 'constant_score_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n                value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).toThrow();\n        });\n    });\n\n    describe('fuzzyRewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 'constant_score_boolean'\n            },\n            {\n   
             name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n                value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().fuzzyRewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_fuzzy_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().fuzzyRewrite(value)).toThrow();\n        });\n    });\n\n    describe('options', () => {\n        test('sets default_field option', () => {\n            const result = getInstance().defaultField('my_field').toJSON();\n            const expected = {\n                query_string: {\n                    default_field: 'my_field'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets allow_leading_wildcard option', () => {\n            const result = getInstance().allowLeadingWildcard(true).toJSON();\n            const expected = {\n                query_string: {\n                    allow_leading_wildcard: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets enable_position_increments option', () => {\n            const result = getInstance()\n                .enablePositionIncrements(true)\n                .toJSON();\n            const expected = {\n                query_string: {\n                    enable_position_increments: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzzy_max_expansions option', () => {\n            const result = 
getInstance().fuzzyMaxExpansions(50).toJSON();\n            const expected = {\n                query_string: {\n                    fuzzy_max_expansions: 50\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzziness option', () => {\n            const result = getInstance().fuzziness('AUTO').toJSON();\n            const expected = {\n                query_string: {\n                    fuzziness: 'AUTO'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzzy_prefix_length option', () => {\n            const result = getInstance().fuzzyPrefixLength(5).toJSON();\n            const expected = {\n                query_string: {\n                    fuzzy_prefix_length: 5\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets rewrite option', () => {\n            const result = getInstance().rewrite('constant_score').toJSON();\n            const expected = {\n                query_string: {\n                    rewrite: 'constant_score'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets fuzzy_rewrite option', () => {\n            const result = getInstance()\n                .fuzzyRewrite('constant_score_boolean')\n                .toJSON();\n            const expected = {\n                query_string: {\n                    fuzzy_rewrite: 'constant_score_boolean'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets phrase_slop option', () => {\n            const result = getInstance().phraseSlop(2).toJSON();\n            const expected = {\n                query_string: {\n                    phrase_slop: 2\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets 
auto_generate_phrase_queries option', () => {\n            const result = getInstance()\n                .autoGeneratePhraseQueries(true)\n                .toJSON();\n            const expected = {\n                query_string: {\n                    auto_generate_phrase_queries: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_determinized_states option', () => {\n            const result = getInstance().maxDeterminizedStates(10500).toJSON();\n            const expected = {\n                query_string: {\n                    max_determinized_states: 10500\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets time_zone option', () => {\n            const result = getInstance().timeZone('+0530').toJSON();\n            const expected = {\n                query_string: {\n                    time_zone: '+0530'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets split_on_whitespace option', () => {\n            const result = getInstance().splitOnWhitespace(true).toJSON();\n            const expected = {\n                query_string: {\n                    split_on_whitespace: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets use_dis_max option', () => {\n            const result = getInstance().useDisMax(true).toJSON();\n            const expected = {\n                query_string: {\n                    use_dis_max: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets tie_breaker option', () => {\n            const result = getInstance().tieBreaker(0.3).toJSON();\n            const expected = {\n                query_string: {\n                    tie_breaker: 0.3\n                }\n            };\n            
expect(result).toEqual(expected);\n        });\n\n        test('sets quote_analyzer option', () => {\n            const result = getInstance().quoteAnalyzer('my_analyzer').toJSON();\n            const expected = {\n                query_string: {\n                    quote_analyzer: 'my_analyzer'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets escape option', () => {\n            const result = getInstance().escape(true).toJSON();\n            const expected = {\n                query_string: {\n                    escape: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/random-score-func.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { randomScoreFunction } from '../../src';\n\ndescribe('RandomScoreFunction', () => {\n    describe('options', () => {\n        test('sets seed option', () => {\n            const seed = Date.now();\n            const result = randomScoreFunction().seed(seed).toJSON();\n            const expected = {\n                random_score: {\n                    seed: seed\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/range-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { RangeQuery } from '../../src';\n\nconst getInstance = () => new RangeQuery('my_field');\n\ndescribe('RangeQuery', () => {\n    describe('illegal method call', () => {\n        test('value cannot be set', () => {\n            expect(() => new RangeQuery().value()).toThrow(\n                new Error('value is not supported in RangeQuery')\n            );\n        });\n    });\n\n    describe('relation() validation', () => {\n        test.each([\n            { name: 'accepts valid value: WITHIN', value: 'WITHIN' },\n            {\n                name: 'accepts valid value: within (case-insensitive)',\n                value: 'within'\n            },\n            { name: 'accepts valid value: CONTAINS', value: 'CONTAINS' },\n            {\n                name: 'accepts valid value: contains (case-insensitive)',\n                value: 'contains'\n            },\n            { name: 'accepts valid value: DISJOINT', value: 'DISJOINT' },\n            {\n                name: 'accepts valid value: disjoint (case-insensitive)',\n                value: 'disjoint'\n            },\n            { name: 'accepts valid value: INTERSECTS', value: 'INTERSECTS' },\n            {\n                name: 'accepts valid value: intersects (case-insensitive)',\n                value: 'intersects'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().relation(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_relation' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().relation(value)).toThrow(\n                /The 'relation' parameter should be one of/\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets gte option', () => {\n            const result = 
getInstance().gte(10).toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        gte: 10\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets lte option', () => {\n            const result = getInstance().lte(20).toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        lte: 20\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets gt option', () => {\n            const result = getInstance().gt(10).toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        gt: 10\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets lt option', () => {\n            const result = getInstance().lt(20).toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        lt: 20\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets from option', () => {\n            const result = getInstance().from(10).toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        from: 10\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets to option', () => {\n            const result = getInstance().to(20).toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        to: 20\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n    
    test('sets include_lower option', () => {\n            const result = getInstance().includeLower(true).toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        include_lower: true\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets include_upper option', () => {\n            const result = getInstance().includeUpper(true).toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        include_upper: true\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets time_zone option', () => {\n            const result = getInstance().timeZone('+0530').toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        time_zone: '+0530'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets format option', () => {\n            const result = getInstance().format('####.00').toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        format: '####.00'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets relation option', () => {\n            const result = getInstance().relation('WITHIN').toJSON();\n            const expected = {\n                range: {\n                    my_field: {\n                        relation: 'WITHIN'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/rank-feature.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { RankFeatureQuery } from '../../src';\n\ndescribe('RankFeatureQuery', () => {\n    describe('constructor', () => {\n        test('constructor sets field', () => {\n            const value = new RankFeatureQuery('my_field').toJSON();\n            const expected = {\n                rank_feature: {\n                    field: 'my_field'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets field via method', () => {\n            const value = new RankFeatureQuery().field('my_field').toJSON();\n            const expected = {\n                rank_feature: {\n                    field: 'my_field'\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('scoring functions', () => {\n        test('sets linear scoring function', () => {\n            const value = new RankFeatureQuery('my_field').linear().toJSON();\n            const expected = {\n                rank_feature: {\n                    field: 'my_field',\n                    linear: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets saturation scoring function', () => {\n            const value = new RankFeatureQuery('my_field')\n                .saturation()\n                .toJSON();\n            const expected = {\n                rank_feature: {\n                    field: 'my_field',\n                    saturation: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets saturation with pivot', () => {\n            const value = new RankFeatureQuery('my_field')\n                .saturationPivot(123)\n                .toJSON();\n            const expected = {\n                rank_feature: {\n                    field: 'my_field',\n                    saturation: {\n                
        pivot: 123\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets sigmoid scoring function', () => {\n            const value = new RankFeatureQuery('my_field')\n                .sigmoid(2, 0.6)\n                .toJSON();\n            const expected = {\n                rank_feature: {\n                    field: 'my_field',\n                    sigmoid: {\n                        pivot: 2,\n                        exponent: 0.6\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('sets logarithmic scoring function', () => {\n            const value = new RankFeatureQuery('my_field').log(2).toJSON();\n            const expected = {\n                rank_feature: {\n                    field: 'my_field',\n                    log: {\n                        scaling_factor: 2\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/regexp-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { RegexpQuery } from '../../src';\n\nconst getInstance = () => new RegexpQuery('my_field', 'my-value');\n\ndescribe('RegexpQuery', () => {\n    describe('rewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 'constant_score_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n                value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).toThrow();\n        });\n    });\n\n    describe('options', () => {\n        test('sets flags option', () => {\n            const result = getInstance().flags('PREFIX|PHRASE').toJSON();\n            const expected = {\n                regexp: {\n                    my_field: {\n                        value: 'my-value',\n                        flags: 'PREFIX|PHRASE'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets case_insensitive option', () => {\n        
    const result = getInstance().caseInsensitive(true).toJSON();\n            const expected = {\n                regexp: {\n                    my_field: {\n                        value: 'my-value',\n                        case_insensitive: true\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets max_determinized_states option', () => {\n            const result = getInstance().maxDeterminizedStates(10500).toJSON();\n            const expected = {\n                regexp: {\n                    my_field: {\n                        value: 'my-value',\n                        max_determinized_states: 10500\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets rewrite option', () => {\n            const result = getInstance().rewrite('constant_score').toJSON();\n            const expected = {\n                regexp: {\n                    my_field: {\n                        value: 'my-value',\n                        rewrite: 'constant_score'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = getInstance().toJSON();\n            const valueB = new RegexpQuery()\n                .field('my_field')\n                .value('my-value')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                regexp: {\n                    my_field: 'my-value'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/score-func.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ScoreFunction } from '../../src/queries/compound-queries/score-functions';\nimport { BoolQuery, TermQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst getInstance = () => new ScoreFunction('my_score_func');\n\ndescribe('ScoreFunction', () => {\n    describe('parameter type validation', () => {\n        test('checks Query class for filter', () => {\n            const instance = getInstance();\n            expect(() => instance.filter(null)).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n            expect(() => instance.filter(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets filter option', () => {\n            const filterQry = new BoolQuery()\n                .filter(new TermQuery('user', 'Idan'))\n                .filter(new TermQuery('level', 'INFO'));\n            const result = getInstance().filter(filterQry).toJSON();\n            const expected = {\n                my_score_func: {},\n                filter: recursiveToJSON(filterQry.toJSON())\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets weight option', () => {\n            const result = getInstance().weight(10).toJSON();\n            const expected = {\n                my_score_func: {},\n                weight: 10\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/script-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ScriptQuery, scriptQuery, Script } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst script = new Script()\n    .lang('groovy')\n    .file('calculate-score')\n    .params({ my_modifier: 2 });\n\ndescribe('ScriptQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks Script class for script', () => {\n            const instance = new ScriptQuery();\n            expect(() => instance.script(null)).toThrow(\n                new TypeError('Argument must be an instance of Script')\n            );\n            expect(() => instance.script(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Script')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets script option', () => {\n            const result = scriptQuery().script(script).toJSON();\n            const expected = {\n                script: {\n                    script: recursiveToJSON(script.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets script', () => {\n            const value = new ScriptQuery(script).toJSON();\n            const expected = {\n                script: {\n                    script: {\n                        file: 'calculate-score',\n                        lang: 'groovy',\n                        params: { my_modifier: 2 }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/script-score-func.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { ScriptScoreFunction, scriptScoreFunction, Script } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst scoreScript = new Script('inline', 'Math.log10(doc.likes.value + 2)');\n\ndescribe('ScriptScoreFunction', () => {\n    describe('options', () => {\n        test('sets script option', () => {\n            const result = scriptScoreFunction().script(scoreScript).toJSON();\n            const expected = {\n                script_score: {\n                    script: recursiveToJSON(scoreScript.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets script with Script object', () => {\n            const valueA = new ScriptScoreFunction(scoreScript).toJSON();\n            const valueB = new ScriptScoreFunction()\n                .script(scoreScript)\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                script_score: {\n                    script: {\n                        inline: 'Math.log10(doc.likes.value + 2)'\n                    }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n\n        test('constructor sets script with string', () => {\n            const valueA = new ScriptScoreFunction(\n                \"_score * doc['view_count'].value\"\n            ).toJSON();\n            const valueB = new ScriptScoreFunction()\n                .script(\"_score * doc['view_count'].value\")\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                script_score: {\n                    script: \"_score * doc['view_count'].value\"\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/script-score-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport {\n    MatchQuery,\n    Script,\n    ScriptScoreQuery,\n    scriptScoreQuery\n} from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst query = new MatchQuery('message', 'elasticsearch');\n\nconst lang = 'painless';\nconst source =\n    \"decayNumericLinear(params.origin, params.scale, params.offset, params.decay, doc['dval'].value)\";\nconst params = { origin: 20, scale: 10, decay: 0.5, offset: 0 };\nconst script = new Script().lang(lang).source(source).params(params);\n\ndescribe('ScriptScoreQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks Query class for query', () => {\n            const instance = new ScriptScoreQuery();\n            expect(() => instance.query(null)).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n            expect(() => instance.query(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n\n        test('checks Script class for script', () => {\n            const instance = new ScriptScoreQuery();\n            expect(() => instance.script(null)).toThrow(\n                new TypeError('Argument must be an instance of Script')\n            );\n            expect(() => instance.script(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Script')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets query option', () => {\n            const result = scriptScoreQuery().query(query).toJSON();\n            const expected = {\n                script_score: {\n                    query: recursiveToJSON(query.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets script option', () => {\n            const result = 
scriptScoreQuery().script(script).toJSON();\n            const expected = {\n                script_score: {\n                    script: recursiveToJSON(script.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets min_score option', () => {\n            const result = scriptScoreQuery().minScore(9.999).toJSON();\n            const expected = {\n                script_score: {\n                    min_score: 9.999\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/semantic-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport esb, { SemanticQuery } from '../../src';\n\ndescribe('SemanticQuery', () => {\n    describe('constructor', () => {\n        test('constructor sets field and query correctly', () => {\n            const q = new SemanticQuery(\n                'inference_field',\n                'Best surfing places'\n            );\n\n            const expected = {\n                semantic: {\n                    field: 'inference_field',\n                    query: 'Best surfing places'\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('empty constructor allows method chaining', () => {\n            const q = new SemanticQuery();\n            q.field('inference_field').query('Best surfing places');\n\n            const expected = {\n                semantic: {\n                    field: 'inference_field',\n                    query: 'Best surfing places'\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('field method sets field correctly', () => {\n            const q = new SemanticQuery();\n            q.field('title_semantic');\n\n            const expected = {\n                semantic: {\n                    field: 'title_semantic'\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('query method sets query text correctly', () => {\n            const q = new SemanticQuery();\n            q.query('mountain lake');\n\n            const expected = {\n                semantic: {\n                    query: 'mountain lake'\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('supports boost parameter', () => {\n            const q = new SemanticQuery('title_semantic', 'mountain lake');\n            q.boost(2);\n\n     
       const expected = {\n                semantic: {\n                    field: 'title_semantic',\n                    query: 'mountain lake',\n                    boost: 2\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('overwriting field and query works correctly', () => {\n            const q = new SemanticQuery('old_field', 'old query');\n            q.field('new_field').query('new query');\n\n            const expected = {\n                semantic: {\n                    field: 'new_field',\n                    query: 'new query'\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n    });\n\n    describe('factory function', () => {\n        test('calls semantic query via esb factory function', () => {\n            const q = esb.semanticQuery(\n                'inference_field',\n                'Best surfing places'\n            );\n\n            const expected = {\n                semantic: {\n                    field: 'inference_field',\n                    query: 'Best surfing places'\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('calls semantic query via esb factory function with chaining', () => {\n            const q = esb\n                .semanticQuery()\n                .field('semantic_field')\n                .query('shoes')\n                .boost(1.5);\n\n            const expected = {\n                semantic: {\n                    field: 'semantic_field',\n                    query: 'shoes',\n                    boost: 1.5\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/simple-query-string-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SimpleQueryStringQuery } from '../../src';\n\nconst getInstance = () => new SimpleQueryStringQuery();\n\ndescribe('SimpleQueryStringQuery', () => {\n    describe('options', () => {\n        test('sets flags option', () => {\n            const result = getInstance().flags('PREFIX|PHRASE').toJSON();\n            const expected = {\n                simple_query_string: {\n                    flags: 'PREFIX|PHRASE'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-containing-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanContainingQuery } from '../../src';\n\ndescribe('SpanContainingQuery', () => {\n    test('sets correct type', () => {\n        const value = new SpanContainingQuery().toJSON();\n        const expected = {\n            span_containing: {}\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-field-masking-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport {\n    SpanFieldMaskingQuery,\n    spanFieldMaskingQuery,\n    SpanTermQuery\n} from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst qry = new SpanTermQuery('text.stems', 'fox');\n\ndescribe('SpanFieldMaskingQuery', () => {\n    describe('options', () => {\n        test('sets query option', () => {\n            const result = spanFieldMaskingQuery().query(qry).toJSON();\n            const expected = {\n                field_masking_span: {\n                    query: recursiveToJSON(qry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets field option', () => {\n            const result = spanFieldMaskingQuery().field('text').toJSON();\n            const expected = {\n                field_masking_span: {\n                    field: 'text'\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = new SpanFieldMaskingQuery('text', qry).toJSON();\n            const valueB = new SpanFieldMaskingQuery()\n                .field('text')\n                .query(qry)\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                field_masking_span: {\n                    query: { span_term: { 'text.stems': 'fox' } },\n                    field: 'text'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-first-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanFirstQuery, spanFirstQuery, SpanTermQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst qry = new SpanTermQuery('text.stems', 'fox');\n\ndescribe('SpanFirstQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks SpanQueryBase class for match', () => {\n            const instance = new SpanFirstQuery();\n            expect(() => instance.match(null)).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n            expect(() => instance.match(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets match option', () => {\n            const result = spanFirstQuery().match(qry).toJSON();\n            const expected = {\n                span_first: {\n                    match: recursiveToJSON(qry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets end option', () => {\n            const result = spanFirstQuery().end(10).toJSON();\n            const expected = {\n                span_first: {\n                    end: 10\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets query', () => {\n            const valueA = new SpanFirstQuery(qry).toJSON();\n            const valueB = new SpanFirstQuery().match(qry).toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                span_first: {\n                    match: { span_term: { 'text.stems': 'fox' } }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-little-big-query-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanLittleBigQueryBase } from '../../src/queries/span-queries';\nimport { SpanTermQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst getInstance = () => new SpanLittleBigQueryBase('my_qry_type');\nconst qry = new SpanTermQuery('text.stems', 'fox');\n\ndescribe('SpanLittleBigQueryBase', () => {\n    describe('parameter type validation', () => {\n        test('checks SpanQueryBase class for little', () => {\n            const instance = getInstance();\n            expect(() => instance.little(null)).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n            expect(() => instance.little(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n        });\n\n        test('checks SpanQueryBase class for big', () => {\n            const instance = getInstance();\n            expect(() => instance.big(null)).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n            expect(() => instance.big(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets little option', () => {\n            const result = getInstance().little(qry).toJSON();\n            const expected = {\n                my_qry_type: {\n                    little: recursiveToJSON(qry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets big option', () => {\n            const result = getInstance().big(qry).toJSON();\n            const expected = {\n                my_qry_type: {\n                    big: recursiveToJSON(qry.toJSON())\n                }\n            };\n            
expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-multi-term-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanMultiTermQuery, spanMultiTermQuery, PrefixQuery } from '../../src';\n\nconst qry = new PrefixQuery('user', 'ki');\n\ndescribe('SpanMultiTermQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks MultiTermQueryBase class for match', () => {\n            const instance = new SpanMultiTermQuery();\n            expect(() => instance.match(null)).toThrow(\n                new TypeError(\n                    'Argument must be an instance of MultiTermQueryBase'\n                )\n            );\n            expect(() => instance.match(Object.create(null))).toThrow(\n                new TypeError(\n                    'Argument must be an instance of MultiTermQueryBase'\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets match option', () => {\n            const result = spanMultiTermQuery().match(qry).toJSON();\n            const expected = {\n                span_multi: {\n                    match: { prefix: { user: 'ki' } }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets query', () => {\n            const value = new SpanMultiTermQuery(qry).toJSON();\n            const expected = {\n                span_multi: {\n                    match: { prefix: { user: 'ki' } }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-near-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanNearQuery, spanNearQuery, SpanTermQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst qry1 = new SpanTermQuery('field', 'value1');\nconst qry2 = new SpanTermQuery('field', 'value2');\n\ndescribe('SpanNearQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks Array class for clauses', () => {\n            const instance = new SpanNearQuery();\n            expect(() => instance.clauses(null)).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n            expect(() => instance.clauses(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets clauses option', () => {\n            const result = spanNearQuery().clauses([qry1, qry2]).toJSON();\n            const expected = {\n                span_near: {\n                    clauses: [\n                        recursiveToJSON(qry1.toJSON()),\n                        recursiveToJSON(qry2.toJSON())\n                    ]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets slop option', () => {\n            const result = spanNearQuery().slop(2).toJSON();\n            const expected = {\n                span_near: {\n                    slop: 2\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets in_order option', () => {\n            const result = spanNearQuery().inOrder(true).toJSON();\n            const expected = {\n                span_near: {\n                    in_order: true\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('validation', () => {\n        test('checks clauses type', () => 
{\n            expect(() => new SpanNearQuery().clauses([qry1, {}])).toThrow(\n                'Argument must be an instance of SpanQueryBase'\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-not-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanNotQuery, spanNotQuery, SpanTermQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst qry = new SpanTermQuery('text.stems', 'fox');\n\ndescribe('SpanNotQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks SpanQueryBase class for include', () => {\n            const instance = new SpanNotQuery();\n            expect(() => instance.include(null)).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n            expect(() => instance.include(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n        });\n\n        test('checks SpanQueryBase class for exclude', () => {\n            const instance = new SpanNotQuery();\n            expect(() => instance.exclude(null)).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n            expect(() => instance.exclude(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of SpanQueryBase')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets include option', () => {\n            const result = spanNotQuery().include(qry).toJSON();\n            const expected = {\n                span_not: {\n                    include: recursiveToJSON(qry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets exclude option', () => {\n            const result = spanNotQuery().exclude(qry).toJSON();\n            const expected = {\n                span_not: {\n                    exclude: recursiveToJSON(qry.toJSON())\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets pre option', () => {\n            
const result = spanNotQuery().pre(10).toJSON();\n            const expected = {\n                span_not: {\n                    pre: 10\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets post option', () => {\n            const result = spanNotQuery().post(10).toJSON();\n            const expected = {\n                span_not: {\n                    post: 10\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets dist option', () => {\n            const result = spanNotQuery().dist(10).toJSON();\n            const expected = {\n                span_not: {\n                    dist: 10\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-or-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanOrQuery, spanOrQuery, SpanTermQuery } from '../../src';\nimport { recursiveToJSON } from '../testutil/index.js';\n\nconst qry1 = new SpanTermQuery('field', 'value1');\nconst qry2 = new SpanTermQuery('field', 'value2');\n\ndescribe('SpanOrQuery', () => {\n    describe('parameter type validation', () => {\n        test('checks Array class for clauses', () => {\n            const instance = new SpanOrQuery();\n            expect(() => instance.clauses(null)).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n            expect(() => instance.clauses(Object.create(null))).toThrow(\n                new TypeError('Argument must be an instance of Array')\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets clauses option', () => {\n            const result = spanOrQuery().clauses([qry1, qry2]).toJSON();\n            const expected = {\n                span_or: {\n                    clauses: [\n                        recursiveToJSON(qry1.toJSON()),\n                        recursiveToJSON(qry2.toJSON())\n                    ]\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('validation', () => {\n        test('checks clauses type', () => {\n            expect(() => new SpanOrQuery().clauses([qry1, {}])).toThrow(\n                'Argument must be an instance of SpanQueryBase'\n            );\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-term-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanTermQuery } from '../../src';\n\ndescribe('SpanTermQuery', () => {\n    test('all in one', () => {\n        let valueA = new SpanTermQuery('user', 'kimchy').toJSON();\n        const valueB = new SpanTermQuery()\n            .field('user')\n            .value('kimchy')\n            .toJSON();\n        expect(valueA).toEqual(valueB);\n\n        let expected = {\n            span_term: {\n                user: 'kimchy'\n            }\n        };\n        expect(valueA).toEqual(expected);\n\n        valueA = new SpanTermQuery('user', 'kimchy').boost(2).toJSON();\n        expected = {\n            span_term: {\n                user: { value: 'kimchy', boost: 2 }\n            }\n        };\n        expect(valueA).toEqual(expected);\n    });\n\n    test('value is required', () => {\n        expect(() => new SpanTermQuery('user').toJSON()).toThrow(\n            new Error('Value is required for Span term query!')\n        );\n    });\n});\n"
  },
  {
    "path": "test/queries-test/span-within-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { SpanWithinQuery } from '../../src';\n\ndescribe('SpanWithinQuery', () => {\n    test('sets correct type', () => {\n        const value = new SpanWithinQuery().toJSON();\n        const expected = {\n            span_within: {}\n        };\n        expect(value).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/queries-test/sparse-vector-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport esb, { SparseVectorQuery } from '../../src';\n\ndescribe('SparseVectorQuery', () => {\n    describe('options', () => {\n        test('sets inference id and query', () => {\n            const q = new SparseVectorQuery();\n            q.field('my_field').inferenceId('model_id').query('my query');\n\n            const expected = {\n                sparse_vector: {\n                    field: 'my_field',\n                    inference_id: 'model_id',\n                    query: 'my query'\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('sets vector as parameter', () => {\n            const q = new SparseVectorQuery();\n            q.field('my_field').queryVector({ a: 1, b: 2, c: 0.4 });\n\n            const expected = {\n                sparse_vector: {\n                    field: 'my_field',\n                    query_vector: {\n                        a: 1,\n                        b: 2,\n                        c: 0.4\n                    }\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('sets pruning enabled', () => {\n            const q = new SparseVectorQuery();\n            q.field('my_field')\n                .inferenceId('model_id')\n                .query('my query')\n                .prune(true);\n\n            const expected = {\n                sparse_vector: {\n                    field: 'my_field',\n                    inference_id: 'model_id',\n                    query: 'my query',\n                    prune: true\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('sets pruning config for only scoring pruned tokens', () => {\n            const q = new SparseVectorQuery();\n            q.field('my_field')\n                .inferenceId('model_id')\n                .query('my query')\n    
            .onlyScorePrunedTokens(true);\n\n            const expected = {\n                sparse_vector: {\n                    field: 'my_field',\n                    inference_id: 'model_id',\n                    query: 'my query',\n                    pruning_config: {\n                        only_score_pruned_tokens: true\n                    }\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('sets pruning config for token weight threshold', () => {\n            const q = new SparseVectorQuery();\n            q.field('my_field')\n                .inferenceId('model_id')\n                .query('my query')\n                .tokensWeightThreshold(0.4);\n\n            const expected = {\n                sparse_vector: {\n                    field: 'my_field',\n                    inference_id: 'model_id',\n                    query: 'my query',\n                    pruning_config: {\n                        tokens_weight_threshold: 0.4\n                    }\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('sets pruning config for token freq ratio threshold', () => {\n            const q = new SparseVectorQuery();\n            q.field('my_field')\n                .inferenceId('model_id')\n                .query('my query')\n                .tokensFreqRatioThreshold(5);\n\n            const expected = {\n                sparse_vector: {\n                    field: 'my_field',\n                    inference_id: 'model_id',\n                    query: 'my query',\n                    pruning_config: {\n                        tokens_freq_ratio_threshold: 5\n                    }\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('sets pruning config for multiple elements', () => {\n            const q = new SparseVectorQuery();\n            q.field('my_field')\n          
      .inferenceId('model_id')\n                .query('my query')\n                .onlyScorePrunedTokens(true)\n                .tokensFreqRatioThreshold(5)\n                .tokensWeightThreshold(0.4)\n                .onlyScorePrunedTokens(false);\n\n            const expected = {\n                sparse_vector: {\n                    field: 'my_field',\n                    inference_id: 'model_id',\n                    query: 'my query',\n                    pruning_config: {\n                        tokens_freq_ratio_threshold: 5,\n                        tokens_weight_threshold: 0.4,\n                        only_score_pruned_tokens: false\n                    }\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n\n        test('calls sparse vector query via esb factory function', () => {\n            const q = esb.sparseVectorQuery('my_field').query('my query');\n\n            const expected = {\n                sparse_vector: {\n                    field: 'my_field',\n                    query: 'my query'\n                }\n            };\n            expect(q.toJSON()).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/term-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TermQuery } from '../../src';\n\ndescribe('TermQuery', () => {\n    describe('all in one', () => {\n        test('all in one', () => {\n            const valueA = new TermQuery('user', 'kimchy');\n            const valueB = new TermQuery();\n\n            expect(() => valueB.toJSON()).toThrow(\n                new Error('Value is required for term level query!')\n            );\n\n            valueB.field('user').value('kimchy');\n            expect(valueA.toJSON()).toEqual(valueB.toJSON());\n\n            let expected = {\n                term: { user: 'kimchy' }\n            };\n            expect(valueA.toJSON()).toEqual(expected);\n\n            valueA.boost(2);\n            expected = {\n                term: { user: { value: 'kimchy', boost: 2 } }\n            };\n            expect(valueA.toJSON()).toEqual(expected);\n        });\n    });\n\n    describe('caseInsensitive', () => {\n        test('test caseInsensitive: default', () => {\n            const valueA = new TermQuery('my_field', 'my-value')\n                .caseInsensitive()\n                .toJSON();\n\n            const expected = {\n                term: {\n                    my_field: {\n                        value: 'my-value',\n                        case_insensitive: true\n                    }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n\n        test('test caseInsensitive: false', () => {\n            const valueA = new TermQuery('my_field', 'my-value')\n                .caseInsensitive(false)\n                .toJSON();\n\n            const expected = {\n                term: {\n                    my_field: {\n                        value: 'my-value',\n                        case_insensitive: false\n                    }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n\n        test('test caseInsensitive: 
true', () => {\n            const valueA = new TermQuery('my_field', 'my-value')\n                .caseInsensitive(true)\n                .toJSON();\n\n            const expected = {\n                term: {\n                    my_field: {\n                        value: 'my-value',\n                        case_insensitive: true\n                    }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/terms-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TermsQuery } from '../../src';\n\nconst getInstance = () => new TermsQuery('my_field');\n\ndescribe('TermsQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('values()', () => {\n                expect(() => getInstance().values(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n\n            test('termsLookup()', () => {\n                expect(() => getInstance().termsLookup(value)).toThrow(\n                    new TypeError('Argument must be an instance of Object')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets value option', () => {\n            const result = getInstance().value('my-value').toJSON();\n            const expected = {\n                terms: {\n                    my_field: ['my-value']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets values option', () => {\n            const result = getInstance()\n                .values(['my-value-1', 'my-value-2'])\n                .toJSON();\n            const expected = {\n                terms: {\n                    my_field: ['my-value-1', 'my-value-2']\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets termsLookup option', () => {\n            const lookup = {\n                index: 'users',\n                type: 'user',\n                id: '2',\n                path: 'followers'\n            };\n            const result = 
getInstance().termsLookup(lookup).toJSON();\n            const expected = {\n                terms: {\n                    my_field: lookup\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets type option', () => {\n            const result = getInstance().type('user').toJSON();\n            const expected = {\n                terms: {\n                    my_field: { type: 'user' }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets index option', () => {\n            const result = getInstance().index('users').toJSON();\n            const expected = {\n                terms: {\n                    my_field: { index: 'users' }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets id option', () => {\n            const result = getInstance().id('2').toJSON();\n            const expected = {\n                terms: {\n                    my_field: { id: '2' }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets path option', () => {\n            const result = getInstance().path('followers').toJSON();\n            const expected = {\n                terms: {\n                    my_field: { path: 'followers' }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets routing option', () => {\n            const result = getInstance().routing('my_routing').toJSON();\n            const expected = {\n                terms: {\n                    my_field: { routing: 'my_routing' }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments with single value', () => {\n            const valueA = new TermsQuery('my_field', 
'my-value').toJSON();\n            const valueB = new TermsQuery()\n                .field('my_field')\n                .value('my-value')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = { terms: { my_field: ['my-value'] } };\n            expect(valueA).toEqual(expected);\n        });\n\n        test('constructor sets arguments with array values', () => {\n            const valueA = new TermsQuery('my_field', [\n                'my-value-1',\n                'my-value-2'\n            ]).toJSON();\n            const valueB = new TermsQuery()\n                .field('my_field')\n                .values(['my-value-1', 'my-value-2'])\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                terms: { my_field: ['my-value-1', 'my-value-2'] }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/terms-set-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TermsSetQuery } from '../../src';\n\nconst getInstance = () => new TermsSetQuery('my_field');\n\ndescribe('TermsSetQuery', () => {\n    describe('parameter type validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            {\n                name: 'throw TypeError for invalid parameter',\n                value: Object.create(null)\n            }\n        ])('$name', ({ value }) => {\n            test('terms()', () => {\n                expect(() => getInstance().terms(value)).toThrow(\n                    new TypeError('Argument must be an instance of Array')\n                );\n            });\n        });\n    });\n\n    describe('options', () => {\n        test('sets term option', () => {\n            const result = getInstance().term('my-value').toJSON();\n            const expected = {\n                terms_set: {\n                    my_field: {\n                        terms: ['my-value']\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets terms option', () => {\n            const result = getInstance()\n                .terms(['my-value-1', 'my-value-2'])\n                .toJSON();\n            const expected = {\n                terms_set: {\n                    my_field: {\n                        terms: ['my-value-1', 'my-value-2']\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets minimum_should_match_field option', () => {\n            const result = getInstance()\n                .minimumShouldMatchField('required_matches')\n                .toJSON();\n            const expected = {\n                terms_set: {\n                    my_field: {\n                        terms: [],\n                        minimum_should_match_field: 
'required_matches'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets minimum_should_match_script option', () => {\n            const script = {\n                source: \"Math.min(params.num_terms, doc['required_matches'].value)\"\n            };\n            const result = getInstance()\n                .minimumShouldMatchScript(script)\n                .toJSON();\n            const expected = {\n                terms_set: {\n                    my_field: {\n                        terms: [],\n                        minimum_should_match_script: script\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments with single term', () => {\n            const valueA = new TermsSetQuery('my_field', 'my-value').toJSON();\n            const valueB = new TermsSetQuery()\n                .field('my_field')\n                .term('my-value')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                terms_set: {\n                    my_field: { terms: ['my-value'] }\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n\n        test('constructor sets arguments with array terms', () => {\n            const valueA = new TermsSetQuery('my_field', [\n                'my-value-1',\n                'my-value-2'\n            ]).toJSON();\n            const valueB = new TermsSetQuery()\n                .field('my_field')\n                .terms(['my-value-1', 'my-value-2'])\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                terms_set: {\n                    my_field: {\n                        terms: ['my-value-1', 'my-value-2']\n                    }\n                
}\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/type-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TypeQuery } from '../../src';\n\ndescribe('TypeQuery', () => {\n    test('all in one', () => {\n        const valueA = new TypeQuery('my_type').toJSON();\n        let valueB = new TypeQuery().value('my_type').toJSON();\n        expect(valueA).toEqual(valueB);\n\n        valueB = new TypeQuery().type('my_type').toJSON();\n        expect(valueA).toEqual(valueB);\n\n        const expected = { type: { value: 'my_type' } };\n        expect(valueA).toEqual(expected);\n    });\n});\n"
  },
  {
    "path": "test/queries-test/weight-score-func.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { WeightScoreFunction } from '../../src';\n\ndescribe('WeightScoreFunction', () => {\n    describe('constructor', () => {\n        test('constructor sets weight', () => {\n            const valueA = new WeightScoreFunction(42).toJSON();\n            const valueB = new WeightScoreFunction().weight(42).toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = { weight: 42 };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/queries-test/wildcard-query.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { WildcardQuery } from '../../src';\n\nconst getInstance = () => new WildcardQuery('my_field', 'my-value');\n\ndescribe('WildcardQuery', () => {\n    describe('rewrite() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: constant_score',\n                value: 'constant_score'\n            },\n            {\n                name: 'accepts valid value: scoring_boolean',\n                value: 'scoring_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_boolean',\n                value: 'constant_score_boolean'\n            },\n            {\n                name: 'accepts valid value: constant_score_filter',\n                value: 'constant_score_filter'\n            },\n            {\n                name: 'accepts valid value: top_terms_boost_23',\n                value: 'top_terms_boost_23'\n            },\n            { name: 'accepts valid value: top_terms_15', value: 'top_terms_15' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_rewrite' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().rewrite(value)).toThrow();\n        });\n    });\n\n    describe('options', () => {\n        test('sets rewrite option', () => {\n            const result = getInstance().rewrite('constant_score').toJSON();\n            const expected = {\n                wildcard: {\n                    my_field: {\n                        value: 'my-value',\n                        rewrite: 'constant_score'\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n\n        test('sets case_insensitive option', 
() => {\n            const result = getInstance().caseInsensitive(true).toJSON();\n            const expected = {\n                wildcard: {\n                    my_field: {\n                        value: 'my-value',\n                        case_insensitive: true\n                    }\n                }\n            };\n            expect(result).toEqual(expected);\n        });\n    });\n\n    describe('constructor', () => {\n        test('constructor sets arguments', () => {\n            const valueA = getInstance().toJSON();\n            const valueB = new WildcardQuery()\n                .field('my_field')\n                .value('my-value')\n                .toJSON();\n            expect(valueA).toEqual(valueB);\n\n            const expected = {\n                wildcard: {\n                    my_field: 'my-value'\n                }\n            };\n            expect(valueA).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/recipes.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport * as esb from '../src';\n\ndescribe('recipes', () => {\n    describe('missingQuery', () => {\n        test('exports alias', () => {\n            expect(esb.recipes.missingQuery).toBe(esb.cookMissingQuery);\n        });\n\n        test('creates BoolQuery with must_not exists filter', () => {\n            const value = esb.recipes.missingQuery('my_field');\n            expect(value.constructor.name).toBe('BoolQuery');\n\n            const expected = {\n                bool: {\n                    must_not: { exists: { field: 'my_field' } }\n                }\n            };\n            expect(value.toJSON()).toEqual(expected);\n        });\n    });\n\n    // Table-driven test for parameter validation across recipes\n    describe('parameter validation', () => {\n        const recipes = [\n            { name: 'randomSortQuery', method: 'randomSortQuery' },\n            { name: 'filterQuery', method: 'filterQuery' }\n        ];\n\n        recipes.forEach(recipe => {\n            describe(recipe.name, () => {\n                test('throws TypeError for null query parameter', () => {\n                    expect(() => esb.recipes[recipe.method](null)).toThrow(\n                        TypeError\n                    );\n                    expect(() => esb.recipes[recipe.method](null)).toThrow(\n                        'Argument must be an instance of Query'\n                    );\n                });\n\n                test('throws TypeError for invalid query parameter type', () => {\n                    const invalidParam = Object.create(null);\n                    expect(() =>\n                        esb.recipes[recipe.method](invalidParam)\n                    ).toThrow(TypeError);\n                    expect(() =>\n                        esb.recipes[recipe.method](invalidParam)\n                    ).toThrow('Argument must be an instance of Query');\n                });\n            });\n      
  });\n    });\n\n    describe('randomSortQuery', () => {\n        test('exports alias', () => {\n            expect(esb.recipes.randomSortQuery).toBe(esb.cookRandomSortQuery);\n        });\n\n        test('creates FunctionScoreQuery with random_score function', () => {\n            const value = esb.recipes.randomSortQuery();\n            expect(value.constructor.name).toBe('FunctionScoreQuery');\n\n            const expected = {\n                function_score: {\n                    query: {\n                        match_all: {}\n                    },\n                    functions: [{ random_score: {} }]\n                }\n            };\n            expect(value.toJSON()).toEqual(expected);\n        });\n\n        test('creates FunctionScoreQuery with custom query and seed', () => {\n            const seed = Date.now();\n            const value = esb.recipes\n                .randomSortQuery(new esb.RangeQuery('age').gte(10), seed)\n                .toJSON();\n\n            const expected = {\n                function_score: {\n                    query: {\n                        range: { age: { gte: 10 } }\n                    },\n                    functions: [{ random_score: { seed } }]\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('filterQuery', () => {\n        test('exports alias', () => {\n            expect(esb.recipes.filterQuery).toBe(esb.cookFilterQuery);\n        });\n\n        test('creates BoolQuery with filter clause', () => {\n            const qry = new esb.TermQuery('status', 'active');\n            const value = esb.recipes.filterQuery(qry);\n            expect(value.constructor.name).toBe('BoolQuery');\n\n            const expected = {\n                bool: {\n                    filter: {\n                        term: { status: 'active' }\n                    }\n                }\n            };\n            expect(value.toJSON()).toEqual(expected);\n        
});\n\n        test('creates BoolQuery with match_all query when includeMatchAll is true', () => {\n            const qry = new esb.TermQuery('status', 'active');\n            const value = esb.recipes.filterQuery(qry, true).toJSON();\n\n            const expected = {\n                bool: {\n                    must: { match_all: {} },\n                    filter: {\n                        term: { status: 'active' }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/suggesters-test/analyzed-suggester-base.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { AnalyzedSuggesterBase } from '../../src/suggesters';\n\nconst getInstance = (...args) =>\n    new AnalyzedSuggesterBase('my_type', 'my_suggester', ...args);\n\ndescribe('AnalyzedSuggesterBase', () => {\n    describe('constructor', () => {\n        test('sets txt', () => {\n            const value = getInstance('my_field', 'my-text').toJSON();\n            const expected = {\n                my_suggester: {\n                    text: 'my-text',\n                    my_type: {\n                        field: 'my_field'\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('text method', () => {\n        test('text can be set', () => {\n            const value = getInstance().text('my-text').toJSON();\n            const expected = {\n                my_suggester: {\n                    text: 'my-text',\n                    my_type: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sets analyzer', () => {\n            const value = getInstance().analyzer('snowball').toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    my_type: {\n                        analyzer: 'snowball'\n                    }\n                }\n            });\n        });\n\n        test('sets shardSize', () => {\n            const value = getInstance().shardSize(10).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    my_type: {\n                        shard_size: 10\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/suggesters-test/completion-suggester.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { CompletionSuggester } from '../../src';\n\nconst getInstance = () => new CompletionSuggester('my_suggester');\n\ndescribe('CompletionSuggester', () => {\n    describe('prefix method', () => {\n        test('prefix is set', () => {\n            const value = getInstance().prefix('nir').toJSON();\n            const expected = {\n                my_suggester: {\n                    prefix: 'nir',\n                    completion: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('options', () => {\n        test('sets skipDuplicates', () => {\n            const value = getInstance().skipDuplicates(true).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        skip_duplicates: true\n                    }\n                }\n            });\n        });\n\n        test('sets fuzzy with boolean', () => {\n            const value = getInstance().fuzzy(true).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        fuzzy: true\n                    }\n                }\n            });\n        });\n\n        test('sets fuzziness', () => {\n            const value = getInstance().fuzziness(2).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        fuzzy: { fuzziness: 2 }\n                    }\n                }\n            });\n        });\n\n        test('sets transpositions', () => {\n            const value = getInstance().transpositions(true).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        fuzzy: { transpositions: true }\n                    }\n                }\n            });\n        
});\n\n        test('sets minLength', () => {\n            const value = getInstance().minLength(2).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        fuzzy: { min_length: 2 }\n                    }\n                }\n            });\n        });\n\n        test('sets prefixLength', () => {\n            const value = getInstance().prefixLength(2).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        fuzzy: { prefix_length: 2 }\n                    }\n                }\n            });\n        });\n\n        test('sets unicodeAware', () => {\n            const value = getInstance().unicodeAware(true).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        fuzzy: { unicode_aware: true }\n                    }\n                }\n            });\n        });\n\n        test('sets regex flags', () => {\n            const value = getInstance().flags('ANYSTRING').toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        regex: { flags: 'ANYSTRING' }\n                    }\n                }\n            });\n        });\n\n        test('sets maxDeterminizedStates', () => {\n            const value = getInstance().maxDeterminizedStates(5000).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    completion: {\n                        regex: { max_determinized_states: 5000 }\n                    }\n                }\n            });\n        });\n\n        test('sets contexts', () => {\n            const value = getInstance()\n                .contexts('location', { lat: 43.662, lon: -79.38 })\n                .toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n 
                   completion: {\n                        contexts: {\n                            location: { lat: 43.662, lon: -79.38 }\n                        }\n                    }\n                }\n            });\n        });\n    });\n\n    describe('regex method', () => {\n        test('regex is set', () => {\n            const value = getInstance().regex('nir').toJSON();\n            const expected = {\n                my_suggester: {\n                    regex: 'nir',\n                    completion: {}\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('multiple options', () => {\n        test('multiple contexts can be set', () => {\n            const value = getInstance()\n                .contexts('location', { lat: 43.662, lon: -79.38 })\n                .contexts('place_type', ['cafe', 'restaurants'])\n                .toJSON();\n            const expected = {\n                my_suggester: {\n                    completion: {\n                        contexts: {\n                            location: { lat: 43.662, lon: -79.38 },\n                            place_type: ['cafe', 'restaurants']\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n\n        test('multiple fuzzy options can be set', () => {\n            const value = getInstance()\n                .fuzziness(2)\n                .transpositions(true)\n                .toJSON();\n            const expected = {\n                my_suggester: {\n                    completion: {\n                        fuzzy: {\n                            fuzziness: 2,\n                            transpositions: true\n                        }\n                    }\n                }\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n});\n"
  },
  {
    "path": "test/suggesters-test/direct-generator.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { DirectGenerator } from '../../src';\n\nconst getInstance = field => new DirectGenerator(field);\n\ndescribe('DirectGenerator', () => {\n    describe('constructor', () => {\n        test('sets field', () => {\n            const value = getInstance('my_field').toJSON();\n            const expected = {\n                field: 'my_field'\n            };\n            expect(value).toEqual(expected);\n        });\n    });\n\n    describe('suggestMode() validation', () => {\n        test.each([\n            { name: 'accepts valid value: missing', value: 'missing' },\n            {\n                name: 'accepts valid value: MISSING (case-insensitive)',\n                value: 'MISSING'\n            },\n            { name: 'accepts valid value: popular', value: 'popular' },\n            {\n                name: 'accepts valid value: POPULAR (case-insensitive)',\n                value: 'POPULAR'\n            },\n            { name: 'accepts valid value: always', value: 'always' },\n            {\n                name: 'accepts valid value: ALWAYS (case-insensitive)',\n                value: 'ALWAYS'\n            }\n        ])('$name', ({ value }) => {\n            expect(() =>\n                getInstance('my_field').suggestMode(value)\n            ).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_suggest_mode' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance('my_field').suggestMode(value)).toThrow(\n                new Error(\n                    \"The 'suggest_mode' parameter should be one of 'always', 'missing', 'popular'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets field', () => {\n            const value = getInstance('my_field').field('my_field').toJSON();\n            
expect(value).toEqual({\n                field: 'my_field'\n            });\n        });\n\n        test('sets size', () => {\n            const value = getInstance('my_field').size(7).toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                size: 7\n            });\n        });\n\n        test('sets suggestMode', () => {\n            const value = getInstance('my_field')\n                .suggestMode('always')\n                .toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                suggest_mode: 'always'\n            });\n        });\n\n        test('sets maxEdits', () => {\n            const value = getInstance('my_field').maxEdits(3).toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                max_edits: 3\n            });\n        });\n\n        test('sets prefixLength', () => {\n            const value = getInstance('my_field').prefixLength(3).toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                prefix_length: 3\n            });\n        });\n\n        test('sets minWordLength', () => {\n            const value = getInstance('my_field').minWordLength(5).toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                min_word_length: 5\n            });\n        });\n\n        test('sets maxInspections', () => {\n            const value = getInstance('my_field').maxInspections(4).toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                max_inspections: 4\n            });\n        });\n\n        test('sets minDocFreq', () => {\n            const value = getInstance('my_field').minDocFreq(0.4).toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                min_doc_freq: 0.4\n            });\n        });\n\n        test('sets maxTermFreq', () => {\n            const value = 
getInstance('my_field').maxTermFreq(1).toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                max_term_freq: 1\n            });\n        });\n\n        test('sets preFilter', () => {\n            const value = getInstance('my_field').preFilter('reverse').toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                pre_filter: 'reverse'\n            });\n        });\n\n        test('sets postFilter', () => {\n            const value = getInstance('my_field')\n                .postFilter('reverse')\n                .toJSON();\n            expect(value).toEqual({\n                field: 'my_field',\n                post_filter: 'reverse'\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/suggesters-test/phrase-suggester.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { PhraseSuggester, DirectGenerator } from '../../src';\n\nconst getInstance = () => new PhraseSuggester('my_suggester');\n\nconst dirGenA = new DirectGenerator('title.trigram').suggestMode('always');\nconst dirGenB = new DirectGenerator('title.reverse')\n    .suggestMode('always')\n    .preFilter('reverse')\n    .postFilter('reverse');\n\ndescribe('PhraseSuggester', () => {\n    describe('smoothing() validation', () => {\n        test.each([\n            {\n                name: 'accepts valid value: stupid_backoff',\n                value: 'stupid_backoff'\n            },\n            {\n                name: 'accepts valid value: STUPID_BACKOFF (case-insensitive)',\n                value: 'STUPID_BACKOFF'\n            },\n            { name: 'accepts valid value: laplace', value: 'laplace' },\n            {\n                name: 'accepts valid value: LAPLACE (case-insensitive)',\n                value: 'LAPLACE'\n            },\n            {\n                name: 'accepts valid value: linear_interpolation',\n                value: 'linear_interpolation'\n            },\n            {\n                name: 'accepts valid value: LINEAR_INTERPOLATION (case-insensitive)',\n                value: 'LINEAR_INTERPOLATION'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().smoothing(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_smoothing' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().smoothing(value)).toThrow(\n                new Error(\n                    \"The 'smoothing' parameter should be one of 'laplace', 'linear_interpolation', 'stupid_backoff'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets gramSize', () => {\n 
           const value = getInstance().gramSize(1).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        gram_size: 1\n                    }\n                }\n            });\n        });\n\n        test('sets realWordErrorLikelihood', () => {\n            const value = getInstance().realWordErrorLikelihood(0.9).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        real_word_error_likelihood: 0.9\n                    }\n                }\n            });\n        });\n\n        test('sets confidence', () => {\n            const value = getInstance().confidence(0).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        confidence: 0\n                    }\n                }\n            });\n        });\n\n        test('sets maxErrors', () => {\n            const value = getInstance().maxErrors(10).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        max_errors: 10\n                    }\n                }\n            });\n        });\n\n        test('sets separator', () => {\n            const value = getInstance().separator('|').toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        separator: '|'\n                    }\n                }\n            });\n        });\n\n        test('sets highlight', () => {\n            const value = getInstance().highlight('<em>', '</em>').toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        highlight: {\n                            pre_tag: '<em>',\n                            post_tag: '</em>'\n                        }\n                    
}\n                }\n            });\n        });\n\n        test('sets collate', () => {\n            const value = getInstance()\n                .collate({\n                    query: {\n                        inline: {\n                            match: { '{{field_name}}': '{{suggestion}}' }\n                        }\n                    },\n                    params: { field_name: 'title' },\n                    prune: true\n                })\n                .toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        collate: {\n                            query: {\n                                inline: {\n                                    match: {\n                                        '{{field_name}}': '{{suggestion}}'\n                                    }\n                                }\n                            },\n                            params: { field_name: 'title' },\n                            prune: true\n                        }\n                    }\n                }\n            });\n        });\n\n        test('sets smoothing', () => {\n            const value = getInstance().smoothing('stupid_backoff').toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        smoothing: 'stupid_backoff'\n                    }\n                }\n            });\n        });\n\n        test('sets directGenerator with single generator', () => {\n            const value = getInstance().directGenerator(dirGenA).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        direct_generator: [\n                            {\n                                field: 'title.trigram',\n                                suggest_mode: 'always'\n                            }\n                        ]\n                    }\n 
               }\n            });\n        });\n\n        test('sets directGenerator with array of generators', () => {\n            const value = getInstance()\n                .directGenerator([dirGenA, dirGenB])\n                .toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    phrase: {\n                        direct_generator: [\n                            {\n                                field: 'title.trigram',\n                                suggest_mode: 'always'\n                            },\n                            {\n                                field: 'title.reverse',\n                                suggest_mode: 'always',\n                                pre_filter: 'reverse',\n                                post_filter: 'reverse'\n                            }\n                        ]\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/suggesters-test/term-suggester.test.js",
    "content": "import { describe, test, expect } from 'vitest';\nimport { TermSuggester } from '../../src';\n\nconst getInstance = () => new TermSuggester('my_suggester');\n\ndescribe('TermSuggester', () => {\n    describe('sort() validation', () => {\n        test.each([\n            { name: 'accepts valid value: score', value: 'score' },\n            {\n                name: 'accepts valid value: SCORE (case-insensitive)',\n                value: 'SCORE'\n            },\n            { name: 'accepts valid value: frequency', value: 'frequency' },\n            {\n                name: 'accepts valid value: FREQUENCY (case-insensitive)',\n                value: 'FREQUENCY'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().sort(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_sort' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().sort(value)).toThrow(\n                new Error(\n                    \"The 'sort' parameter should be one of 'score' or 'frequency'\"\n                )\n            );\n        });\n    });\n\n    describe('suggestMode() validation', () => {\n        test.each([\n            { name: 'accepts valid value: missing', value: 'missing' },\n            {\n                name: 'accepts valid value: MISSING (case-insensitive)',\n                value: 'MISSING'\n            },\n            { name: 'accepts valid value: popular', value: 'popular' },\n            {\n                name: 'accepts valid value: POPULAR (case-insensitive)',\n                value: 'POPULAR'\n            },\n            { name: 'accepts valid value: always', value: 'always' },\n            {\n                name: 'accepts valid value: ALWAYS (case-insensitive)',\n                value: 'ALWAYS'\n            }\n        ])('$name', ({ value }) => {\n            
expect(() => getInstance().suggestMode(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid_suggest_mode' }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().suggestMode(value)).toThrow(\n                new Error(\n                    \"The 'suggest_mode' parameter should be one of 'always', 'missing', 'popular'\"\n                )\n            );\n        });\n    });\n\n    describe('stringDistance() validation', () => {\n        test.each([\n            { name: 'accepts valid value: internal', value: 'internal' },\n            {\n                name: 'accepts valid value: INTERNAL (case-insensitive)',\n                value: 'INTERNAL'\n            },\n            {\n                name: 'accepts valid value: damerau_levenshtein',\n                value: 'damerau_levenshtein'\n            },\n            {\n                name: 'accepts valid value: DAMERAU_LEVENSHTEIN (case-insensitive)',\n                value: 'DAMERAU_LEVENSHTEIN'\n            },\n            { name: 'accepts valid value: levenstein', value: 'levenstein' },\n            {\n                name: 'accepts valid value: LEVENSTEIN (case-insensitive)',\n                value: 'LEVENSTEIN'\n            },\n            { name: 'accepts valid value: jarowinkler', value: 'jarowinkler' },\n            {\n                name: 'accepts valid value: JAROWINKLER (case-insensitive)',\n                value: 'JAROWINKLER'\n            },\n            { name: 'accepts valid value: ngram', value: 'ngram' },\n            {\n                name: 'accepts valid value: NGRAM (case-insensitive)',\n                value: 'NGRAM'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().stringDistance(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null 
},\n            {\n                name: 'throws for invalid value',\n                value: 'invalid_string_distance'\n            }\n        ])('$name', ({ value }) => {\n            expect(() => getInstance().stringDistance(value)).toThrow(\n                new Error(\n                    \"The 'string_distance' parameter should be one of 'damerau_levenshtein', 'internal', 'jarowinkler', 'levenstein', 'ngram'\"\n                )\n            );\n        });\n    });\n\n    describe('options', () => {\n        test('sets sort', () => {\n            const value = getInstance().sort('score').toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    term: {\n                        sort: 'score'\n                    }\n                }\n            });\n        });\n\n        test('sets suggestMode', () => {\n            const value = getInstance().suggestMode('always').toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    term: {\n                        suggest_mode: 'always'\n                    }\n                }\n            });\n        });\n\n        test('sets maxEdits', () => {\n            const value = getInstance().maxEdits(3).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    term: {\n                        max_edits: 3\n                    }\n                }\n            });\n        });\n\n        test('sets prefixLength', () => {\n            const value = getInstance().prefixLength(3).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    term: {\n                        prefix_length: 3\n                    }\n                }\n            });\n        });\n\n        test('sets minWordLength', () => {\n            const value = getInstance().minWordLength(5).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    
term: {\n                        min_word_length: 5\n                    }\n                }\n            });\n        });\n\n        test('sets maxInspections', () => {\n            const value = getInstance().maxInspections(4).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    term: {\n                        max_inspections: 4\n                    }\n                }\n            });\n        });\n\n        test('sets minDocFreq', () => {\n            const value = getInstance().minDocFreq(0.4).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    term: {\n                        min_doc_freq: 0.4\n                    }\n                }\n            });\n        });\n\n        test('sets maxTermFreq', () => {\n            const value = getInstance().maxTermFreq(1).toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    term: {\n                        max_term_freq: 1\n                    }\n                }\n            });\n        });\n\n        test('sets stringDistance', () => {\n            const value = getInstance()\n                .stringDistance('damerau_levenshtein')\n                .toJSON();\n            expect(value).toEqual({\n                my_suggester: {\n                    term: {\n                        string_distance: 'damerau_levenshtein'\n                    }\n                }\n            });\n        });\n    });\n});\n"
  },
  {
    "path": "test/testing-guidelines.mdc",
    "content": "# Testing Guidelines\n\nThis document defines the testing standards for the elastic-builder project. Use it as context when writing or refactoring tests.\n\n## AVA to Vitest Migration\n\nThis section provides patterns and references for refactoring tests from AVA to Vitest.\n\n### Import Changes\n\n```javascript\n// ❌ AVA\nimport test from 'ava';\n\n// ✅ Vitest\nimport { describe, test, expect } from 'vitest';\n// Add beforeEach, afterEach, vi as needed\n```\n\n### Assertion Mapping\n\n| AVA | Vitest |\n|-----|--------|\n| `t.deepEqual(value, expected)` | `expect(value).toEqual(expected)` |\n| `t.is(value, expected)` | `expect(value).toBe(expected)` |\n| `t.true(value)` | `expect(value).toBe(true)` |\n| `t.false(value)` | `expect(value).toBe(false)` |\n| `t.truthy(value)` | `expect(value).toBeTruthy()` |\n| `t.falsy(value)` | `expect(value).toBeFalsy()` |\n| `t.throws(() => fn())` | `expect(() => fn()).toThrow()` |\n| `t.throws(() => fn(), TypeError)` | `expect(() => fn()).toThrow(TypeError)` |\n| `t.notThrows(() => fn())` | `expect(() => fn()).not.toThrow()` |\n| `const err = t.throws(fn); t.is(err.message, 'msg')` | `expect(fn).toThrow(new Error('msg'))` |\n\n### Eliminating Test Macros\n\nThe codebase uses AVA test macros in `_macros.js`. These must be eliminated and replaced with explicit inline tests. **Do not import from `_macros.js`** in refactored tests.\n\n#### Pattern 1: `setsAggType` Macro\n\n```javascript\n// ❌ AVA with macro\nimport { setsAggType } from '../_macros';\ntest(setsAggType, TermsAggregation, 'terms');\n\n// ✅ Vitest - explicit inline test\ntest('sets type as terms', () => {\n    const value = new TermsAggregation('my_agg').toJSON();\n    expect(value).toEqual({\n        my_agg: { terms: {} }\n    });\n});\n```\n\n#### Pattern 2: `validatedCorrectly` Macro\n\nThe `validatedCorrectly` macro tests valid values, case variations, and invalid values in one call with dynamic method invocation. 
Replace with split `test.each` blocks and explicit method calls.\n\n```javascript\n// ❌ AVA with macro (dynamic method calls, tests all case variants)\nimport { validatedCorrectly } from '../_macros';\ntest(validatedCorrectly, getInstance, 'collectMode', ['depth_first', 'breadth_first']);\n\n// ✅ Vitest - split into valid/invalid with explicit method calls\ndescribe('collectMode() validation', () => {\n    test.each([\n        { name: 'accepts valid value: depth_first', value: 'depth_first' },\n        { name: 'accepts valid value: DEPTH_FIRST (case-insensitive)', value: 'DEPTH_FIRST' },\n        { name: 'accepts valid value: breadth_first', value: 'breadth_first' },\n        { name: 'accepts valid value: BREADTH_FIRST (case-insensitive)', value: 'BREADTH_FIRST' }\n    ])('$name', ({ value }) => {\n        expect(() => getInstance().collectMode(value)).not.toThrow();\n    });\n\n    test.each([\n        { name: 'throws for null value', value: null },\n        { name: 'throws for invalid value', value: 'invalid_collect_mode' }\n    ])('$name', ({ value }) => {\n        expect(() => getInstance().collectMode(value)).toThrow(\n            new Error('`collect_mode` must be one of `depth_first`, `breadth_first`')\n        );\n    });\n});\n```\n\n#### Pattern 3: `makeSetsOptionMacro` / `setsOption` Factory\n\nThe `makeSetsOptionMacro` creates a macro that tests option setters. 
Replace with explicit individual tests.\n\n```javascript\n// ❌ AVA with makeSetsOptionMacro\nimport { makeSetsOptionMacro, nameTypeExpectStrategy } from '../_macros';\nconst setsOption = makeSetsOptionMacro(getInstance, nameTypeExpectStrategy('my_agg', 'terms'));\ntest(setsOption, 'showTermDocCountError', { param: true });\ntest(setsOption, 'collectMode', { param: 'breadth_first' });\n\n// ✅ Vitest - explicit tests grouped under describe\ndescribe('options', () => {\n    test('sets showTermDocCountError', () => {\n        const value = getInstance().showTermDocCountError(true).toJSON();\n        expect(value).toEqual({\n            my_agg: { terms: { show_term_doc_count_error: true } }\n        });\n    });\n\n    test('sets collectMode', () => {\n        const value = getInstance().collectMode('breadth_first').toJSON();\n        expect(value).toEqual({\n            my_agg: { terms: { collect_mode: 'breadth_first' } }\n        });\n    });\n});\n```\n\n#### Pattern 4: `illegalParamType` Macro\n\nThe `illegalParamType` macro tests that a method throws TypeError for null and invalid parameters using dynamic method calls. Replace with `describe.each` pattern.\n\n**IMPORTANT:** When testing multiple methods with the same validation, each method MUST have its own `test()` call. 
Never combine multiple method validations in a single test, even if they validate against the same values.\n\n```javascript\n// ❌ AVA with macro (dynamic method calls, separate type/message assertions)\nimport { illegalParamType } from '../_macros';\ntest(illegalParamType, getInstance(), 'filter', 'Query');\n\n// ❌ BAD - Multiple method calls in one test\ntest('checks Query class', () => {\n    expect(() => instance.positive(null)).toThrow(new TypeError('...'));\n    expect(() => instance.positive(Object.create(null))).toThrow(new TypeError('...'));\n    expect(() => instance.negative(null)).toThrow(new TypeError('...'));\n    expect(() => instance.negative(Object.create(null))).toThrow(new TypeError('...'));\n});\n\n// ✅ GOOD - describe.each with separate tests for each method\ndescribe('parameter validation', () => {\n    describe.each([\n        { name: 'throw TypeError for null parameter', value: null },\n        { name: 'throw TypeError for invalid parameter', value: Object.create(null) }\n    ])('$name', ({ value }) => {\n        test('filter()', () => {\n            expect(() => getInstance().filter(value)).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n        \n        test('query()', () => {\n            expect(() => getInstance().query(value)).toThrow(\n                new TypeError('Argument must be an instance of Query')\n            );\n        });\n    });\n});\n```\n\nThis pattern ensures:\n- Each method gets its own test for clear failure reporting\n- Test output shows exactly which method failed\n- Easier to debug and maintain\n- Follows the \"one test, one concern\" principle\n\n#### Pattern 5: `illegalCall` Macro\n\nThe `illegalCall` macro tests that a method cannot be called on an instance. 
Replace with explicit inline test.\n\n```javascript\n// ❌ AVA with macro\nimport { illegalCall } from '../_macros';\ntest(illegalCall, MyClass, 'unsupportedMethod', 'arg1', 'arg2');\n\n// ✅ Vitest - explicit inline test\ntest('unsupported_method cannot be set', () => {\n    expect(() => new MyClass('arg1', 'arg2').unsupportedMethod()).toThrow(\n        new Error('unsupportedMethod is not supported in MyClass')\n    );\n});\n```\n\n### Error Assertion Consolidation\n\nAVA tests often capture the error and assert on it separately. Consolidate into a single Vitest assertion:\n\n```javascript\n// ❌ AVA pattern - separate assertions\nlet err = t.throws(() => getInstance().method(null), Error);\nt.is(err.message, 'expected message');\n\nerr = t.throws(() => getInstance().method('invalid'), Error);\nt.is(err.message, 'expected message');\n\n// ✅ Vitest - single consolidated assertions\nexpect(() => getInstance().method(null)).toThrow(\n    new Error('expected message')\n);\nexpect(() => getInstance().method('invalid')).toThrow(\n    new Error('expected message')\n);\n```\n\n### Wrapping Tests in Describe Blocks\n\nAVA tests are often at the file's root level. Vitest tests should be wrapped in a `describe` block named after the class being tested:\n\n```javascript\n// ❌ AVA - tests at root level\ntest('can be instantiated', t => { /* ... */ });\ntest('sets option', t => { /* ... */ });\n\n// ✅ Vitest - wrapped in describe\ndescribe('ClassName', () => {\n    test('can be instantiated', () => { /* ... */ });\n\n    describe('options', () => {\n        test('sets option', () => { /* ... */ });\n    });\n});\n```\n\n### Test Naming Conventions\n\nTest names should be **clear and descriptive**. There is flexibility in naming style:\n\n**Acceptable patterns:**\n```javascript\n// Present tense (preferred for most cases)\ntest('sets option', () => { /* ... */ });\ntest('throws error for invalid input', () => { /* ... 
*/ });\n\n// Descriptive with \"Should\" (acceptable for feature-focused tests)\ntest('Should set time for distance feature', () => { /* ... */ });\n\n// Assertion-style (acceptable)\ntest('can be instantiated', () => { /* ... */ });\ntest('serializes to correct DSL', () => { /* ... */ });\n```\n\n**Key principles:**\n- Be descriptive and specific\n- Avoid overly technical jargon\n- Use consistent style within a file\n- Test name should explain what behavior is being verified\n\nAll of the above patterns are acceptable. Choose the style that best communicates the test's intent.\n\n### Migration Checklist\n\nFor each test file being migrated:\n\n1. [ ] Replace import: `import test from 'ava'` → `import { describe, test, expect } from 'vitest'`\n2. [ ] Remove all `_macros` imports\n3. [ ] Remove the `t` parameter from all test callbacks\n4. [ ] Convert all assertions using the mapping table above\n5. [ ] Replace macro usages with explicit inline tests\n6. [ ] Wrap all tests under a `describe('ClassName')` block\n7. [ ] Group related tests into logical `describe` sub-blocks\n8. [ ] Split validation tests into valid/invalid `test.each` blocks\n9. [ ] Verify error messages are asserted (not just error types)\n10. [ ] Consolidate `t.throws` + `t.is(err.message)` into single `toThrow(new Error())` assertions\n11. [ ] Run tests to confirm behavior is preserved\n\n## Core Principles\n\n### 1. One Test, One Concern\nEach test should verify a single behavior or concern. 
Avoid testing multiple unrelated methods or behaviors in the same test.\n\n**❌ BAD - Multiple unrelated concerns:**\n```javascript\ntest('validation works', () => {\n    expect(() => instance.methodA(value)).toThrow(TypeError);\n    expect(() => instance.methodB(value)).toThrow(TypeError);\n});\n```\n\n**✅ GOOD - Separate tests for each method:**\n```javascript\ndescribe('validation', () => {\n    test('methodA()', () => {\n        expect(() => instance.methodA(value)).toThrow(TypeError);\n    });\n    \n    test('methodB()', () => {\n        expect(() => instance.methodB(value)).toThrow(TypeError);\n    });\n});\n```\n\n### 2. Keep Tests Simple and Explicit\n- No abstraction, no hidden logic\n- Test names must be static strings (never use template literals with variables)\n- No dynamic method calls - always call methods explicitly: `instance.method()` not `instance[tc.method]()`\n- No conditional logic in test bodies\n- **Prefer repetition over abstraction** - If similar tests exist for different methods or scenarios, it's better to repeat the test code than to abstract it into helper functions or dynamic patterns. Explicit, readable tests are more valuable than DRY test code.\n\n### 3. Use Table-Driven Tests Appropriately\nUse table-driven tests when:\n- The exact same method(s) are called in each test case\n- Only the input data varies between tests\n- The test structure is identical across cases\n\n### 4. Use Vitest's Built-in Tools\n- Use Vitest's `vi` for mocking - no external libraries like sinon\n- Use `test.each` for parameterized/table-driven tests\n- Use `expect.assertions()` in async tests to ensure all assertions run\n\n## Table-Driven Tests\n\n### Use `test.each` for Parameterized Tests\n\nAlways use Vitest's built-in `test.each` for table-driven tests. 
This is mandatory - do not use `forEach` loops.\n\n**Benefits of `test.each`:**\n- Better test output with clear naming\n- Built-in Vitest feature with first-class support\n- Cleaner, more declarative syntax\n- Improved IDE integration and debugging\n\n### Pattern 1: Multiple Methods with Same Input (Separate Tests)\n\nWhen multiple methods need to be tested with the same inputs but represent different concerns, use `describe.each` with nested tests:\n\n```javascript\ndescribe.each([\n    { name: 'throw TypeError for null parameter', value: null },\n    { name: 'throw TypeError for invalid parameter', value: Object.create(null) }\n])('$name', ({ value }) => {\n    test('methodA()', () => {\n        expect(() => instance.methodA(value)).toThrow(\n            new TypeError('Argument must be an instance of Type')\n        );\n    });\n\n    test('methodB()', () => {\n        expect(() => instance.methodB(value)).toThrow(\n            new TypeError('Argument must be an instance of Other')\n        );\n    });\n});\n```\n\n**Why this pattern:**\n- Each method gets its own test\n- Maintains clarity about what's being tested\n- Easier to debug when a specific method fails\n- Allows for method-specific assertions\n\n### Pattern 2: Multiple Methods Producing Same Result\n\nWhen testing that multiple methods produce identical output for the same input:\n\n```javascript\ndescribe.each([\n    { name: 'description of test scenario', input: inputValue, expected: expectedOutput }\n])('$name', ({ input, expected }) => {\n    test('methodA()', () => {\n        const value = instance.methodA(input).toJSON();\n        expect(value).toEqual(expected);\n    });\n\n    test('methodB()', () => {\n        const value = instance.methodB(input).toJSON();\n        expect(value).toEqual(expected);\n    });\n});\n```\n\n### Pattern 3: Same Test Logic, Different Inputs (Validation Tests)\n\nWhen testing a single method with multiple input variations, **split valid and invalid cases into 
separate `test.each` blocks** to avoid conditional logic and ensure error messages are verified:\n\n**❌ BAD - Conditional logic and no error message verification:**\n```javascript\ntest.each([\n    { name: 'accepts valid value: default', value: 'default', shouldThrow: false },\n    { name: 'accepts valid value: html', value: 'html', shouldThrow: false },\n    { name: 'throws for null value', value: null, shouldThrow: true },\n    { name: 'throws for invalid value', value: 'invalid_value', shouldThrow: true }\n])('$name', ({ value, shouldThrow }) => {\n    const fn = () => instance.method(value);\n    if (shouldThrow) {\n        expect(fn).toThrow();  // Doesn't verify the error message!\n    } else {\n        expect(fn).not.toThrow();\n    }\n});\n```\n\n**✅ GOOD - Separate blocks with error message verification:**\n```javascript\ndescribe('method() validation', () => {\n    // Valid values - test that they don't throw\n    test.each([\n        { name: 'accepts valid value: default', value: 'default' },\n        { name: 'accepts valid value: html', value: 'html' },\n        { name: 'accepts valid value: HTML (case-insensitive)', value: 'HTML' }\n    ])('$name', ({ value }) => {\n        expect(() => instance.method(value)).not.toThrow();\n    });\n\n    // Invalid values - test that they throw with specific error\n    test.each([\n        { name: 'throws for null value', value: null },\n        { name: 'throws for invalid value', value: 'invalid_value' }\n    ])('$name', ({ value }) => {\n        expect(() => instance.method(value)).toThrow(\n            new Error('`method` must be one of `default`, `html`')\n        );\n    });\n});\n```\n\n**Benefits of splitting:**\n- No conditional logic in test bodies\n- Error messages are verified (catches bugs where wrong error is thrown)\n- Clearer test intent\n- Easier to maintain\n\n### Case-Insensitive Value Testing\n\nWhen testing methods that accept case-insensitive enum values, test the lowercase version and **one** 
uppercase variant to demonstrate case-insensitivity. Testing all case variations (lowercase, UPPERCASE, MixedCase) is unnecessary.\n\n**✅ GOOD - Minimal case coverage:**\n```javascript\ntest.each([\n    { name: 'accepts valid value: plain', value: 'plain' },\n    { name: 'accepts valid value: PLAIN (case-insensitive)', value: 'PLAIN' },\n    { name: 'accepts valid value: postings', value: 'postings' },\n    { name: 'accepts valid value: POSTINGS (case-insensitive)', value: 'POSTINGS' }\n])('$name', ({ value }) => {\n    expect(() => instance.type(value)).not.toThrow();\n});\n```\n\n**❌ BAD - Excessive case variations:**\n```javascript\ntest.each([\n    { name: 'accepts value: plain (lowercase)', value: 'plain' },\n    { name: 'accepts value: PLAIN (uppercase)', value: 'PLAIN' },\n    { name: 'accepts value: Plain (title case)', value: 'Plain' },\n    { name: 'accepts value: pLaIn (mixed case)', value: 'pLaIn' }\n])('$name', ({ value }) => { /* ... */ });\n```\n\n**Key principle:** One uppercase test per enum value is sufficient to prove case-insensitivity.\n\n### When NOT to Use Table-Driven Tests\n\n**Critical Rule:** If different methods are called in each test, DO NOT use table-driven tests. 
Each method must be tested separately.\n\n❌ **Don't use table-driven tests when:**\n- **Different methods are tested per case** - Each method should have its own test\n- **Test structure differs between cases** - Keep tests separate if they follow different patterns\n- **You need conditional logic** - Any `if/else` in test body means tests should be split\n- **Methods would be called dynamically** - Never use `instance[tc.method]()`\n- **Subtle variations in setup or assertions** - Even small differences warrant separate tests\n- **Different fields or properties are being set** - e.g., `topLeft()` vs `bottomRight()` vs `top()` vs `left()`\n\n**Examples of tests that should NOT be table-driven:**\n\n```javascript\n// ❌ BAD - Different methods being called\ntest.each([\n    { name: 'sets topLeft', method: 'topLeft', point: pt1 },\n    { name: 'sets bottomRight', method: 'bottomRight', point: pt2 },\n    { name: 'sets top', method: 'top', value: 40.73 },\n    { name: 'sets left', method: 'left', value: -74.1 }\n])('$name', ({ method, point, value }) => {\n    // Would require conditional logic or dynamic method calls!\n});\n\n// ✅ GOOD - Separate tests for each method\ntest('sets topLeft option', () => {\n    const result = getInstance().topLeft(pt1).toJSON();\n    expect(result).toEqual({ geo_bounding_box: { my_field: { top_left: ... } } });\n});\n\ntest('sets bottomRight option', () => {\n    const result = getInstance().bottomRight(pt2).toJSON();\n    expect(result).toEqual({ geo_bounding_box: { my_field: { bottom_right: ... 
} } });\n});\n\ntest('sets top option', () => {\n    const result = getInstance().top(40.73).toJSON();\n    expect(result).toEqual({ geo_bounding_box: { my_field: { top: 40.73 } } });\n});\n```\n\n```javascript\n// ❌ BAD - Different setups and structures\ntest.each([\n    { name: 'time for distance feature', field: 'time', origin: 'now', pivot: '1h' },\n    { name: 'position for distance feature', field: 'location', origin: [-71.3, 41.15], pivot: '1000m' }\n])('$name', ({ field, origin, pivot }) => {\n    // origin is sometimes string, sometimes array - requires different handling\n});\n\n// ✅ GOOD - Separate tests for conceptually different scenarios\ntest('Should set time for distance feature', () => {\n    const value = new DistanceFeatureQuery('time').origin('now').pivot('1h').toJSON();\n    expect(value).toEqual({ distance_feature: { field: 'time', pivot: '1h', origin: 'now' } });\n});\n\ntest('Should set position for distance feature', () => {\n    const value = new DistanceFeatureQuery('location').origin([-71.3, 41.15]).pivot('1000m').toJSON();\n    expect(value).toEqual({ distance_feature: { field: 'location', pivot: '1000m', origin: [-71.3, 41.15] } });\n});\n```\n\n**When table-driven tests ARE appropriate:**\n- Testing the **exact same method** with different input values\n- Validation tests (valid values vs invalid values for **one method**)\n- Testing alias methods that should behave identically\n\n### Decision Tree\n\n1. **Are the exact same method(s) being called in each test case?**\n   - NO → Write individual tests\n   - YES → Continue to question 2\n\n2. **Do the methods represent different concerns?**\n   - YES → Use Pattern 1 (separate tests with nested describe)\n   - NO → Continue to question 3\n\n3. 
**Does only the input data vary between tests?**\n   - NO → Write individual tests\n   - YES → Use Pattern 2 or 3 depending on scenario\n\n## Identifying Refactoring Opportunities\n\n### Pattern 1: Repeated Tests for Same Method\n\n**❌ Should be consolidated:**\n```javascript\ndescribe('encoder() validation', () => {\n    test('accepts valid value: default', () => {\n        expect(() => highlight().encoder('default')).not.toThrow();\n    });\n    \n    test('accepts valid value: html', () => {\n        expect(() => highlight().encoder('html')).not.toThrow();\n    });\n    \n    test('throws for null value', () => {\n        expect(() => highlight().encoder(null)).toThrow();\n    });\n});\n```\n\n**Indicators:**\n- Multiple tests calling the same method\n- Each test differs only in the input value\n- Test names follow a repetitive pattern\n- Same assertion type across tests\n\n**✅ Refactored:**\nUse Pattern 3 from table-driven tests above.\n\n### Pattern 2: Similar Tests Across Multiple Methods\n\n**❌ Should be consolidated:**\n```javascript\ndescribe('method1() validation', () => {\n    test('accepts string', () => { /* ... */ });\n    test('accepts number', () => { /* ... */ });\n    test('throws for null', () => { /* ... */ });\n});\n\ndescribe('method2() validation', () => {\n    test('accepts string', () => { /* ... */ });\n    test('accepts number', () => { /* ... */ });\n    test('throws for null', () => { /* ... */ });\n});\n```\n\n**Indicators:**\n- Multiple describe blocks with similar structure\n- Same validation patterns repeated for different methods\n- Test names are nearly identical across describe blocks\n\n**✅ Refactored:**\nUse Pattern 1 from table-driven tests above.\n\n### Pattern 3: Case-Insensitive Value Testing\n\n**❌ Redundant:**\n```javascript\ntest('accepts value: html (lowercase)', () => { /* ... */ });\ntest('accepts value: HTML (uppercase)', () => { /* ... */ });\ntest('accepts value: Html (mixed)', () => { /* ... 
*/ });\n```\n\n**Indicators:**\n- Multiple tests that only differ in the casing of the input\n- Test names explicitly mention \"lowercase\", \"uppercase\", \"mixed case\"\n\n**✅ Refactored:**\nConsolidate into a single table-driven test with case variations.\n\n### Quick Checklist\n\nAsk these questions when reviewing tests:\n\n1. **Repetition**: Do I see 3+ consecutive tests calling the same method?\n2. **Pattern matching**: Do the test names follow a template (e.g., \"accepts X\", \"throws for Y\")?\n3. **Input variation**: Is the only difference between tests the input value?\n4. **Assertion uniformity**: Are all tests using the same assertion pattern?\n5. **Single concern**: Does each test verify only one behavior/method?\n\nIf you answer \"yes\" to questions 1-4, the tests are likely candidates for table-driven refactoring.\n\n## Anti-Patterns\n\n### ❌ Dynamic Test Names\n```javascript\n// BAD\ntest(`method ${name}`, () => { /* ... */ });\n\n// GOOD - use test.each with $name interpolation\ntest.each([\n    { name: 'accepts valid type: point', value: 'point' }\n])('$name', ({ value }) => { /* ... 
*/ });\n```\n\n### ❌ Dynamic Method Calls\n```javascript\n// BAD - method name is in a variable\ntest.each([\n    { name: 'test methodA', method: 'methodA', value: null },\n    { name: 'test methodB', method: 'methodB', value: null }\n])('$name', ({ method, value }) => {\n    expect(() => instance[method](value)).toThrow(TypeError);\n});\n\n// GOOD - each method is explicitly called\ndescribe.each([\n    { name: 'throws for null', value: null }\n])('$name', ({ value }) => {\n    test('methodA()', () => {\n        expect(() => instance.methodA(value)).toThrow(TypeError);\n    });\n    test('methodB()', () => {\n        expect(() => instance.methodB(value)).toThrow(TypeError);\n    });\n});\n```\n\n**Key principle:** You should be able to read the test and see exactly which method is being called without looking at the test data.\n\n### ❌ Testing Multiple Unrelated Concerns\n```javascript\n// BAD - tests two different methods in one test\ntest('both methods throw', () => {\n    expect(() => instance.methodA(value)).toThrow();\n    expect(() => instance.methodB(value)).toThrow();\n});\n\n// GOOD - separate test for each method\ndescribe('throw for invalid input', () => {\n    test('methodA()', () => {\n        expect(() => instance.methodA(value)).toThrow();\n    });\n    test('methodB()', () => {\n        expect(() => instance.methodB(value)).toThrow();\n    });\n});\n```\n\n### ❌ Setup Functions That Hide Logic\nDon't abstract away test logic into helper functions that make it unclear what's being tested.\n\n### ❌ External Fixture Objects\nDon't extract test data to module-scope fixture objects that separate the data from the test. This forces readers to scroll back and forth to understand the test.\n\n```javascript\n// BAD - fixture object separates data from test\nconst fixtures = {\n    'scenario A': { input: new Thing(), expected: { /* ... */ } },\n    'scenario B': { input: new OtherThing(), expected: { /* ... 
*/ } }\n};\n\ndescribe.each([{ name: 'scenario A' }, { name: 'scenario B' }])('$name', ({ name }) => {\n    test('method()', () => {\n        const { input, expected } = fixtures[name];  // Where is this data? Have to scroll up!\n        expect(instance.method(input).toJSON()).toEqual(expected);\n    });\n});\n\n// GOOD - data is inline with the test\ndescribe('scenario A', () => {\n    test('method()', () => {\n        const input = new Thing();\n        const value = instance.method(input).toJSON();\n        expect(value).toEqual({ /* expected inline */ });\n    });\n});\n\ndescribe('scenario B', () => {\n    test('method()', () => {\n        const input = new OtherThing();\n        const value = instance.method(input).toJSON();\n        expect(value).toEqual({ /* expected inline */ });\n    });\n});\n```\n\n**Key principle:** Test data should be immediately visible when reading the test. Prefer duplicating setup over creating abstraction layers that hide the actual test inputs and outputs.\n\n### ❌ Conditional Logic in Tests\nKeep test bodies linear - avoid if/else statements when possible.\n\nWhen you find yourself needing conditional logic in a parameterized test, it's a sign that the test cases are too different and should be written as separate tests instead.\n\n```javascript\n// BAD - conditional logic based on test data\ntest.each([\n    { name: 'scenario A', scenario: 'a' },\n    { name: 'scenario B', scenario: 'b' }\n])('$name', ({ scenario }) => {\n    let instance;\n    if (scenario === 'a') {\n        instance = new Thing().methodA();\n    } else {\n        instance = new Thing().methodB();\n    }\n    expect(instance.toJSON()).toBeTruthy();\n});\n\n// GOOD - separate tests for different scenarios\ntest('scenario A', () => {\n    const instance = new Thing().methodA();\n    expect(instance.toJSON()).toBeTruthy();\n});\n\ntest('scenario B', () => {\n    const instance = new Thing().methodB();\n    
expect(instance.toJSON()).toBeTruthy();\n});\n```\n\n### ❌ Dynamic Data in Test Cases\nTest case data should not contain functions, factory methods, or any executable code. However, **object instances as input values are allowed** when testing alias methods (see [Testing Alias Methods](#testing-alias-methods)).\n\n```javascript\n// ❌ BAD - factory functions in test data\ntest.each([\n    { name: 'scenario A', setup: () => new Thing().methodA() },\n    { name: 'scenario B', setup: () => new Thing().methodB() }\n])('$name', ({ setup }) => {\n    const instance = setup();\n    expect(instance.toJSON()).toBeTruthy();\n});\n\n// ❌ BAD - different classes/methods per test case (conditional behavior)\ntest.each([\n    { name: 'test 1', instance: new SomeClass() },\n    { name: 'test 2', instance: new OtherClass() }\n])('$name', ({ instance }) => {\n    expect(instance.method()).toBeTruthy();\n});\n\n// ✅ GOOD - write separate tests when setup differs\ntest('scenario A', () => {\n    const instance = new Thing().methodA();\n    expect(instance.toJSON()).toBeTruthy();\n});\n\ntest('scenario B', () => {\n    const instance = new Thing().methodB();\n    expect(instance.toJSON()).toBeTruthy();\n});\n\n// ✅ GOOD - object instances as shared input for alias method testing\ndescribe.each([\n    {\n        name: 'scenario A',\n        input: new TermsAggregation('users', 'user'),\n        expected: { /* ... */ }\n    },\n    {\n        name: 'scenario B',\n        input: new TermsAggregation('countries', 'country'),\n        expected: { /* ... 
*/ }\n    }\n])('$name', ({ input, expected }) => {\n    test('aggregation()', () => {\n        const value = getInstance().aggregation(input).toJSON();\n        expect(value).toEqual(expected);\n    });\n\n    test('agg()', () => {\n        const value = getInstance().agg(input).toJSON();\n        expect(value).toEqual(expected);\n    });\n});\n```\n\n**Key distinctions:**\n- ❌ Functions/factories in test data → BAD (hides setup logic)\n- ❌ Different classes per test case → BAD (conditional behavior)\n- ✅ Same class instances as shared input for alias methods → OK (both methods use identical input)\n\n### ❌ External Mocking Libraries\nUse `vi`, not sinon or other external libraries.\n\n### ❌ Using `forEach` Instead of `test.each`\n```javascript\n// BAD - using forEach for parameterized tests\nconst cases = [{ name: 'test 1', value: 1 }, { name: 'test 2', value: 2 }];\ncases.forEach((tc) => {\n    test(tc.name, () => { /* ... */ });\n});\n\n// GOOD - use test.each\ntest.each([\n    { name: 'test 1', value: 1 },\n    { name: 'test 2', value: 2 }\n])('$name', ({ value }) => { /* ... */ });\n```\n\n### ❌ Async Tests Without `expect.assertions()`\n```javascript\n// BAD - no assertion count\ntest('fetches data', async () => {\n    const data = await fetchData();\n    expect(data).toBeTruthy();\n});\n\n// GOOD - explicit assertion count\ntest('fetches data', async () => {\n    expect.assertions(1);\n    const data = await fetchData();\n    expect(data).toBeTruthy();\n});\n```\n\n## Mocking with Vitest\n\nUse Vitest's built-in `vi` instead of external libraries.\n\n### Spy Cleanup with Lifecycle Hooks\n\nAlways use `beforeEach`/`afterEach` for spy setup and cleanup. 
This ensures spies are properly restored even if a test fails:\n\n```javascript\nimport { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';\n\ndescribe('warning scenarios', () => {\n    let spy;\n\n    beforeEach(() => {\n        spy = vi.spyOn(console, 'warn').mockImplementation(() => {});\n    });\n\n    afterEach(() => {\n        spy.mockRestore();\n    });\n\n    test('logs warning for scenario A', () => {\n        // ... test code ...\n        expect(spy).toHaveBeenCalledTimes(2);\n        expect(spy).toHaveBeenNthCalledWith(1, 'expected message');\n    });\n\n    test('logs warning for scenario B', () => {\n        // ... test code ...\n        expect(spy).toHaveBeenCalledTimes(2);\n    });\n});\n```\n\n**❌ BAD - Manual cleanup that may not run on failure:**\n```javascript\ntest('logs warning', () => {\n    const spy = vi.spyOn(console, 'warn').mockImplementation(() => {});\n    \n    // ... test code that might throw ...\n    \n    spy.mockRestore(); // Won't run if test fails above!\n});\n```\n\n**Benefits:**\n- No external dependencies\n- Better API integration\n- Prevents console output during tests\n- Built-in TypeScript support\n- Automatic cleanup even on test failure\n\n## Error Assertions\n\n### Consolidated Error Checking\n\nWhen testing that a function throws with a specific error type and message, use a single assertion instead of two:\n\n**❌ BAD - Redundant execution:**\n```javascript\nexpect(() => func()).toThrow(TypeError);\nexpect(() => func()).toThrow('message');\n```\n\n**✅ GOOD - Single assertion with type and message:**\n```javascript\nexpect(() => func()).toThrow(new TypeError('message'));\n```\n\n**✅ ALSO GOOD - Using Error constructor name:**\n```javascript\nexpect(() => func()).toThrow(\n    expect.objectContaining({\n        name: 'TypeError',\n        message: 'Expected error message'\n    })\n);\n```\n\n### Pattern for Validation Error Tests\n\nWhen testing parameter validation across multiple methods with the same 
invalid inputs:\n\n```javascript\ndescribe.each([\n    { name: 'throw TypeError for null parameter', value: null },\n    { name: 'throw TypeError for invalid parameter', value: Object.create(null) }\n])('$name', ({ value }) => {\n    test('methodA()', () => {\n        expect(() => instance.methodA(value)).toThrow(\n            new TypeError('Argument must be an instance of TypeA')\n        );\n    });\n\n    test('methodB()', () => {\n        expect(() => instance.methodB(value)).toThrow(\n            new TypeError('Argument must be an instance of TypeB')\n        );\n    });\n});\n```\n\n## Async Testing\n\n### Mandatory: Use `expect.assertions()` in Async Tests\n\n**All async tests must include `expect.assertions(n)`** at the start to declare the expected number of assertions. This is mandatory because:\n- Prevents false positives when callbacks don't execute\n- Makes test intent explicit\n- Catches bugs where async code paths are skipped\n\n```javascript\ntest('callback is executed', async () => {\n    expect.assertions(2);\n    \n    await someAsyncOperation((data) => {\n        expect(data).toBeTruthy();\n        expect(data.value).toBe('expected');\n    });\n});\n```\n\n**❌ BAD - No assertion count (test could pass without assertions running):**\n```javascript\ntest('callback is executed', async () => {\n    await someAsyncOperation((data) => {\n        expect(data).toBeTruthy();\n    });\n});\n```\n\n**✅ GOOD - Explicit assertion count:**\n```javascript\ntest('callback is executed', async () => {\n    expect.assertions(1);\n    \n    await someAsyncOperation((data) => {\n        expect(data).toBeTruthy();\n    });\n});\n```\n\n### When to Use `expect.assertions()`\n\nUse in **all** of the following scenarios:\n1. Any test with `async`/`await`\n2. Tests with callbacks that contain assertions\n3. Tests with `.then()`/`.catch()` chains\n4. 
Tests using `Promise.all()` or similar patterns\n\n**Note:** For simple synchronous tests, `expect.assertions()` is optional but can still add clarity.\n\n## Test File Organization\n\n### Structure\n\nOrganize test files with a consistent structure. All tests should be grouped under `describe` blocks - avoid placing tests at the root level of the file.\n\n```javascript\ndescribe('ClassName', () => {\n    // 1. Constructor tests (if applicable)\n    describe('constructor', () => {\n        test('sets arguments', () => { /* ... */ });\n        test('throws error for invalid arguments', () => { /* ... */ });\n    });\n\n    // 2. Parameter validation tests (type checking)\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            { name: 'throw TypeError for invalid parameter', value: Object.create(null) }\n        ])('$name', ({ value }) => {\n            test('methodA()', () => { /* ... */ });\n            test('methodB()', () => { /* ... */ });\n        });\n    });\n\n    // 3. Validation tests for specific methods (enum values, ranges, etc.)\n    describe('methodName() validation', () => {\n        test.each([\n            { name: 'accepts valid value: foo', value: 'foo' },\n            { name: 'accepts valid value: FOO (case-insensitive)', value: 'FOO' }\n        ])('$name', ({ value }) => {\n            expect(() => instance.methodName(value)).not.toThrow();\n        });\n\n        test.each([\n            { name: 'throws for null value', value: null },\n            { name: 'throws for invalid value', value: 'invalid' }\n        ])('$name', ({ value }) => {\n            expect(() => instance.methodName(value)).toThrow(\n                new Error('`methodName` must be one of `foo`, `bar`')\n            );\n        });\n    });\n\n    // 4. Option setters / method behavior tests\n    describe('options', () => {\n        test('sets optionA', () => { /* ... 
*/ });\n        test('sets optionB', () => { /* ... */ });\n    });\n\n    // 5. Specific feature tests\n    describe('feature name', () => {\n        test('does specific behavior', () => { /* ... */ });\n    });\n\n    // 6. Output/serialization tests (if applicable)\n    describe('toJSON', () => {\n        test('returns correct DSL', () => { /* ... */ });\n        test('throws error when required fields missing', () => { /* ... */ });\n    });\n});\n```\n\n### Grouping Guidelines\n\n1. **All tests under describe blocks** - Never place `test()` calls at the root level of a `describe('ClassName')` block. Group related tests into nested `describe` blocks.\n\n2. **Consistent ordering** - Follow this order when applicable:\n   - Constructor behavior\n   - Parameter validation (type checking)\n   - Method-specific validation (enums, ranges)\n   - Option setters\n   - Feature-specific behavior\n   - Output/serialization (`toJSON`)\n\n3. **Descriptive describe names** - Use names that clearly indicate what aspect is being tested:\n   - `'constructor'` - for constructor tests\n   - `'parameter validation'` - for type checking tests\n   - `'[methodName]() validation'` - for value validation on specific methods\n   - `'options'` - for simple option setters\n   - `'[feature name]'` - for specific feature behavior\n\n**❌ BAD - Mixed organization:**\n```javascript\ndescribe('KNN', () => {\n    test('can be instantiated', () => { /* ... */ });\n    \n    describe('filter method', () => {\n        test('adds single query', () => { /* ... */ });\n    });\n    \n    test('queryVector sets correctly', () => { /* ... */ });\n    \n    describe('option setters', () => { /* ... */ });\n});\n```\n\n**✅ GOOD - Consistent organization:**\n```javascript\ndescribe('KNN', () => {\n    describe('constructor', () => {\n        test('can be instantiated', () => { /* ... */ });\n        test('throws error if numCandidates is less than k', () => { /* ... 
*/ });\n    });\n\n    describe('parameter validation', () => {\n        describe.each([\n            { name: 'throw TypeError for null parameter', value: null },\n            { name: 'throw TypeError for invalid parameter', value: 'not_a_query' }\n        ])('$name', ({ value }) => {\n            test('filter()', () => { /* ... */ });\n        });\n    });\n\n    describe('options', () => {\n        test('queryVector sets correctly', () => { /* ... */ });\n        test('boost sets correctly', () => { /* ... */ });\n    });\n\n    describe('filter method', () => {\n        test('adds single query correctly', () => { /* ... */ });\n        test('adds queries as array correctly', () => { /* ... */ });\n    });\n});\n```\n\n## Testing Alias Methods\n\nWhen a method has an alias (e.g., `agg()` is an alias for `aggregation()`), test both methods with the same logic. Use table-driven tests when you have multiple scenarios to test.\n\n### Pattern: Table-Driven Alias Method Tests\n\nWhen testing alias methods across multiple scenarios, use `describe.each` with the input and expected values inline in the test data. 
Object instances ARE allowed in test data for this specific pattern because:\n- Both alias methods need the **exact same input** (shared reference is correct)\n- The input/expected pairs are self-contained within the test data array\n- No external fixture lookups are required\n\n**✅ GOOD - Table-driven with inline object instances:**\n```javascript\ndescribe.each([\n    {\n        name: 'nested aggs',\n        input: new TermsAggregation('users', 'user'),\n        expected: {\n            my_agg: {\n                my_type: {},\n                aggs: {\n                    users: { terms: { field: 'user' } }\n                }\n            }\n        }\n    },\n    {\n        name: 'deep nested aggs',\n        input: new TermsAggregation('countries', 'artist.country')\n            .order('rock>playback_stats.avg', 'desc')\n            .agg(\n                new FilterAggregation('rock', new TermQuery('genre', 'rock'))\n                    .agg(new StatsAggregation('playback_stats', 'play_count'))\n            ),\n        expected: {\n            my_agg: {\n                my_type: {},\n                aggs: {\n                    countries: {\n                        terms: {\n                            field: 'artist.country',\n                            order: { 'rock>playback_stats.avg': 'desc' }\n                        },\n                        aggs: {\n                            rock: {\n                                filter: { term: { genre: 'rock' } },\n                                aggs: {\n                                    playback_stats: { stats: { field: 'play_count' } }\n                                }\n                            }\n                        }\n                    }\n                }\n            }\n        }\n    }\n])('$name', ({ input, expected }) => {\n    test('aggregation()', () => {\n        const value = getInstance().aggregation(input).toJSON();\n        expect(value).toEqual(expected);\n    });\n\n    
test('agg()', () => {\n        const value = getInstance().agg(input).toJSON();\n        expect(value).toEqual(expected);\n    });\n});\n```\n\n**Why this works:**\n- Each scenario has its `input` and `expected` defined together (no external lookups)\n- Both alias methods share the same input instance (correct for alias testing)\n- Adding new scenarios is straightforward\n- Test output clearly shows which scenario and method failed\n\n### Single Scenario Pattern\n\nFor a single scenario with alias methods, define `input` and `expected` once within the describe block:\n\n**✅ GOOD - Shared input/expected within describe:**\n```javascript\ndescribe('multiple nested aggs', () => {\n    const input = [\n        new TermsAggregation('countries', 'country'),\n        new TermsAggregation('users', 'user')\n    ];\n    const expected = {\n        my_agg: {\n            my_type: {},\n            aggs: {\n                countries: { terms: { field: 'country' } },\n                users: { terms: { field: 'user' } }\n            }\n        }\n    };\n\n    test('aggregations()', () => {\n        const value = getInstance().aggregations(input).toJSON();\n        expect(value).toEqual(expected);\n    });\n\n    test('aggs()', () => {\n        const value = getInstance().aggs(input).toJSON();\n        expect(value).toEqual(expected);\n    });\n});\n```\n\n**❌ BAD - External fixtures object with name-based lookup:**\n```javascript\n// Don't do this - separates test data from the test via indirection\nconst fixtures = {\n    'nested aggs': { input: termsAgg, expected: { /* ... 
*/ } }\n};\n\ndescribe.each([{ name: 'nested aggs' }])('$name', ({ name }) => {\n    test('aggregation()', () => {\n        const { input, expected } = fixtures[name];  // BAD: lookup by name\n        // ...\n    });\n});\n```\n\n**Key distinction:**\n- ✅ Object instances in `describe.each` array with `input`/`expected` inline → OK\n- ❌ Separate fixtures object with name-based lookup → BAD\n\n## Factory Functions for Instance Creation\n\nSimple factory functions like `getInstance()` are acceptable for reducing boilerplate when creating test instances, as long as they don't hide test logic or contain conditional setup.\n\n**✅ GOOD - Simple factory for common instance:**\n```javascript\nconst getInstance = (order) => new Sort('my_field', order);\n\ndescribe('Sort', () => {\n    test('sets nestedPath option', () => {\n        const result = getInstance().nestedPath('offer').toJSON();\n        expect(result).toEqual({ my_field: { nested_path: 'offer' } });\n    });\n});\n```\n\n**❌ BAD - Factory with hidden logic:**\n```javascript\n// BAD - contains conditional logic that should be in the test\nconst getInstance = (type) => {\n    if (type === 'complex') {\n        return new Sort('field').nestedPath('path').nestedFilter(query);\n    }\n    return new Sort('field');\n};\n```\n\n**Key principle:** Factory functions should only encapsulate simple, unconditional instance creation. Any setup that varies between tests should be explicit in the test itself.\n\n## Summary\n\nWhen writing or refactoring tests:\n\n### Core Testing Principles\n\n1. **One test, one concern** - Each test should verify a single method or behavior\n2. **Be explicit** - No dynamic method calls, no template literal test names, no conditional logic\n3. **Prefer repetition over abstraction** - Explicit, repeated tests are better than DRY abstractions\n4. **Use `test.each` for parameterized tests** - Never use `forEach` loops; always use Vitest's `test.each` or `describe.each`\n5. 
**No functions in test data** - No functions or factory methods in `test.each` arrays; object instances are allowed for alias method testing\n6. **Separate unrelated methods** - Even if they validate the same condition, give each method its own test\n7. **Use Vitest's `vi`** - No external mocking libraries\n8. **Use lifecycle hooks for spy cleanup** - Always use `beforeEach`/`afterEach` to ensure cleanup runs even on failure\n9. **Consolidate error assertions** - Use `toThrow(new TypeError('message'))` instead of separate type and message checks\n10. **Use `expect.assertions()` in async tests** - All async tests must declare expected assertion count\n11. **Consistent file organization** - Group all tests under `describe` blocks with consistent ordering\n12. **Split validation tests** - Separate valid and invalid value tests into distinct `test.each` blocks; always verify error messages\n13. **Minimal case-sensitivity testing** - One uppercase variant per enum value is sufficient to prove case-insensitivity\n14. **Test alias methods with table-driven tests** - Use `describe.each` with input/expected inline; object instances as input are allowed\n15. **Keep test data close to tests** - Never extract test data to module-scope fixture objects; inline data in each test\n16. **Simple factory functions are OK** - Use for instance creation, but don't hide conditional logic in them\n\n### AVA Migration Checklist\n\nWhen refactoring AVA tests to Vitest:\n\n1. **Replace imports** - `import test from 'ava'` → `import { describe, test, expect } from 'vitest'`\n2. **Remove `_macros` imports** - All macro usages must be replaced with explicit inline tests\n3. **Remove `t` parameter** - Test callbacks no longer receive the test context\n4. **Convert assertions** - Use the assertion mapping table (e.g., `t.deepEqual` → `expect().toEqual()`)\n5. **Eliminate macros** - Replace `test(macro, args)` patterns with explicit tests\n6. 
**Wrap in describe blocks** - All tests should be under a `describe('ClassName')` block\n7. **Consolidate error assertions** - Merge `t.throws` + `t.is(err.message)` into single `toThrow(new Error())` calls\n8. **Split validation tests** - Separate valid and invalid cases into distinct `test.each` blocks\n"
  },
  {
    "path": "test/testutil/index.js",
    "content": "/**\n * Test utility package for elastic-builder tests\n *\n * This package provides minimal, essential utilities for writing explicit Vitest tests.\n * We deliberately keep this minimal - most test logic should be explicit in the test files.\n */\n\n/**\n * Re-export recursiveToJSON from source for convenience in tests.\n * This is NOT a duplicate - it's a convenience re-export of the existing function.\n *\n * Use this to convert nested builder objects to JSON for assertions.\n *\n * @example\n * import { recursiveToJSON } from '../testutil/index.js';\n * const result = recursiveToJSON(queryBuilder.toJSON());\n */\nexport { recursiveToJSON } from '../../src/core/util.js';\n"
  },
  {
    "path": "test/typedef.test.ts",
    "content": "import * as esb from '../';\n\nnew esb.RequestBodySearch().query(new esb.MatchQuery('message', 'this is a test')).toJSON();\n\nesb\n    .requestBodySearch()\n    .query(\n        esb\n            .boolQuery()\n            .must(esb.matchQuery('last_name', 'smith'))\n            .filter(esb.rangeQuery('age').gt(30))\n    );\n\n// Multi Match Query\nesb\n    .requestBodySearch()\n    .query(\n        esb\n            .multiMatchQuery(['title', 'body'], 'Quick brown fox')\n            .type('best_fields')\n            .tieBreaker(0.3)\n            .minimumShouldMatch('30%')\n    );\n\n// Combined Fields Query\nesb\n    .requestBodySearch()\n    .query(\n        esb\n            .combinedFieldsQuery(['title', 'body'], 'Quick brown fox')\n            .operator('and')\n            .autoGenerateSynonymsPhraseQuery(true)\n            .zeroTermsQuery('all')\n    );\n\n// Combined Fields Query with single field\nesb\n    .requestBodySearch()\n    .query(\n        esb\n            .combinedFieldsQuery('title', 'Quick brown fox')\n            .field('description')\n            .fields(['tags', 'content^2'])\n    );\n\n// Combined Fields Query - class constructor\nnew esb.CombinedFieldsQuery(['title', 'content'], 'search terms')\n    .operator('or')\n    .autoGenerateSynonymsPhraseQuery(false)\n    .toJSON();\n\n// Percolate Query\nesb\n    .requestBodySearch()\n    .query(\n        esb\n            .percolateQuery('query', 'people')\n            .document({ name: 'Will Smith' })\n            .documents([{ name: 'Willow Smith'}, { name: 'Jaden Smith' }])\n    );\n\n// Aggregation\nesb.requestBodySearch().size(0).agg(esb.termsAggregation('popular_colors', 'color'));\n\n// Nested Aggregation\nesb\n    .requestBodySearch()\n    .size(0)\n    .agg(\n        esb\n            .termsAggregation('colors', 'color')\n            .agg(esb.avgAggregation('avg_price', 'price'))\n            .agg(esb.termsAggregation('make', 'make'))\n    );\n\nnew 
esb.TermsAggregation('countries', 'artist.country')\n    .order('rock>playback_stats.avg', 'desc')\n    .agg(\n        new esb.FilterAggregation('rock', new esb.TermQuery('genre', 'rock')).agg(\n            new esb.StatsAggregation('playback_stats', 'play_count')\n        )\n    )\n    .toJSON();\n\n// Sort\nesb\n    .requestBodySearch()\n    .query(esb.boolQuery().filter(esb.termQuery('message', 'test')))\n    .sort(esb.sort('timestamp', 'desc'))\n    .sorts([\n        esb.sort('channel', 'desc'),\n        esb.sort('categories', 'desc'),\n        // The order defaults to desc when sorting on the _score,\n        // and defaults to asc when sorting on anything else.\n        esb.sort('content'),\n        esb.sort('price').order('desc').mode('avg')\n    ]);\n\n// From / size\nesb.requestBodySearch().query(esb.matchAllQuery()).size(5).from(10);\n\nesb.recipes.filterQuery(esb.matchQuery('message', 'this is a test'))\n\n"
  },
  {
    "path": "vitest.config.js",
    "content": "import { defineConfig } from 'vitest/config';\n\nexport default defineConfig({\n    test: {\n        // Test file patterns\n        include: ['test/**/*.test.js'],\n\n        // Exclude TypeScript typedef test (handled by tsc)\n        exclude: ['test/typedef.test.ts', '**/node_modules/**'],\n\n        // Enable coverage\n        coverage: {\n            // Coverage reporters: html for viewing, lcov for Coveralls, text for console\n            reporter: ['html', 'lcov', 'text'],\n\n            // Coverage output directory\n            reportsDirectory: './coverage',\n\n            // Include source files\n            include: ['src/**/*.js'],\n\n            // Exclude non-source files and files with v8 coverage instrumentation issues\n            exclude: [\n                'src/index.d.ts',\n                'src/core/inspect.js',\n                'src/core/util.js',\n                'src/_/index.js',\n                '**/*.test.js',\n                '**/node_modules/**'\n            ]\n        },\n\n        // Enable watch mode support for local development\n        watch: false,\n\n        // Use Node.js environment (not browser)\n        environment: 'node',\n\n        // Globals (using explicit imports instead)\n        globals: false,\n\n        // Silent mode: suppress console output during tests\n        // Console output will only be shown for failing tests\n        silent: false,\n\n        // Only show console output on test failures\n        onConsoleLog(log, type) {\n            // Return false to suppress the log\n            // This will be overridden to show logs when tests fail\n            return false;\n        },\n\n        // Use single-threaded execution to avoid worker serialization issues\n        pool: 'threads',\n        poolOptions: {\n            threads: {\n                singleThread: true\n            }\n        },\n\n        // Disable file parallelism to avoid worker issues\n        fileParallelism: false,\n\n        
// Limit test timeout\n        testTimeout: 10000,\n\n        // Limit hook timeout\n        hookTimeout: 10000\n    }\n});\n"
  },
  {
    "path": "webpack.config.js",
    "content": "'use strict';\n\nconst TerserPlugin = require('terser-webpack-plugin');\n\nmodule.exports = {\n    output: {\n        library: {\n            name: 'esb',\n            type: 'umd'\n        }\n    },\n    optimization: {\n        minimizer: [\n            new TerserPlugin({\n                terserOptions: {\n                    format: {\n                        beautify: false,\n                        comments: false\n                    },\n                    mangle: {\n                        toplevel: true,\n                        keep_fnames: false\n                    },\n                    compress: {\n                        warnings: false,\n                        conditionals: true,\n                        unused: true,\n                        comparisons: true,\n                        sequences: true,\n                        dead_code: true,\n                        evaluate: true,\n                        if_return: true,\n                        join_vars: true,\n                        negate_iife: false\n                    }\n                }\n            })\n        ]\n    }\n};\n"
  }
]