[
  {
    "path": ".credo.exs",
    "content": "# This file contains the configuration for Credo and you are probably reading\n# this after creating it with `mix credo.gen.config`.\n#\n# If you find anything wrong or unclear in this file, please report an\n# issue on GitHub: https://github.com/rrrene/credo/issues\n#\n%{\n  #\n  # You can have as many configs as you like in the `configs:` field.\n  configs: [\n    %{\n      #\n      # Run any config using `mix credo -C <name>`. If no config name is given\n      # \"default\" is used.\n      #\n      name: \"default\",\n      #\n      # These are the files included in the analysis:\n      files: %{\n        #\n        # You can give explicit globs or simply directories.\n        # In the latter case `**/*.{ex,exs}` will be used.\n        #\n        included: [\n          \"lib/\",\n          \"src/\",\n          \"test/\"\n        ],\n        excluded: [~r\"/_build/\", ~r\"/deps/\", ~r\"/node_modules/\"]\n      },\n      #\n      # Load and configure plugins here:\n      #\n      plugins: [],\n      #\n      # If you create your own checks, you must specify the source files for\n      # them here, so they can be loaded by Credo before running the analysis.\n      #\n      requires: [],\n      #\n      # If you want to enforce a style guide and need a more traditional linting\n      # experience, you can change `strict` to `true` below:\n      #\n      strict: false,\n      #\n      # To modify the timeout for parsing files, change this value:\n      #\n      parse_timeout: 5000,\n      #\n      # If you want to use uncolored output by default, you can change `color`\n      # to `false` below:\n      #\n      color: true,\n      #\n      # You can customize the parameters of any check by adding a second element\n      # to the tuple.\n      #\n      # To disable a check put `false` as second element:\n      #\n      #     {Credo.Check.Design.DuplicatedCode, false}\n      #\n      checks: [\n        #\n        ## Consistency Checks\n        #\n       
 {Credo.Check.Consistency.ExceptionNames, []},\n        {Credo.Check.Consistency.LineEndings, []},\n        {Credo.Check.Consistency.ParameterPatternMatching, []},\n        {Credo.Check.Consistency.SpaceAroundOperators, []},\n        {Credo.Check.Consistency.SpaceInParentheses, []},\n        {Credo.Check.Consistency.TabsOrSpaces, []},\n\n        #\n        ## Design Checks\n        #\n        # You can customize the priority of any check\n        # Priority values are: `low, normal, high, higher`\n        #\n        {Credo.Check.Design.AliasUsage,\n         [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},\n        # You can also customize the exit_status of each check.\n        # If you don't want TODO comments to cause `mix credo` to fail, just\n        # set this value to 0 (zero).\n        #\n        {Credo.Check.Design.TagTODO, [exit_status: 2]},\n        {Credo.Check.Design.TagFIXME, []},\n\n        #\n        ## Readability Checks\n        #\n        {Credo.Check.Readability.AliasOrder, []},\n        {Credo.Check.Readability.FunctionNames, []},\n        {Credo.Check.Readability.LargeNumbers, []},\n        {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},\n        {Credo.Check.Readability.ModuleAttributeNames, []},\n        {Credo.Check.Readability.ModuleDoc, false},\n        {Credo.Check.Readability.ModuleNames, []},\n        {Credo.Check.Readability.ParenthesesInCondition, []},\n        {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},\n        {Credo.Check.Readability.PredicateFunctionNames, []},\n        {Credo.Check.Readability.PreferImplicitTry, []},\n        {Credo.Check.Readability.RedundantBlankLines, []},\n        {Credo.Check.Readability.Semicolons, []},\n        {Credo.Check.Readability.SpaceAfterCommas, []},\n        {Credo.Check.Readability.StringSigils, []},\n        {Credo.Check.Readability.TrailingBlankLine, []},\n        {Credo.Check.Readability.TrailingWhiteSpace, []},\n        
{Credo.Check.Readability.UnnecessaryAliasExpansion, []},\n        {Credo.Check.Readability.VariableNames, []},\n\n        #\n        ## Refactoring Opportunities\n        #\n        {Credo.Check.Refactor.CondStatements, []},\n        {Credo.Check.Refactor.CyclomaticComplexity, []},\n        {Credo.Check.Refactor.FunctionArity, []},\n        {Credo.Check.Refactor.LongQuoteBlocks, []},\n        # {Credo.Check.Refactor.MapInto, []},\n        {Credo.Check.Refactor.MatchInCondition, []},\n        {Credo.Check.Refactor.NegatedConditionsInUnless, []},\n        {Credo.Check.Refactor.NegatedConditionsWithElse, []},\n        {Credo.Check.Refactor.Nesting, []},\n        {Credo.Check.Refactor.UnlessWithElse, []},\n        {Credo.Check.Refactor.WithClauses, []},\n\n        #\n        ## Warnings\n        #\n        {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []},\n        {Credo.Check.Warning.BoolOperationOnSameValues, []},\n        {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},\n        {Credo.Check.Warning.IExPry, []},\n        {Credo.Check.Warning.IoInspect, []},\n        # {Credo.Check.Warning.LazyLogging, []},\n        {Credo.Check.Warning.MixEnv, false},\n        {Credo.Check.Warning.OperationOnSameValues, []},\n        {Credo.Check.Warning.OperationWithConstantResult, []},\n        {Credo.Check.Warning.RaiseInsideRescue, []},\n        {Credo.Check.Warning.UnusedEnumOperation, []},\n        {Credo.Check.Warning.UnusedFileOperation, []},\n        {Credo.Check.Warning.UnusedKeywordOperation, []},\n        {Credo.Check.Warning.UnusedListOperation, []},\n        {Credo.Check.Warning.UnusedPathOperation, []},\n        {Credo.Check.Warning.UnusedRegexOperation, []},\n        {Credo.Check.Warning.UnusedStringOperation, []},\n        {Credo.Check.Warning.UnusedTupleOperation, []},\n        {Credo.Check.Warning.UnsafeExec, []},\n\n        #\n        # Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`)\n\n        #\n        # 
Controversial and experimental checks (opt-in, just replace `false` with `[]`)\n        #\n        {Credo.Check.Consistency.MultiAliasImportRequireUse, false},\n        {Credo.Check.Consistency.UnusedVariableNames, false},\n        {Credo.Check.Design.DuplicatedCode, false},\n        {Credo.Check.Readability.AliasAs, false},\n        {Credo.Check.Readability.BlockPipe, false},\n        {Credo.Check.Readability.ImplTrue, false},\n        {Credo.Check.Readability.MultiAlias, false},\n        {Credo.Check.Readability.SeparateAliasRequire, false},\n        {Credo.Check.Readability.SinglePipe, false},\n        {Credo.Check.Readability.Specs, false},\n        {Credo.Check.Readability.StrictModuleLayout, false},\n        {Credo.Check.Readability.WithCustomTaggedTuple, false},\n        {Credo.Check.Refactor.ABCSize, false},\n        {Credo.Check.Refactor.AppendSingleItem, false},\n        {Credo.Check.Refactor.DoubleBooleanNegation, false},\n        {Credo.Check.Refactor.ModuleDependencies, false},\n        {Credo.Check.Refactor.NegatedIsNil, false},\n        {Credo.Check.Refactor.PipeChainStart, false},\n        {Credo.Check.Refactor.VariableRebinding, false},\n        {Credo.Check.Warning.LeakyEnvironment, false},\n        {Credo.Check.Warning.MapGetUnsafePass, false},\n        {Credo.Check.Warning.UnsafeToAtom, false}\n\n        #\n        # Custom checks can be created using `mix credo.gen.check`.\n        #\n      ]\n    }\n  ]\n}\n"
  },
  {
    "path": ".formatter.exs",
    "content": "# Used by \"mix format\"\n[\n  inputs: [\"{mix,.formatter}.exs\", \"{config,lib,test}/**/*.{ex,exs}\"]\n]\n"
  },
  {
    "path": ".github/FUNDING.yml",
    "content": "# These are supported funding model platforms\n\ngithub: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]\npatreon: atandarash\nopen_collective: # Replace with a single Open Collective username\nko_fi: # Replace with a single Ko-fi username\ntidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel\ncommunity_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry\nliberapay: # Replace with a single Liberapay username\nissuehunt: # Replace with a single IssueHunt username\notechie: # Replace with a single Otechie username\nlfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry\ncustom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']\n"
  },
  {
    "path": ".github/pull_request_template.md",
    "content": "## Overview\n\n_Write a short description of what your PR does_\n\n## Related Issues\n\n_List all related issues. Add linking prefixes (closes, fixes, resolves, etc.)_\n\n## TODO\n\n_Write down what steps need to be done for this PR._\n\n- [ ] Update PR to include task link\n- [ ] Add error handling\n- [ ] Add Loom video demo\n- [ ] GitHub Actions are all passing\n- [ ] Ensure your PR has been reviewed and you have also implemented all feedback requested\n- [ ] Update PR label to the right stage\n\n## Testing\n\n### How to test:\n\n_Write down steps needed, if any, to test your PR locally in case the preview links do not work_\n\n1. Create an index\n2. Add these documents\n   ...\n\n### What to test:\n\n_Write down a checklist for others to copy and tick when testing your PR_\n\n- [ ] Searching for \"me\" or other variations works\n"
  },
  {
    "path": ".github/workflows/dialyzer.yml",
    "content": "name: Dialyzer\n\non: push\n\njobs:\n  analyze:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v2\n      - uses: erlef/setup-beam@v1\n        id: beam\n        with:\n          otp-version: \"24\"\n          elixir-version: \"1.12\"\n\n      - name: Restore Cached Dependencies\n        uses: actions/cache@v2\n        id: mix-cache\n        with:\n          path: |\n            deps\n            _build\n          key: ${{ runner.os }}-${{ steps.beam.outputs.elixir-version }}-${{ steps.beam.outputs.otp-version }}-${{ hashFiles('mix.lock') }}\n\n      - name: Restore PLT cache\n        uses: actions/cache@v2\n        id: plt-cache\n        with:\n          key: |\n            ${{ runner.os }}-${{ steps.beam.outputs.elixir-version }}-${{ steps.beam.outputs.otp-version }}-plt\n          restore-keys: |\n            ${{ runner.os }}-${{ steps.beam.outputs.elixir-version }}-${{ steps.beam.outputs.otp-version }}-plt\n          path: |\n            priv/plts\n\n      - name: Install Dependencies\n        if: steps.mix-cache.outputs.cache-hit != 'true'\n        run: |\n          mix deps.get\n\n      - name: Create PLTs\n        if: steps.plt-cache.outputs.cache-hit != 'true'\n        run: mix dialyzer --plt\n\n      - name: Run dialyzer\n        run: mix dialyzer\n"
  },
  {
    "path": ".github/workflows/test.yml",
    "content": "name: Test\n\non: push\n\nenv:\n  MIX_ENV: test\n  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n\njobs:\n  test:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v2\n      - uses: erlef/setup-beam@v1\n        id: beam\n        with:\n          otp-version: \"24\"\n          elixir-version: \"1.12\"\n\n      - name: Restore Cached Dependencies\n        uses: actions/cache@v2\n        id: mix-cache\n        with:\n          path: |\n            deps\n            _build\n          key: ${{ runner.os }}-${{ steps.beam.outputs.elixir-version }}-${{ steps.beam.outputs.otp-version }}-${{ hashFiles('mix.lock') }}\n\n      - name: Install Dependencies\n        if: steps.mix-cache.outputs.cache-hit != 'true'\n        run: |\n          mix deps.get\n\n      - name: Check Codebase Standard\n        run: |\n          mix format --check-formatted\n          mix credo\n\n      - name: Run Tests\n        run: |\n          mix coveralls.json\n      \n      - name: Upload Coverage Reports\n        uses: codecov/codecov-action@v2\n        with:\n          directory: ./cover\n          fail_ci_if_error: true\n"
  },
  {
    "path": ".gitignore",
    "content": "# The directory Mix will write compiled artifacts to.\n/_build/\n\n# If you run \"mix test --cover\", coverage assets end up here.\n/cover/\n\n# The directory Mix downloads your dependencies sources to.\n/deps/\n\n# Where third-party dependencies like ExDoc output generated docs.\n/doc/\n\n# Ignore .fetch files in case you like to edit your project deps locally.\n/.fetch\n\n# If the VM crashes, it generates a dump, let's ignore it too.\nerl_crash.dump\n\n# Also ignore archive artifacts (built via \"mix archive.build\").\n*.ez\n\n# Ignore package tarball (built via \"mix hex.build\").\nelasticlunr-*.tar\n\n\n# Temporary files for e.g. tests\n/tmp\n\n# Generated files for Dialyxir\n\n/priv/plts/*.plt\n/priv/plts/*.plt.hash"
  },
  {
    "path": ".vscode/extensions.json",
    "content": "{\n    \"recommendations\": [\n        \"pantajoe.vscode-elixir-credo\",\n        \"jakebecker.elixir-ls\",\n        \"pgourlain.erlang\"\n    ]\n}"
  },
  {
    "path": "BACKERS.md",
    "content": "<h1 align=\"center\">Sponsors &amp; Backers</h1>\n\nElasticlunr is an MIT-licensed open source project. It's an independent project with its ongoing development made possible entirely thanks to the support by these awesome [backers](https://github.com/heywhy/ex_elasticlunr/blob/dev/BACKERS.md). If you'd like to join them, please consider:\n\n- [Become a backer or sponsor on Patreon](https://www.patreon.com/atandarash)."
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2021 Atanda Rasheed\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE."
  },
  {
    "path": "README.md",
    "content": "# Elasticlunr\n\n[![Dialyzer](https://github.com/heywhy/ex_elasticlunr/actions/workflows/dialyzer.yml/badge.svg?branch=master)](https://github.com/heywhy/ex_elasticlunr/actions) [![Test](https://github.com/heywhy/ex_elasticlunr/actions/workflows/test.yml/badge.svg?branch=master)](https://github.com/heywhy/ex_elasticlunr/actions) [![Codecov](https://codecov.io/gh/heywhy/ex_elasticlunr/branch/master/graph/badge.svg?token=ZDA9GUTAFJ)](https://codecov.io/gh/heywhy/ex_elasticlunr)\n\nElasticlunr is a small, full-text search library for use in the Elixir environment. It indexes JSON documents and provides a friendly search interface to retrieve documents.\n\n## Why\n\nThe library is built for web applications that do not require the deployment complexities of popular search engines while taking advantage of the Beam capabilities.\n\nImagine how much is gained when the search functionality of your application resides in the same environment (Beam VM) as your business logic; search resolves faster, the number of services (Elasticsearch, Solr, and so on) to monitor reduces.\n\n## Installation\n\nThe library can be installed by adding `elasticlunr` to your list of dependencies in mix.exs:\n\n```elixir\ndef deps do\n  [\n    {:elasticlunr, \"~> 0.6\"}\n  ]\nend\n```\n\nDocumentation can be found at [hexdocs.pm](https://hexdocs.pm/elasticlunr). See blog post [Introduction to Elasticlunr](https://atandarash.me/blog/introduction-to-elasticlunr) and [Livebook](#livebook) for examples.\n\n## Features\n\n1. Query-Time Boosting, you don't need to set up boosting weight in the index building procedure, Query-Time Boosting makes it more flexible so you could try different boosting schemes\n2. More Rational Scoring Mechanism, Elasticlunr uses a similar scoring mechanism as Elasticsearch, and also this scoring mechanism is used by Lucene\n3. Field-Search, you can choose which field to index and which field to search\n4. 
Boolean Model, you can set which field to search and the boolean model for each query token, such as \"OR\" and \"AND\"\n5. Combined Boolean Model, TF/IDF Model, and the Vector Space Model make the results ranking more reliable.\n\n## Token Expansion\n\nSometimes users want to expand a query token to increase RECALL. For example, user query token is \"micro\", and assume \"microwave\" and \"microscope\" are in the index, if the user chooses to expand the query token \"micro\" to increase RECALL, both \"microwave\" and \"microscope\" will be returned and searched in the index. The query results from expanded tokens are penalized because they are not the same as the query token.\n\n## Livebook\n\nThe repository includes a livebook file that you can run. You can click the button below to run it using [livebook.dev](https://livebook.dev)!\n\n[![Run in Livebook](https://livebook.dev/badge/v1/blue.svg)](https://livebook.dev/run?url=https%3A%2F%2Fgithub.com%2Fheywhy%2Fex_elasticlunr%2Fblob%2Fmaster%2Fdocs.livemd)\n\n## Storage\n\nElasticlunr allows you to write your indexes to whatever storage provider you want. You don't need to access the `Elasticlunr.Storage` module directly, it is used by the `Elasticlunr.IndexManager`. See available providers below:\n\n* [Blackhole](https://github.com/heywhy/ex_elasticlunr/blob/master/lib/elasticlunr/storage/blackhole.ex)\n* [Disk](https://github.com/heywhy/ex_elasticlunr/blob/master/lib/elasticlunr/storage/disk.ex)\n* [S3](https://github.com/heywhy/ex_elasticlunr_s3)\n\nTo configure what provider to use:\n\n```elixir\nconfig :elasticlunr,\n  storage: Elasticlunr.Storage.S3\n```\n\nNote that all indexes in storage are preloaded on application startup. To see the available provider configuration, you should reference its module.\n\n## License\n\nElasticlunr is released under the MIT License - see the [LICENSE](https://github.com/heywhy/ex_elasticlunr/blob/master/LICENSE) file."
  },
  {
    "path": "coveralls.json",
    "content": "{\n  \"coverage_options\": {\n    \"treat_no_relevant_lines_as_covered\": true,\n    \"minimum_coverage\": 90\n  }\n}"
  },
  {
    "path": "docs.livemd",
    "content": "# Elasticlunr\n\n## Description\n\nElasticlunr is a small, full-text search library for use in the Elixir environment. It indexes JSON documents and provides a friendly search interface to retrieve documents.\n\nThe library is built for web applications that do not require the deployment complexities of popular search engines while taking advantage of the Beam capabilities.\n\nImagine how much is gained when the search functionality of your application resides in the same environment (Beam VM) as your business logic; search resolves faster, the number of services (Elasticsearch, Solr, and so on) to monitor reduces.\n\n## Getting Started\n\n```elixir\nMix.install([\n  {:kino, \"~> 0.4\"},\n  {:elasticlunr, \"~> 0.6\"}\n])\n```\n\n## What's an Index?\n\nAn index is a collection of structured data that is referred to when looking for results that are relevant to a specific query.\n\nIn RDBMS, a table can be likened to an index, meaning that you can store, update, delete and search documents in an index. But the difference here is that an index has a pipeline that every JSON document passes through before it becomes searchable.\n\n```elixir\nalias Elasticlunr.{Index, Pipeline}\n\n# the library comes with a default set of pipeline functions\npipeline = Pipeline.new(Pipeline.default_runners())\n\nindex = Index.new(pipeline: pipeline)\n```\n\nThe above code block creates a new index with a pipeline of default functions that work with the English language.\n\nThe new index does not define the expected structure of the JSON documents to be indexed. 
To fix this, let's assume we are building an index of blog posts, and each post consists of the `author`, `content`, `category`, and `title` attributes.\n\n```elixir\nindex =\n  index\n  |> Index.add_field(\"title\")\n  |> Index.add_field(\"author\")\n  |> Index.add_field(\"content\")\n  |> Index.add_field(\"category\")\n```\n\n## Indexing Documents\n\nFollowing our example or use-case above, to make the blog posts searchable we need to add them to the index so that they can be analyzed and transformed appropriately.\n\n```elixir\ndocuments = [\n  %{\n    \"id\" => 1,\n    \"author\" => \"Mark Ericksen\",\n    \"title\" => \"Saving and Restoring LiveView State using the Browser\",\n    \"category\" => \"elixir liveview browser\",\n    \"content\" =>\n      \"There are multiple ways to save and restore state for your LiveView processes. You can use an external cache like Redis, your database, or even the browser itself. Sometimes there are situations where you either can’t or don’t want to store the state on the server. In situations like that, you do have the option of storing the state in the user’s browser. This post explains how you use the browser to store state and how your LiveView process can get it back later. We’ll go through the code so you can add something similar to your own project. We cover what data to store, how to do it securely, and restoring the state on demand.\"\n  },\n  %{\n    \"id\" => 2,\n    \"author\" => \"Mika Kalathil\",\n    \"title\" => \"Creating Reusable Ecto Code\",\n    \"category\" => \"elixir ecto sql\",\n    \"content\" =>\n      \"Creating a highly reusable Ecto API is one of the ways we can create long-term sustainable code for ourselves, while growing it with our application to allow for infinite combination possibilities and high code reusability. 
If we write our Ecto code correctly, we can not only have a very well defined split between query definition and combination/execution using our context but also have the ability to re-use the queries we design individually, together with others to create larger complex queries.\"\n  },\n  %{\n    \"id\" => 3,\n    \"author\" => \"Mark Ericksen\",\n    \"title\" => \"ThinkingElixir 079: Collaborative Music in LiveView with Nathan Willson\",\n    \"category\" => \"elixir podcast liveview\",\n    \"content\" =>\n      \"In episode 79 of Thinking Elixir, we talk with Nathan Willson about GEMS, his collaborative music generator written in LiveView. He explains how it’s built, the JS sound library integrations, what could be done by Phoenix and what is done in the browser. Nathan shares how he deployed it globally to 10 regions using Fly.io. We go over some of the challenges he overcame creating an audio focused web application. It’s a fun open-source project that pushes the boundaries of what we think LiveView apps can do!\"\n  },\n  %{\n    \"id\" => 4,\n    \"title\" => \"ThinkingElixir 078: Logflare with Chase Granberry\",\n    \"author\" => \"Mark Ericksen\",\n    \"category\" => \"elixir podcast logging logflare\",\n    \"content\" =>\n      \"In episode 78 of Thinking Elixir, we talk with Chase Granberry about Logflare. We learn why Chase started the company, what Logflare does, how it’s built on Elixir, about their custom Elixir logger, where the data is stored, how it’s queried, and more! We talk about dealing with the constant stream of log data, how Logflare is collecting and displaying metrics, and talk more about Supabase acquiring the company!\"\n  }\n]\n\nindex = Index.add_documents(index, documents)\n```\n\n## Search Index\n\nThe search results is a list of maps and each map contains specific keys, `matched`, `positions`, `ref`, and `score`. 
See the definitions below:\n\n* **matched:** this field tells the number of attributes where the given query matches\n* **score:** the value shows how well the document ranks compared to other documents\n* **ref:** this is the document id\n* **positions:** this is a map that shows the positions of the matching words in the document\n\n```elixir\nsearch_query = Kino.Input.text(\"Search\", default: \"elixir\")\n```\n\n```elixir\nsearch_query = Kino.Input.read(search_query)\nresults = Index.search(index, search_query)\n```\n\n**NB:** Don't forget to fiddle with the search input.\n\n## Nested Document Attributes\n\nAs seen in the earlier example all documents indexed were without nested attributes. But Imagine a situation where your data source returns documents with nested attributes, and you want to search by these attributes - it's possible with Elasticlunr by specifying the top-level attribute.\n\nLet's say our data source returns a list of users with their address which is an object and you want to index this information so that you can query them.\n\n```elixir\n# the library comes with a default set of pipeline functions\npipeline = Pipeline.new(Pipeline.default_runners())\n\nusers_index =\n  Index.new(pipeline: pipeline)\n  |> Index.add_field(\"name\")\n  |> Index.add_field(\"address\")\n  |> Index.add_field(\"education\")\n```\n\nAutomatically, Elasticlunr will flatten the nested attributes to the level that when using the advanced query DSL you can use dot notation to filter the search results. 
Now, let's add a few user objects to the index:\n\n```elixir\ndocuments = [\n  %{\n    \"id\" => 1,\n    \"name\" => \"rose mary\",\n    \"education\" => \"BSc.\",\n    \"address\" => %{\n      \"line1\" => \"Brooklyn Street\",\n      \"line2\" => \"4181\",\n      \"city\" => \"Portland\",\n      \"state\" => \"Oregon\",\n      \"country\" => \"USA\"\n    }\n  },\n  %{\n    \"id\" => 2,\n    \"name\" => \"jason richard\",\n    \"education\" => \"Msc.\",\n    \"address\" => %{\n      \"line1\" => \"Crown Street\",\n      \"line2\" => \"2057\",\n      \"city\" => \"St Malo\",\n      \"state\" => \"Quebec\",\n      \"country\" => \"CA\"\n    }\n  },\n  %{\n    \"id\" => 3,\n    \"name\" => \"peters book\",\n    \"education\" => \"BSc.\",\n    \"address\" => %{\n      \"line1\" => \"Murry Street\",\n      \"line2\" => \"2285\",\n      \"city\" => \"Norfolk\",\n      \"state\" => \"Virginia\",\n      \"country\" => \"USA\"\n    }\n  },\n  %{\n    \"id\" => 4,\n    \"name\" => \"jason mount\",\n    \"education\" => \"Highschool\",\n    \"address\" => %{\n      \"line1\" => \"Aspen Court\",\n      \"line2\" => \"2057\",\n      \"city\" => \"Boston\",\n      \"state\" => \"Massachusetts\",\n      \"country\" => \"USA\"\n    }\n  }\n]\n\nusers_index = Index.add_documents(users_index, documents)\n```\n\n```elixir\nsearch_query = Kino.Input.text(\"Search users\", default: \"jason murry\")\n```\n\n```elixir\nsearch_query = Kino.Input.read(search_query)\nIndex.search(users_index, search_query)\n```\n\n## Index Manager\n\nThe manager includes different CRUD functions to help you manage your index after mutating the state. First of all, let's get indexes to manage by the manager:\n\n```elixir\nalias Elasticlunr.IndexManager\n\nIndexManager.loaded_indices()\n```\n\nAs seen above the list is empty. 
Now let's add an index:\n\n```elixir\nIndexManager.save(users_index)\n\nIndexManager.loaded_indices()\n|> Enum.any?(&(&1 == users_index.name))\n|> IO.inspect(label: :users_index_exists)\n\nIndexManager.loaded_indices()\n```\n\nThe manager now has the `users_index` in memory for access.\n\n## Query DSL\n\nLike every other search engine, you can make more advanced search queries depending on your\nrequirements, and I'm pleased to tell you that Elasticlunr has not left out such capabilities.\nSo, in the proceeding parts of this docs, I will be highlighting the available query types\nprovided by the library and how you can use them.\n\nIt's important to note that Elasticlunr tries to replicate popular Query DSL (Domain Specific Language)\nwith the same behavior as Elasticsearch, which means the learning curve reduces if you have\nexperience using the search engine. For Elasticlunr, there are the `bool`, `match`, `match_all`,\n`not`, and `terms` query types you can use to retrieve insights about an index.\n\n## Bool\n\nThe `bool` query is used with a combination of queries to retrieve documents matching the boolean\ncombinations of clauses. Consider these clauses to be everything that comes after the `SELECT`\nstatement in relational databases.\n\n<!-- livebook:{\"break_markdown\":true} -->\n\nThe `bool` query is built using one or more clauses to achieve desired results, and each clause\nhas its type, see below:\n\nClause | Description\n---|---\n`must` | The clause must appear in the matching documents, and this affects the document's score.\n`must_not` | The clause must not appear in the matching document. 
Scoring is ignored because the clause is executed in the filter context.\n`filter` | Like `must`, the clause must appear in the matching documents but scoring is ignored for the query.\n`should` | The clause should appear in the matching document.\n\nIt's important to note that only scores from the `must` and `should`  clauses contribute to the\nfinal score of the matching document.\n\n```elixir\nIndex.search(index, %{\n  \"query\" => %{\n    \"bool\" => %{\n      \"must\" => %{\n        \"terms\" => %{\"content\" => \"use\"}\n      },\n      \"should\" => %{\n        \"terms\" => %{\"category\" => \"elixir\"}\n      },\n      \"filter\" => %{\n        \"match\" => %{\n          \"id\" => 3\n        }\n      },\n      \"must_not\" => %{\n        \"match\" => %{\n          \"author\" => \"mika\"\n        }\n      },\n      \"minimum_should_match\" => 1\n    }\n  }\n})\n```\n\nYou can use the minimum_should_match parameter to specify the number or percentage of should\nclauses returned documents must match.\nIf the bool query includes at least one should clause and no must or filter clauses, the default\nvalue is 1. Otherwise, the default value is 0.\n\n## Match\n\nThe `match` query is the standard query used for full-text search, including support for fuzzy\nmatching. The provided text is analyzed before matching it against documents.\n\n```elixir\nIndex.search(index, %{\n  \"query\" => %{\n    \"match\" => %{\n      \"content\" => %{\n        \"query\" => \"liveview browser\"\n      }\n    }\n  }\n})\n```\n\nA `match` query accepts one or more top-level fields you wish to search, in the example above,\nit's the `content` field. Note that when you have more than one top-level fields, the `match`\nquery is rewritten to a `bool` query internally by the library. 
Now, let's see what parameters\nare accepted by the `match` query below:\n\nParameter | Description\n---|---\n`query` | String you wish to find in the provided field.\n`expand` | Increase token recall, see [token expansion](https://github.com/heywhy/ex_elasticlunr#token-expansion).\n`fuzziness` | Maximum edit distance allowed for matching.\n`boost` | Floating point number used to decrease or increase the relevance scores of a query. Defaults to 1.0.\n`operator` | The boolean operator used to interpret the `query` value. Available values for the `operator` option are `or` and `and`. Defaults to `or`.\n`minimum_should_match` | Minimum number of clauses that a document must match for it to be returned.\n\n## Match All\n\nThe most simple query, which matches all documents, gives them a score of 1.0 each.\n\nParameter | Description\n---|---\n`boost` | Floating point number used to decrease or increase the relevance scores of a query. Defaults to 1.0.\n\n```elixir\nIndex.search(index, %{\n  \"query\" => %{\n    \"match_all\" => %{}\n  }\n})\n```\n\n## Not\n\nThe `not` query inverts the result of the nested query giving the matched documents a score of\n1.0 each.\n\n```elixir\nIndex.search(index, %{\n  \"query\" => %{\n    \"not\" => %{\n      \"match\" => %{\n        \"content\" => %{\n          \"query\" => \"elixir\"\n        }\n      }\n    }\n  }\n})\n```\n\n## Terms\n\nThe query return documents that contain the exact terms in a given field. The `terms` query should\nbe used to find documents based on a precise value such as a price, a product ID, or a username.\n\n```elixir\nIndex.search(index, %{\n  \"query\" => %{\n    \"terms\" => %{\n      \"content\" => %{\n        \"value\" => \"think\"\n      }\n    }\n  }\n})\n```\n\nA `terms` query accepts one or more top-level fields you wish to search, in the example above,\nit's the `content` field. 
Note that when you have more than one top-level fields, the `terms`\nquery is rewritten to a `bool` query internally by the library. Now, let's see what parameters\nare accepted by the `terms` query below:\n\nParameter | Description\n---|---\n`value` | A term you wish to find in the provided field. The term must match exactly the field value to return a document.\n`boost` | Floating point number used to decrease or increase the relevance scores of a query. Defaults to 1.0.\n"
  },
  {
    "path": "lib/elasticlunr/application.ex",
    "content": "defmodule Elasticlunr.Application do\n  # See https://hexdocs.pm/elixir/Application.html\n  # for more information on OTP Applications\n  @moduledoc false\n\n  use Application\n\n  alias Elasticlunr.IndexManager\n\n  @impl true\n  def start(_type, _args) do\n    children = [\n      {Registry, name: Elasticlunr.IndexRegistry, keys: :unique},\n      {DynamicSupervisor, name: Elasticlunr.IndexSupervisor, strategy: :one_for_one}\n      # Starts a worker by calling: Elasticlunr.Worker.start_link(arg)\n      # {Elasticlunr.Worker, arg}\n    ]\n\n    # See https://hexdocs.pm/elixir/Supervisor.html\n    # for other strategies and supported options\n    opts = [strategy: :one_for_one, name: Elasticlunr.Supervisor]\n\n    case Supervisor.start_link(children, opts) do\n      {:ok, _} = result ->\n        :ok = IndexManager.preload()\n        result\n\n      err ->\n        err\n    end\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/core/document_store.ex",
    "content": "defmodule Elasticlunr.DocumentStore do\n  alias Elasticlunr.{Field, Index}\n\n  defstruct save: true, documents: %{}, document_info: %{}, length: 0\n\n  @type t :: %__MODULE__{\n          save: boolean(),\n          documents: map(),\n          document_info: map(),\n          length: pos_integer()\n        }\n\n  @spec new(boolean()) :: t()\n  def new(save \\\\ true) do\n    struct!(%__MODULE__{}, %{save: save})\n  end\n\n  @spec add(t(), Field.document_ref(), map()) :: t()\n  def add(%__MODULE__{documents: documents, length: length, save: save} = store, ref, document) do\n    length =\n      case exists?(store, ref) do\n        true ->\n          length\n\n        false ->\n          length + 1\n      end\n\n    documents =\n      case save do\n        true ->\n          Map.put(documents, ref, document)\n\n        false ->\n          Map.put(documents, ref, nil)\n      end\n\n    %{store | length: length, documents: documents}\n  end\n\n  @spec get(t(), Field.document_ref()) :: map() | nil\n  def get(%__MODULE__{documents: documents}, ref), do: Map.get(documents, ref)\n\n  @spec remove(t(), Field.document_ref()) :: map() | nil\n  def remove(\n        %__MODULE__{document_info: document_info, documents: documents, length: length} = store,\n        ref\n      ) do\n    case exists?(store, ref) do\n      true ->\n        length = length - 1\n        documents = Map.delete(documents, ref)\n        document_info = Map.delete(document_info, ref)\n\n        %{store | document_info: document_info, documents: documents, length: length}\n\n      false ->\n        store\n    end\n  end\n\n  @spec exists?(t(), Field.document_ref()) :: boolean()\n  def exists?(%__MODULE__{documents: documents}, ref), do: Map.has_key?(documents, ref)\n\n  @spec add_field_length(t(), Field.document_ref(), Index.document_field(), pos_integer()) :: t()\n  def add_field_length(%__MODULE__{document_info: document_info} = store, ref, field, length) do\n    case exists?(store, ref) 
do\n      false ->\n        store\n\n      true ->\n        info =\n          document_info\n          |> Map.get(ref, %{})\n          |> Map.put(field, length)\n\n        document_info = Map.put(document_info, ref, info)\n        %{store | document_info: document_info}\n    end\n  end\n\n  @spec update_field_length(t(), Field.document_ref(), Index.document_field(), pos_integer()) ::\n          t()\n  def update_field_length(%__MODULE__{} = store, ref, field, length),\n    do: add_field_length(store, ref, field, length)\n\n  @spec get_field_length(t(), Field.document_ref(), Index.document_field()) :: pos_integer()\n  def get_field_length(%__MODULE__{document_info: document_info} = store, ref, field) do\n    case exists?(store, ref) do\n      false ->\n        nil\n\n      true ->\n        document_info\n        |> Map.get(ref, %{})\n        |> Map.get(field)\n    end\n  end\n\n  @spec reset(t(), boolean()) :: t()\n  def reset(%__MODULE__{}, save \\\\ true), do: new(save)\nend\n"
  },
  {
    "path": "lib/elasticlunr/core/field.ex",
    "content": "defmodule Elasticlunr.Field do\n  alias Elasticlunr.{DB, Pipeline, Token, Utils}\n\n  @fields ~w[db name pipeline query_pipeline store store_positions]a\n\n  @enforce_keys @fields\n  defstruct @fields\n\n  @type flnorm :: integer() | float()\n\n  @type t :: %__MODULE__{\n          db: DB.t(),\n          name: String.t(),\n          pipeline: Pipeline.t() | nil,\n          query_pipeline: Pipeline.t() | nil,\n          store: boolean(),\n          store_positions: boolean()\n        }\n\n  @type document_ref :: atom() | binary()\n  @type document :: %{id: document_ref(), content: binary()}\n  @type token_info :: %{\n          term: term,\n          tf: map(),\n          idf: map(),\n          flnorm: flnorm(),\n          documents: map()\n        }\n\n  @spec new(keyword) :: t()\n  def new(opts) do\n    attrs = [\n      db: Keyword.get(opts, :db),\n      name: Keyword.get(opts, :name),\n      pipeline: Keyword.get(opts, :pipeline),\n      store: Keyword.get(opts, :store_documents, false),\n      query_pipeline: Keyword.get(opts, :query_pipeline),\n      store_positions: Keyword.get(opts, :store_positions, false)\n    ]\n\n    struct!(__MODULE__, attrs)\n  end\n\n  @spec documents(t()) :: list(document_ref())\n  def documents(%__MODULE__{db: db, name: name}) do\n    case DB.match_object(db, {{:field_ids, name, :_}}) do\n      [] ->\n        []\n\n      ids ->\n        Stream.map(ids, fn {{:field_ids, _, id}} -> id end)\n    end\n  end\n\n  @spec term_frequency(t(), binary()) :: map()\n  def term_frequency(%__MODULE__{} = field, term) do\n    tf_lookup(field, term)\n  end\n\n  @spec has_token(t(), binary()) :: boolean()\n  def has_token(%__MODULE__{} = field, term) do\n    DB.member?(field.db, {:field_idf, field.name, term})\n  end\n\n  @spec get_token(t(), binary()) :: token_info() | nil\n  def get_token(%__MODULE__{} = field, term) do\n    case idf_lookup(field, term) do\n      nil ->\n        nil\n\n      _ ->\n        flnorm = 
flnorm_lookup(field)\n        to_field_token(field, term, flnorm)\n    end\n  end\n\n  @spec set_query_pipeline(t(), module()) :: t()\n  def set_query_pipeline(%__MODULE__{} = field, pipeline) do\n    %{field | query_pipeline: pipeline}\n  end\n\n  @spec add(t(), list(document())) :: t()\n  def add(%__MODULE__{pipeline: pipeline} = field, documents) do\n    Enum.each(documents, fn %{id: id, content: content} ->\n      unless DB.member?(field.db, {:field_ids, field.name, id}) do\n        tokens = Pipeline.run(pipeline, content)\n\n        add_id(field, id)\n        update_field_stats(field, id, tokens)\n      end\n    end)\n\n    recalculate_idf(field)\n  end\n\n  @spec length(t(), atom()) :: pos_integer()\n  def length(%__MODULE__{db: db, name: name}, :ids) do\n    fun = [{{{:field_ids, name, :_}}, [], [true]}]\n    DB.select_count(db, fun)\n  end\n\n  @spec length(t(), atom(), String.t()) :: pos_integer()\n  def length(%__MODULE__{db: db, name: name}, :term, term) do\n    fun = [\n      {{{:field_term, name, term, :_}, :_}, [], [true]}\n    ]\n\n    DB.select_count(db, fun)\n  end\n\n  def length(%__MODULE__{db: db, name: name}, :tf, term) do\n    fun = [\n      {{{:field_tf, name, term, :_}, :_}, [], [true]}\n    ]\n\n    DB.select_count(db, fun)\n  end\n\n  def length(%__MODULE__{db: db, name: name}, :idf, term) do\n    fun = [\n      {{{:field_idf, name, term}, :_}, [], [true]}\n    ]\n\n    DB.select_count(db, fun)\n  end\n\n  @spec update(t(), list(document())) :: t()\n  def update(%__MODULE__{} = field, documents) do\n    document_ids = Enum.map(documents, & &1.id)\n\n    field\n    |> remove(document_ids)\n    |> add(documents)\n  end\n\n  @spec remove(t(), list(document_ref())) :: t()\n  def remove(%__MODULE__{db: db, name: name} = field, document_ids) do\n    Enum.each(document_ids, fn id ->\n      true = DB.match_delete(db, {{:field_term, name, :_, id}, :_})\n      true = DB.match_delete(db, {{:field_tf, name, :_, id}, :_})\n      true = 
DB.match_delete(db, {{:field_idf, name, :_}, :_})\n      true = DB.delete(db, {:field_ids, name, id})\n    end)\n\n    recalculate_idf(field)\n  end\n\n  @spec analyze(t(), any(), keyword) :: list(Token.t())\n  def analyze(%__MODULE__{pipeline: pipeline, query_pipeline: query_pipeline}, content, options) do\n    case Keyword.get(options, :is_query, false) && not is_nil(query_pipeline) do\n      true ->\n        Pipeline.run(query_pipeline, content)\n\n      false ->\n        Pipeline.run(pipeline, content)\n    end\n  end\n\n  @spec terms(t(), keyword()) :: any()\n  def terms(%__MODULE__{} = field, query) do\n    fuzz = Keyword.get(query, :fuzziness, 0)\n    msm = Keyword.get(query, :minimum_should_match, 1)\n\n    terms = terms_lookup(field)\n\n    matching_docs =\n      Stream.map(query[:terms], fn\n        %Regex{} = re -> re\n        val -> to_token(val)\n      end)\n      |> Enum.reduce(%{}, fn\n        %Regex{} = re, matching_docs ->\n          matched_terms = Stream.filter(terms, &Regex.match?(re, elem(&1, 0)))\n\n          Enum.reduce(matched_terms, matching_docs, fn {term, _, _}, matching_docs ->\n            ids = matching_ids(field, term)\n\n            filter_ids(field, ids, term, matching_docs, query)\n          end)\n\n        %Token{token: term}, matching_docs ->\n          matching_docs =\n            case fuzz == 0 && length(field, :term, term) > 0 do\n              true ->\n                ids = matching_ids(field, term)\n\n                filter_ids(field, ids, term, matching_docs, query)\n\n              false ->\n                matching_docs\n            end\n\n          match_with_fuzz(field, term, fuzz, query, matching_docs)\n      end)\n\n    if msm <= 1 do\n      matching_docs\n    else\n      matching_docs\n      |> Stream.filter(fn {_key, content} ->\n        Enum.count(content) >= msm\n      end)\n      |> Enum.into(%{})\n    end\n  end\n\n  @spec tokens(Elasticlunr.Field.t()) :: Enumerable.t()\n  def tokens(%__MODULE__{} = field) do\n  
  flnorm = flnorm_lookup(field)\n\n    unique_terms_lookup(field)\n    |> Stream.map(fn {term, _, _} ->\n      to_field_token(field, term, flnorm)\n    end)\n  end\n\n  defp update_field_stats(%{db: db, name: name} = field, id, tokens) do\n    Enum.each(tokens, fn token ->\n      %Token{token: term} = token\n\n      term_attrs = term_lookup(field, term, id)\n\n      term_attrs =\n        case Token.get_position(token) do\n          nil ->\n            term_attrs\n\n          position ->\n            %{term_attrs | positions: term_attrs.positions ++ [position]}\n        end\n\n      term_attrs = %{term_attrs | total: term_attrs.total + 1}\n\n      true = DB.insert(db, {{:field_term, name, term, id}, term_attrs})\n      true = DB.insert(db, {{:field_tf, name, term, id}, :math.sqrt(term_attrs.total)})\n    end)\n  end\n\n  defp add_id(%{db: db, name: name}, id) do\n    true = DB.insert(db, {{:field_ids, name, id}})\n  end\n\n  defp matched_documents_for_term(%{db: db, name: name}, term) do\n    db\n    |> DB.match_object({{:field_term, name, term, :_}, :_})\n    |> Stream.map(fn {{:field_term, _, _, id}, _} -> id end)\n  end\n\n  defp term_lookup(%{db: db, name: name}, term, id) do\n    case DB.match_object(db, {{:field_term, name, term, id}, :_}) do\n      [] ->\n        %{total: 0, positions: []}\n\n      [{_, attrs}] ->\n        attrs\n    end\n  end\n\n  defp terms_lookup(%{db: db, name: name}) do\n    db\n    |> DB.match_object({{:field_term, name, :_, :_}, :_})\n    |> Stream.map(&termify/1)\n  end\n\n  defp terms_lookup(%{db: db, name: name}, term) do\n    db\n    |> DB.match_object({{:field_term, name, term, :_}, :_})\n    |> Stream.map(&termify/1)\n  end\n\n  defp termify({{:field_term, _, term, id}, attrs}), do: {term, id, attrs}\n\n  defp tf_lookup(%{db: db, name: name}, term) do\n    case DB.match_object(db, {{:field_tf, name, term, :_}, :_}) do\n      [] ->\n        nil\n\n      terms ->\n        terms\n        |> Stream.map(fn {{:field_tf, _, _, id}, 
count} ->\n          {id, count}\n        end)\n    end\n  end\n\n  defp tf_lookup(%{db: db, name: name}, term, id) do\n    case DB.match_object(db, {{:field_tf, name, term, id}, :_}) do\n      [] ->\n        nil\n\n      [{{:field_tf, _, _, id}, count}] ->\n        {id, count}\n    end\n  end\n\n  defp idf_lookup(%{db: db, name: name}, term) do\n    case DB.match_object(db, {{:field_idf, name, term}, :_}) do\n      [] ->\n        nil\n\n      [{{:field_idf, _, _}, value}] ->\n        value\n    end\n  end\n\n  defp flnorm_lookup(%{db: db, name: name}) do\n    case DB.lookup(db, {:field_flnorm, name}) do\n      [] ->\n        1\n\n      [{{:field_flnorm, _}, value}] ->\n        value\n    end\n  end\n\n  defp unique_terms_lookup(field) do\n    terms_lookup(field)\n    |> Stream.uniq_by(&elem(&1, 0))\n  end\n\n  defp recalculate_idf(field) do\n    terms = unique_terms_lookup(field)\n\n    terms_length = Enum.count(terms)\n\n    ids_length = length(field, :ids)\n\n    flnorm =\n      case terms_length > 0 do\n        true ->\n          1 / :math.sqrt(terms_length)\n\n        false ->\n          0\n      end\n\n    :ok =\n      terms\n      |> Task.async_stream(fn {term, _id, _attrs} ->\n        count = length(field, :term, term) + 1\n        value = 1 + :math.log10(ids_length / count)\n\n        true = DB.insert(field.db, {{:field_idf, field.name, term}, value})\n      end)\n      |> Stream.run()\n\n    true = DB.insert(field.db, {{:field_flnorm, field.name}, flnorm})\n    field\n  end\n\n  defp filter_ids(field, ids, term, matching_docs, query) do\n    docs = Keyword.get(query, :docs)\n\n    case docs do\n      docs when is_list(docs) ->\n        Stream.filter(ids, &(&1 in docs))\n\n      _ ->\n        ids\n    end\n    |> get_matching_docs(field, term, matching_docs)\n  end\n\n  defp get_matching_docs(docs, field, term, matching_docs) do\n    docs\n    |> Enum.reduce(matching_docs, fn id, matching_docs ->\n      matched =\n        matching_docs\n        |> 
Map.get(id, [])\n        |> Kernel.++([extract_matched(field, term, id)])\n\n      Map.put(matching_docs, id, matched)\n    end)\n  end\n\n  defp match_with_fuzz(field, term, fuzz, query, matching_docs) when fuzz > 0 do\n    field\n    |> unique_terms_lookup()\n    |> Enum.reduce(matching_docs, fn {key, _id, _attr}, matching_docs ->\n      if Utils.levenshtein_distance(key, term) <= fuzz do\n        ids = matching_ids(field, term)\n        filter_ids(field, ids, key, matching_docs, query)\n      else\n        matching_docs\n      end\n    end)\n  end\n\n  defp match_with_fuzz(_field, _term, _fuzz, _query, matching_docs), do: matching_docs\n\n  defp matching_ids(field, term) do\n    terms_lookup(field, term)\n    |> Stream.map(&elem(&1, 1))\n  end\n\n  defp get_content(_field, _id) do\n    nil\n  end\n\n  defp extract_matched(field, term, id) do\n    attrs = term_lookup(field, term, id)\n    positions = Map.get(attrs, :positions)\n    {^id, tf} = tf_lookup(field, term, id)\n\n    %{\n      tf: tf,\n      ref: id,\n      positions: positions,\n      norm: flnorm_lookup(field),\n      idf: idf_lookup(field, term),\n      content: get_content(field, id)\n    }\n  end\n\n  defp to_token(%Token{} = token), do: token\n  defp to_token(token), do: Token.new(token)\n\n  defp to_field_token(field, term, flnorm) do\n    %{\n      term: term,\n      norm: flnorm,\n      tf: length(field, :tf, term),\n      idf: idf_lookup(field, term),\n      documents: matched_documents_for_term(field, term)\n    }\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/core/index.ex",
    "content": "defmodule Elasticlunr.Index.IdPipeline do\n  @moduledoc false\n\n  alias Elasticlunr.{Pipeline, Token}\n\n  @behaviour Pipeline\n\n  @impl true\n  def call(%Token{} = token), do: token\nend\n\ndefmodule Elasticlunr.Index do\n  alias Elasticlunr.{DB, Field, Pipeline}\n  alias Elasticlunr.Index.IdPipeline\n  alias Elasticlunr.Dsl.{Query, QueryRepository}\n  alias Uniq.UUID\n\n  @fields ~w[db fields name ref pipeline documents_size store_positions store_documents]a\n  @enforce_keys @fields\n  defstruct @fields\n\n  @type document_field :: atom() | binary()\n\n  @type t :: %__MODULE__{\n          db: DB.t(),\n          fields: map(),\n          documents_size: integer(),\n          ref: Field.document_ref(),\n          pipeline: Pipeline.t(),\n          name: atom() | binary(),\n          store_positions: boolean(),\n          store_documents: boolean()\n        }\n\n  @type search_query :: binary() | map()\n  @type search_result :: any()\n\n  @spec new(keyword()) :: t()\n  def new(opts \\\\ []) do\n    ref = Keyword.get(opts, :ref, \"id\")\n    pipeline = Keyword.get_lazy(opts, :pipeline, &Pipeline.new/0)\n\n    name = Keyword.get_lazy(opts, :name, &UUID.uuid4/0)\n    db_name = String.to_atom(\"elasticlunr_#{name}\")\n    db = DB.init(db_name, ~w[ordered_set public]a)\n\n    id_field = Field.new(db: db, name: ref, pipeline: Pipeline.new([IdPipeline]))\n    fields = Map.put(%{}, to_string(ref), id_field)\n\n    attrs = %{\n      db: db,\n      documents_size: 0,\n      ref: ref,\n      fields: fields,\n      pipeline: pipeline,\n      name: name,\n      store_documents: Keyword.get(opts, :store_documents, true),\n      store_positions: Keyword.get(opts, :store_positions, true)\n    }\n\n    struct!(__MODULE__, attrs)\n  end\n\n  @spec add_field(t(), document_field(), keyword()) :: t()\n  def add_field(\n        %__MODULE__{\n          db: db,\n          fields: fields,\n          pipeline: pipeline,\n          store_positions: store_positions,\n         
 store_documents: store_documents\n        } = index,\n        field,\n        opts \\\\ []\n      )\n      when is_binary(field) do\n    opts =\n      opts\n      |> Keyword.put(:db, db)\n      |> Keyword.put(:name, field)\n      |> Keyword.put_new(:pipeline, pipeline)\n      |> Keyword.put_new(:store_documents, store_documents)\n      |> Keyword.put_new(:store_positions, store_positions)\n\n    %{index | fields: Map.put(fields, field, Field.new(opts))}\n  end\n\n  @spec update_field(t(), document_field(), Field.t()) :: t()\n  def update_field(%__MODULE__{fields: fields} = index, name, %Field{} = field) do\n    if not Map.has_key?(fields, name) do\n      raise \"Unknown field #{name} in index\"\n    end\n\n    update_documents_size(%{index | fields: Map.put(fields, name, field)})\n  end\n\n  @spec get_fields(t()) :: list(Field.document_ref() | document_field())\n  def get_fields(%__MODULE__{fields: fields}), do: Map.keys(fields)\n\n  @spec get_field(t(), document_field()) :: Field.t()\n  def get_field(%__MODULE__{fields: fields}, field) do\n    Map.get(fields, field)\n  end\n\n  @spec save_document(t(), boolean()) :: t()\n  def save_document(%__MODULE__{fields: fields} = index, save) do\n    fields =\n      fields\n      |> Enum.map(fn {key, field} -> {key, %{field | store: save}} end)\n      |> Enum.into(%{})\n\n    %{index | fields: fields}\n  end\n\n  @spec add_documents(t(), list(map())) :: t()\n  def add_documents(%__MODULE__{fields: fields, ref: ref} = index, documents) do\n    :ok = persist(fields, ref, documents, &Field.add/2)\n\n    update_documents_size(index)\n  end\n\n  @spec update_documents(t(), list(map())) :: t()\n  def update_documents(%__MODULE__{ref: ref, fields: fields} = index, documents) do\n    :ok = persist(fields, ref, documents, &Field.update/2)\n\n    update_documents_size(index)\n  end\n\n  @spec remove_documents(t(), list(Field.document_ref())) :: t()\n  def remove_documents(%__MODULE__{fields: fields} = index, document_ids) do\n    
Enum.each(fields, fn {_, field} ->\n      Field.remove(field, document_ids)\n    end)\n\n    update_documents_size(index)\n  end\n\n  @spec analyze(t(), document_field(), any(), keyword()) :: Enumerable.t()\n  def analyze(%__MODULE__{fields: fields}, field, content, options) do\n    fields\n    |> Map.get(field)\n    |> Field.analyze(content, options)\n  end\n\n  @spec terms(t(), keyword()) :: Enumerable.t()\n  def terms(%__MODULE__{fields: fields}, query) do\n    field = Keyword.get(query, :field)\n\n    fields\n    |> Map.get(field)\n    |> Field.terms(query)\n  end\n\n  @spec all(t()) :: list(Field.document_ref())\n  def all(%__MODULE__{ref: ref, fields: fields}) do\n    fields\n    |> Map.get(ref)\n    |> Field.documents()\n  end\n\n  @spec update_documents_size(t()) :: t()\n  def update_documents_size(%__MODULE__{fields: fields} = index) do\n    size =\n      Enum.reduce(fields, 0, fn {_, field}, acc ->\n        size = Field.length(field, :ids)\n\n        if size > acc do\n          size\n        else\n          acc\n        end\n      end)\n\n    %{index | documents_size: size}\n  end\n\n  @spec search(t(), search_query(), map() | nil) :: list(search_result())\n  def search(index, query, opts \\\\ nil)\n  def search(%__MODULE__{}, nil, _opts), do: []\n\n  def search(%__MODULE__{ref: ref} = index, query, nil) when is_binary(query) do\n    fields = get_fields(index)\n\n    matches =\n      fields\n      |> Enum.reject(&(&1 == ref))\n      |> Enum.map(fn field ->\n        %{\"match\" => %{field => query}}\n      end)\n\n    elasticsearch(index, %{\n      \"query\" => %{\n        \"bool\" => %{\n          \"should\" => matches\n        }\n      }\n    })\n  end\n\n  def search(%__MODULE__{ref: ref} = index, query, %{\"fields\" => fields}) when is_binary(query) do\n    matches =\n      fields\n      |> Enum.filter(fn field ->\n        with true <- field != ref,\n             true <- Map.has_key?(fields, field),\n             %{\"boost\" => boost} <- 
Map.get(fields, field) do\n          boost > 0\n        end\n      end)\n      |> Enum.map(fn field ->\n        %{\"boost\" => boost} = Map.get(fields, field)\n        match = %{field => query}\n\n        %{\"match\" => match, \"boost\" => boost}\n      end)\n\n    elasticsearch(index, %{\n      \"query\" => %{\n        \"bool\" => %{\n          \"should\" => matches\n        }\n      }\n    })\n  end\n\n  def search(%__MODULE__{} = index, %{\"query\" => _} = query, _opts),\n    do: elasticsearch(index, query)\n\n  def search(%__MODULE__{} = index, query, nil) when is_map(query),\n    do: search(index, query, %{\"operator\" => \"OR\"})\n\n  def search(%__MODULE__{} = index, %{} = query, options) do\n    matches =\n      query\n      |> Enum.map(fn {field, content} ->\n        expand = Map.get(options, \"expand\", false)\n\n        operator =\n          options\n          |> Map.get(\"bool\", \"or\")\n          |> String.downcase()\n\n        %{\n          \"expand\" => expand,\n          \"match\" => %{\"operator\" => operator, field => content}\n        }\n      end)\n\n    elasticsearch(index, %{\n      \"query\" => %{\n        \"bool\" => %{\n          \"should\" => matches\n        }\n      }\n    })\n  end\n\n  defp elasticsearch(index, %{\"query\" => root}) do\n    {key, value} = Query.split_root(root)\n\n    query = QueryRepository.parse(key, value, root)\n\n    query\n    |> QueryRepository.score(index)\n    |> Enum.sort(fn a, b -> a.score > b.score end)\n  end\n\n  defp elasticsearch(_index, _query) do\n    raise \"Root object must have a query element\"\n  end\n\n  defp flatten_document(document, prefix \\\\ \"\") do\n    Enum.reduce(document, %{}, fn\n      {key, value}, transformed when is_map(value) ->\n        mapped = flatten_document(value, \"#{prefix}#{key}.\")\n        Map.merge(transformed, mapped)\n\n      {key, value}, transformed ->\n        Map.put(transformed, \"#{prefix}#{key}\", value)\n    end)\n  end\n\n  defp persist(fields, ref, 
documents, persist_fn) do\n    Task.async_stream(documents, fn document ->\n      document = flatten_document(document)\n      save(fields, ref, document, persist_fn)\n    end)\n    |> Stream.run()\n  end\n\n  defp save(fields, ref, document, callback) do\n    Enum.each(fields, fn {attribute, field} ->\n      if document[attribute] do\n        data = [\n          %{id: document[ref], content: document[attribute]}\n        ]\n\n        callback.(field, data)\n      end\n    end)\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/core/token.ex",
    "content": "defmodule Elasticlunr.Token do\n  defstruct ~w[token metadata]a\n\n  @type t :: %__MODULE__{\n          token: binary(),\n          metadata: map()\n        }\n\n  @spec new(binary(), map()) :: t()\n  def new(token, metadata \\\\ %{}) do\n    struct!(__MODULE__, token: token, metadata: metadata)\n  end\n\n  @spec update(t(), keyword()) :: t()\n  def update(%__MODULE__{token: str, metadata: metadata} = token, opts) do\n    opts =\n      opts\n      |> Keyword.put_new(:token, str)\n      |> Keyword.put_new(:metadata, metadata)\n\n    struct!(token, opts)\n  end\n\n  @spec get_position(t()) :: {integer(), integer()} | nil\n  def get_position(%__MODULE__{metadata: %{start: start, end: end_1}}), do: {start, end_1}\n  def get_position(%__MODULE__{metadata: %{}}), do: nil\nend\n"
  },
  {
    "path": "lib/elasticlunr/db.ex",
    "content": "defmodule Elasticlunr.DB do\n  defstruct [:name, :options]\n\n  @type t :: %__MODULE__{\n          name: atom(),\n          options: list(atom())\n        }\n\n  @spec init(atom(), list()) :: t()\n  def init(name, opts \\\\ []) when is_atom(name) do\n    default = ~w[compressed named_table]a\n    options = Enum.uniq(default ++ opts)\n\n    unless Enum.member?(:ets.all(), name) do\n      :ets.new(name, options)\n    end\n\n    struct!(__MODULE__, name: name, options: options)\n  end\n\n  @spec delete(t(), term()) :: boolean()\n  def delete(%__MODULE__{name: name}, pattern), do: :ets.delete(name, pattern)\n\n  @spec destroy(t()) :: boolean()\n  def destroy(%__MODULE__{name: name}) do\n    if Enum.member?(:ets.all(), name) do\n      :ets.delete(name)\n    else\n      true\n    end\n  end\n\n  @spec insert(t(), term()) :: boolean()\n  def insert(%__MODULE__{name: name}, data), do: :ets.insert(name, data)\n\n  @spec lookup(t(), term()) :: list(term())\n  def lookup(%__MODULE__{name: name}, key), do: :ets.lookup(name, key)\n\n  @spec member?(t(), term()) :: boolean()\n  def member?(%__MODULE__{name: name}, key), do: :ets.member(name, key)\n\n  @spec match_delete(t(), term()) :: boolean()\n  def match_delete(%__MODULE__{name: name}, pattern), do: :ets.match_delete(name, pattern)\n\n  @spec match_object(t(), term()) :: list(term())\n  def match_object(%__MODULE__{name: name}, spec), do: :ets.match_object(name, spec)\n\n  @spec select_count(t(), term()) :: pos_integer()\n  def select_count(%__MODULE__{name: name}, spec), do: :ets.select_count(name, spec)\n\n  @spec from(t(), keyword()) :: {:ok, t()}\n  def from(%__MODULE__{name: name} = db, file: file) do\n    with true <- File.exists?(file),\n         {:ok, ^name} <- :dets.open_file(name, file: file),\n         true <- :ets.from_dets(name, name) do\n      {:ok, db}\n    end\n  end\n\n  @spec to(t(), keyword()) :: :ok\n  def to(%__MODULE__{name: name}, file: file) do\n    unless Enum.member?(:dets.all(), 
name) do\n      :dets.open_file(name, ram_file: true, file: file)\n    end\n\n    with ^name <- :ets.to_dets(name, name) do\n      :dets.close(name)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/deserializer.ex",
    "content": "defprotocol Elasticlunr.Deserializer do\n  @spec deserialize(Enum.t()) :: Elasticlunr.Index.t()\n  def deserialize(data)\nend\n\ndefmodule Elasticlunr.Deserializer.Parser do\n  alias Elasticlunr.{Index, Pipeline}\n\n  @spec process(Enum.t()) :: Index.t()\n  def process(data) do\n    Enum.reduce(data, nil, fn line, acc ->\n      [command | opts] =\n        String.trim(line)\n        |> String.split(\"#\")\n\n      case parse(command, acc, opts) do\n        {%Index{}, _extra} = acc ->\n          acc\n\n        %Index{} = index ->\n          index\n      end\n    end)\n    |> case do\n      {%Index{} = index, _} ->\n        index\n\n      result ->\n        result\n    end\n  end\n\n  defp parse(command, acc, [opts]), do: parse(command, acc, opts)\n\n  defp parse(\"settings\", nil, opts) do\n    opts = to_options(opts)\n\n    {_, pipeline_map} =\n      opts[:pipeline]\n      |> String.split(\",\")\n      |> Enum.reduce({0, %{}}, fn callback, {index, map} ->\n        {index + 1, Map.put(map, to_string(index), String.to_atom(callback))}\n      end)\n\n    opts = Keyword.replace(opts, :pipeline, parse_pipeline(opts[:pipeline]))\n\n    {Index.new(opts), %{pipeline: pipeline_map}}\n  end\n\n  defp parse(\"db\", acc, _), do: acc\n\n  defp parse(\"field\", {index, extra}, opts) do\n    opts = to_options(opts)\n\n    opts =\n      Enum.map(opts, fn\n        {:pipeline, value} ->\n          {:pipeline, parse_pipeline(value, extra[:pipeline])}\n\n        option ->\n          option\n      end)\n\n    index = Index.add_field(index, opts[:name], opts)\n    {index, extra}\n  end\n\n  defp parse(_, acc, _), do: acc\n\n  defp parse_pipeline(option, cache \\\\ %{}) do\n    callbacks =\n      option\n      |> String.split(\",\")\n      |> Enum.map(fn callback ->\n        Map.get_lazy(cache, callback, fn -> String.to_atom(callback) end)\n      end)\n\n    Pipeline.new(callbacks)\n  end\n\n  defp to_options(options) when is_binary(options) do\n    String.split(options, 
\"|\")\n    |> Enum.reduce([], fn option, acc ->\n      [key | values] = String.split(option, \":\")\n      [value] = values\n      Keyword.put(acc, String.to_atom(key), parse_value(value))\n    end)\n  end\n\n  defp parse_value(\"true\"), do: true\n  defp parse_value(\"false\"), do: false\n  defp parse_value(val), do: val\nend\n"
  },
  {
    "path": "lib/elasticlunr/dsl/query/bool_query.ex",
    "content": "defmodule Elasticlunr.Dsl.BoolQuery do\n  use Elasticlunr.Dsl.Query\n\n  alias Elasticlunr.Index\n  alias Elasticlunr.Dsl.{NotQuery, Query, QueryRepository}\n\n  defstruct ~w[rewritten should must must_not filter minimum_should_match]a\n\n  @type clause :: struct() | list(struct())\n\n  @type t :: %__MODULE__{\n          filter: clause(),\n          should: clause(),\n          must: nil | struct(),\n          must_not: nil | struct(),\n          rewritten: boolean(),\n          minimum_should_match: integer()\n        }\n\n  @spec new(keyword) :: t()\n  def new(opts) do\n    attrs = %{\n      should: Keyword.get(opts, :should, []),\n      must: Keyword.get(opts, :must),\n      must_not: Keyword.get(opts, :must_not),\n      filter: Keyword.get(opts, :filter),\n      rewritten: Keyword.get(opts, :rewritten, false),\n      minimum_should_match: extract_minimum_should_match(opts)\n    }\n\n    struct!(__MODULE__, attrs)\n  end\n\n  @impl true\n  def rewrite(\n        %__MODULE__{\n          filter: filter,\n          must: must,\n          must_not: must_not,\n          should: should,\n          minimum_should_match: minimum_should_match\n        },\n        %Index{} = index\n      ) do\n    should =\n      should\n      |> Kernel.||([])\n      |> Enum.map(&QueryRepository.rewrite(&1, index))\n\n    must =\n      case must do\n        nil ->\n          nil\n\n        mod when is_struct(mod) ->\n          QueryRepository.rewrite(mod, index)\n      end\n\n    filters = filter || []\n\n    filters =\n      case must_not do\n        nil ->\n          filters\n\n        must_not when is_struct(must_not) ->\n          query =\n            must_not\n            |> QueryRepository.rewrite(index)\n            |> NotQuery.new()\n\n          [query] ++ filters\n      end\n      |> Enum.map(&QueryRepository.rewrite(&1, index))\n\n    opts = [\n      must: must,\n      should: should,\n      filter: filters,\n      rewritten: true,\n      minimum_should_match: 
minimum_should_match\n    ]\n\n    new(opts)\n  end\n\n  @impl true\n  def score(%__MODULE__{rewritten: false} = query, %Index{} = index, options) do\n    query\n    |> rewrite(index)\n    |> score(index, options)\n  end\n\n  def score(\n        %__MODULE__{\n          must: must,\n          filter: filter,\n          should: should,\n          minimum_should_match: minimum_should_match\n        },\n        %Index{} = index,\n        _options\n      ) do\n    filter_results = filter_result(filter, index)\n    filter_results = filter_must(must, filter_results, index)\n\n    {docs, filtered} =\n      case filter_results do\n        false ->\n          {%{}, nil}\n\n        value ->\n          Enum.reduce(value, {%{}, []}, fn %{ref: ref, score: score}, {docs, filtered} ->\n            filtered = [ref] ++ filtered\n\n            doc = %{\n              ref: ref,\n              matched: 0,\n              positions: %{},\n              score: score || 0\n            }\n\n            docs = Map.put(docs, ref, doc)\n\n            {docs, filtered}\n          end)\n      end\n\n    {docs, _filtered} =\n      should\n      |> Enum.reduce({docs, filtered}, fn query, {docs, filtered} ->\n        opts =\n          case filtered do\n            nil ->\n              []\n\n            filtered ->\n              [filtered: filtered]\n          end\n\n        results = QueryRepository.score(query, index, opts)\n\n        docs =\n          results\n          |> Enum.reduce(docs, fn doc, docs ->\n            ob =\n              Map.get(docs, doc.ref, %{\n                ref: doc.ref,\n                score: 0,\n                matched: 0,\n                positions: %{}\n              })\n\n            %{matched: matched, score: score, positions: positions} = ob\n\n            # credo:disable-for-lines:3\n            positions =\n              Map.get(doc, :positions, %{})\n              |> Enum.reduce(positions, fn {field, tokens}, positions ->\n                p = Map.get(positions, 
field, [])\n                p = Enum.reduce(tokens, p, &(&2 ++ [&1]))\n                Map.put(positions, field, p)\n              end)\n\n            doc_score = Map.get(doc, :score, 0)\n\n            ob = %{ob | positions: positions, matched: matched + 1, score: score + doc_score}\n\n            Map.put(docs, doc.ref, ob)\n          end)\n\n        {docs, filtered}\n      end)\n\n    docs\n    |> Stream.map(&elem(&1, 1))\n    |> Stream.filter(fn doc -> doc.matched >= minimum_should_match && doc.score > 0 end)\n  end\n\n  defp filter_result(nil, _index), do: false\n  defp filter_result([], _index), do: false\n\n  defp filter_result(filter, index) do\n    filter\n    |> Enum.reduce(false, fn query, acc ->\n      q =\n        case acc do\n          false ->\n            []\n\n          val ->\n            [filtered: Enum.map(val, & &1.ref)]\n        end\n\n      QueryRepository.filter(query, index, q)\n    end)\n  end\n\n  defp filter_must(nil, filter_results, _index), do: filter_results\n\n  defp filter_must(must_query, filter_results, index) when is_struct(must_query) do\n    q =\n      case filter_results do\n        false ->\n          []\n\n        results ->\n          [filtered: Enum.map(results, & &1.ref)]\n      end\n\n    QueryRepository.score(must_query, index, q)\n  end\n\n  @impl true\n  def parse(options, _query_options, repo) do\n    default_mapper = fn query ->\n      case Query.split_root(query) do\n        {key, value} ->\n          repo.parse(key, value, query)\n\n        _ ->\n          repo.parse(\"match_all\", [])\n      end\n    end\n\n    []\n    |> patch_options(:should, options, default_mapper)\n    |> patch_options(:filter, options, default_mapper)\n    |> patch_options(:must, options, repo)\n    |> patch_options(:must_not, options, repo)\n    |> patch_options(:minimum_should_match, options)\n    |> __MODULE__.new()\n  end\n\n  defp patch_options(opts, :should, options, mapper) do\n    case Map.get(options, \"should\") do\n      nil ->\n   
     opts\n\n      should when is_list(should) ->\n        should =\n          should\n          |> Enum.map(mapper)\n\n        Keyword.put(opts, :should, should)\n\n      should ->\n        Keyword.put(opts, :should, [mapper.(should)])\n    end\n  end\n\n  defp patch_options(opts, :filter, options, mapper) do\n    case Map.get(options, \"filter\") do\n      nil ->\n        opts\n\n      filter when is_list(filter) ->\n        filter = Enum.map(filter, mapper)\n        Keyword.put(opts, :filter, filter)\n\n      filter ->\n        Keyword.put(opts, :filter, [mapper.(filter)])\n    end\n  end\n\n  defp patch_options(opts, :must, options, repo) do\n    case Map.get(options, \"must\") do\n      nil ->\n        opts\n\n      must when is_map(must) ->\n        {key, options} = Query.split_root(must)\n        must = repo.parse(key, options, must)\n\n        Keyword.put(opts, :must, must)\n    end\n  end\n\n  defp patch_options(opts, :must_not, options, repo) do\n    case Map.get(options, \"must_not\") do\n      nil ->\n        opts\n\n      must_not ->\n        {key, options} = Query.split_root(must_not)\n\n        q = repo.parse(key, options, must_not)\n\n        Keyword.put(opts, :must_not, q)\n    end\n  end\n\n  defp patch_options(opts, :minimum_should_match, options) do\n    options\n    |> Map.get(\"minimum_should_match\")\n    |> case do\n      nil ->\n        opts\n\n      value when is_integer(value) ->\n        value <= Keyword.get(opts, :should) |> Enum.count()\n    end\n    |> case do\n      true ->\n        minimum_should_match = Map.get(options, \"minimum_should_match\")\n        Keyword.put(opts, :minimum_should_match, minimum_should_match)\n\n      _ ->\n        opts\n    end\n  end\n\n  defp extract_minimum_should_match(opts) do\n    default_value =\n      case not is_empty_clause?(opts[:should]) and\n             (is_empty_clause?(opts[:must]) or is_empty_clause?(opts[:filter])) do\n        true -> 1\n        false -> 0\n      end\n\n    
Keyword.get(opts, :minimum_should_match, default_value)\n  end\n\n  defp is_empty_clause?(nil), do: true\n  defp is_empty_clause?(list) when is_list(list), do: Enum.empty?(list)\n  defp is_empty_clause?(%{}), do: false\nend\n"
  },
  {
    "path": "lib/elasticlunr/dsl/query/match_all_query.ex",
    "content": "defmodule Elasticlunr.Dsl.MatchAllQuery do\n  use Elasticlunr.Dsl.Query\n\n  alias Elasticlunr.Index\n\n  defstruct ~w[boost]a\n  @type t :: %__MODULE__{boost: integer()}\n\n  def new(boost \\\\ 1), do: struct!(__MODULE__, boost: boost)\n\n  @impl true\n  def parse(options, _query_options, _repo) do\n    options\n    |> Map.get(\"boost\", 1)\n    |> __MODULE__.new()\n  end\n\n  @impl true\n  def score(%__MODULE__{boost: boost}, %Index{} = index, _options) do\n    doc_ids = Index.all(index)\n\n    Stream.map(doc_ids, &%{ref: &1, score: 1.0 * boost})\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/dsl/query/match_query.ex",
    "content": "defmodule Elasticlunr.Dsl.MatchQuery do\n  use Elasticlunr.Dsl.Query\n\n  alias Elasticlunr.{Index}\n  alias Elasticlunr.Dsl.{MatchAllQuery, Query, QueryRepository, TermsQuery}\n\n  defstruct ~w[expand field query boost fuzziness minimum_should_match operator]a\n\n  @type t :: %__MODULE__{\n          expand: boolean(),\n          boost: integer(),\n          field: Index.document_field(),\n          query: any(),\n          fuzziness: integer(),\n          operator: binary(),\n          minimum_should_match: pos_integer()\n        }\n\n  @spec new(keyword) :: t()\n  def new(opts) do\n    attrs = %{\n      expand: Keyword.get(opts, :expand, false),\n      field: Keyword.get(opts, :field, \"\"),\n      query: Keyword.get(opts, :query, \"\"),\n      boost: Keyword.get(opts, :boost, 1),\n      fuzziness: Keyword.get(opts, :fuzziness, 0),\n      operator: Keyword.get(opts, :operator, \"or\"),\n      minimum_should_match: Keyword.get(opts, :minimum_should_match, 1)\n    }\n\n    struct!(__MODULE__, attrs)\n  end\n\n  @impl true\n  def rewrite(\n        %__MODULE__{\n          boost: boost,\n          field: field,\n          query: query,\n          expand: expand,\n          operator: operator,\n          fuzziness: fuzziness,\n          minimum_should_match: minimum_should_match\n        },\n        %Index{} = index\n      ) do\n    tokens = Index.analyze(index, field, query, is_query: true)\n\n    tokens_length = length(tokens)\n\n    cond do\n      tokens_length > 1 ->\n        minimum_should_match =\n          case operator == \"and\" && minimum_should_match == 0 do\n            true ->\n              tokens_length\n\n            false ->\n              minimum_should_match\n          end\n\n        TermsQuery.new(\n          field: field,\n          expand: expand,\n          terms: tokens,\n          fuzziness: fuzziness,\n          boost: boost,\n          minimum_should_match: minimum_should_match\n        )\n\n      tokens_length == 1 ->\n       
 TermsQuery.new(\n          field: field,\n          expand: expand,\n          terms: tokens,\n          fuzziness: fuzziness,\n          boost: boost\n        )\n\n      true ->\n        MatchAllQuery.new()\n    end\n  end\n\n  @impl true\n  def score(%__MODULE__{} = module, %Index{} = index, options) do\n    module\n    |> rewrite(index)\n    |> QueryRepository.score(index, options)\n  end\n\n  @impl true\n  def parse(options, _query_options, repo) do\n    cond do\n      Enum.empty?(options) ->\n        repo.parse(\"match_all\", %{})\n\n      Enum.count(options) > 1 ->\n        minimum_should_match = Enum.count(options)\n\n        should =\n          Enum.map(options, fn {field, content} ->\n            %{\"match\" => %{field => content}}\n          end)\n\n        repo.parse(\"bool\", %{\n          \"should\" => should,\n          \"minimum_should_match\" => minimum_should_match\n        })\n\n      true ->\n        {field, params} = Query.split_root(options)\n\n        opts = to_match_params(params)\n\n        new(\n          field: field,\n          query: Keyword.get(opts, :query),\n          expand: Keyword.get(opts, :expand),\n          operator: Keyword.get(opts, :operator),\n          fuzziness: Keyword.get(opts, :fuzziness),\n          minimum_should_match: Keyword.get(opts, :minimum_should_match)\n        )\n    end\n  end\n\n  defp to_match_params(params) when is_map(params) do\n    query = Map.get(params, \"query\")\n    fuzziness = Map.get(params, \"fuzziness\", 0)\n    operator = Map.get(params, \"operator\", \"or\")\n    expand = Map.get(params, \"expand\", false)\n\n    minimum_should_match = Map.get(params, \"minimum_should_match\", default_min_match(params))\n\n    [\n      query: query,\n      expand: expand,\n      operator: operator,\n      fuzziness: fuzziness,\n      minimum_should_match: minimum_should_match\n    ]\n  end\n\n  defp to_match_params(params), do: to_match_params(%{\"query\" => params})\n\n  defp default_min_match(params) 
do\n    case Map.get(params, \"operator\") == \"and\" do\n      true ->\n        0\n\n      false ->\n        1\n    end\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/dsl/query/not_query.ex",
    "content": "defmodule Elasticlunr.Dsl.NotQuery do\n  use Elasticlunr.Dsl.Query\n\n  alias Elasticlunr.Index\n  alias Elasticlunr.Dsl.{Query, QueryRepository}\n\n  defstruct ~w[inner_query]a\n  @type t :: %__MODULE__{inner_query: struct()}\n\n  @spec new(struct()) :: t()\n  def new(inner_query), do: %__MODULE__{inner_query: inner_query}\n\n  @impl true\n  def parse(options, _query_options, _repo) do\n    {key, value} = Query.split_root(options)\n\n    key\n    |> QueryRepository.parse(value, options)\n    |> new()\n  end\n\n  @impl true\n  def score(%__MODULE__{inner_query: inner_query}, %Index{} = index, options) do\n    query_all = Index.all(index)\n    query_score = QueryRepository.score(inner_query, index, options)\n\n    matched_ids = Enum.map(query_score, & &1.ref)\n\n    query_all\n    |> Stream.reject(&(&1 in matched_ids))\n    |> Stream.map(&%{ref: &1, score: 1})\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/dsl/query/terms_query.ex",
    "content": "defmodule Elasticlunr.Dsl.TermsQuery do\n  use Elasticlunr.Dsl.Query\n\n  alias Elasticlunr.Dsl.Query\n  alias Elasticlunr.{Index, Token}\n\n  defstruct ~w[minimum_should_match expand field terms boost fuzziness]a\n\n  @type t :: %__MODULE__{\n          minimum_should_match: pos_integer(),\n          expand: boolean(),\n          field: Index.document_field(),\n          terms: list(Token.t()),\n          boost: integer(),\n          fuzziness: integer()\n        }\n\n  @options ~w[boost expand fuzziness minimum_should_match]\n\n  @spec new(keyword()) :: t()\n  def new(opts) do\n    attrs = %{\n      minimum_should_match: Keyword.get(opts, :minimum_should_match, 1),\n      expand: Keyword.get(opts, :expand, false),\n      field: Keyword.get(opts, :field, \"\"),\n      terms: Keyword.get(opts, :terms, []),\n      boost: Keyword.get(opts, :boost, 1),\n      fuzziness: Keyword.get(opts, :fuzziness, 0)\n    }\n\n    struct!(__MODULE__, attrs)\n  end\n\n  @impl true\n  def score(\n        %__MODULE__{\n          boost: boost,\n          field: field,\n          expand: expand,\n          terms: terms,\n          fuzziness: fuzziness,\n          minimum_should_match: minimum_should_match\n        },\n        %Index{} = index,\n        options \\\\ []\n      ) do\n    terms =\n      case expand do\n        true ->\n          Enum.map(terms, fn\n            %Token{token: token} ->\n              Regex.compile!(\"^#{token}.*\")\n\n            token ->\n              Regex.compile!(\"^#{token}.*\")\n          end)\n\n        false ->\n          terms\n      end\n\n    query = [\n      field: field,\n      terms: terms,\n      fuzziness: fuzziness,\n      minimum_should_match: minimum_should_match\n    ]\n\n    query =\n      case Keyword.get(options, :filtered) do\n        nil ->\n          query\n\n        filtered when is_list(filtered) ->\n          Keyword.put(query, :docs, filtered)\n      end\n\n    docs = Index.terms(index, query)\n\n    
pick_highest_score = fn a, b ->\n      if(hd(a) > hd(b), do: a, else: b)\n    end\n\n    Stream.map(docs, &elem(&1, 0))\n    |> Enum.reduce([], fn id, matched ->\n      [score, doc] =\n        Map.get(docs, id)\n        |> Stream.map(fn doc ->\n          [doc.tf * :math.pow(doc.idf, 2) * doc.norm, doc]\n        end)\n        |> Enum.reduce([0, nil], pick_highest_score)\n\n      ob = %{\n        ref: id,\n        field: field,\n        score: score * boost,\n        positions: Map.put(%{}, field, doc.positions)\n      }\n\n      matched ++ [ob]\n    end)\n  end\n\n  @impl true\n  def parse(options, _query_options, repo) do\n    cond do\n      Enum.empty?(options) ->\n        repo.parse(\"match_all\", %{})\n\n      Enum.count(options) > 1 ->\n        should =\n          options\n          |> Enum.reject(fn {key, _field} -> key in @options end)\n          |> Enum.map(fn {field, terms} ->\n            %{\"terms\" => %{field => terms}}\n          end)\n\n        repo.parse(\"bool\", %{\"should\" => should})\n\n      true ->\n        {field, params} = Query.split_root(options)\n        terms = get_terms(params)\n        opts = to_terms_params(params)\n\n        __MODULE__.new([field: field, terms: terms] ++ opts)\n    end\n  end\n\n  defp get_terms(params) when is_map(params) do\n    params\n    |> Map.get(\"value\")\n    |> to_list()\n  end\n\n  defp get_terms(value), do: to_list(value)\n\n  defp to_terms_params(params) when is_map(params) do\n    []\n    |> update_options(params, :minimum_should_match)\n    |> update_options(params, :fuzziness)\n    |> update_options(params, :expand)\n    |> update_options(params, :boost)\n  end\n\n  defp to_terms_params(params), do: to_terms_params(%{\"value\" => params})\n\n  defp update_options(opts, params, key) do\n    case Map.get(params, to_string(key)) do\n      nil ->\n        opts\n\n      value ->\n        Keyword.put(opts, key, value)\n    end\n  end\n\n  defp to_list(value) when is_list(value), do: value\n  defp 
to_list(value), do: [value]\nend\n"
  },
  {
    "path": "lib/elasticlunr/dsl/query.ex",
    "content": "defmodule Elasticlunr.Dsl.Query do\n  alias Elasticlunr.{Field, Index, Dsl.QueryRepository}\n\n  @type score_results ::\n          list(%{\n            score: integer(),\n            ref: Field.document_ref()\n          })\n\n  @callback filter(module :: struct(), index :: Index.t(), options :: keyword()) :: list()\n  @callback score(module :: struct(), index :: Index.t(), options :: keyword()) ::\n              score_results() | %Stream{}\n  @callback rewrite(module :: struct(), index :: Index.t()) :: struct()\n  @callback parse(options :: map(), query_options :: map(), repo :: module()) ::\n              struct()\n\n  @spec split_root(map() | tuple()) :: {atom(), any()} | any()\n  def split_root(root) when is_map(root) do\n    [root_key] = Map.keys(root)\n    value = Map.get(root, root_key)\n\n    {root_key, value}\n  end\n\n  def split_root({_, _} = root), do: root\n  def split_root(root), do: root\n\n  defmacro __using__(_) do\n    quote location: :keep do\n      @before_compile Elasticlunr.Dsl.Query\n      @behaviour Elasticlunr.Dsl.Query\n    end\n  end\n\n  defmacro __before_compile__(_) do\n    mod = __CALLER__.module\n\n    quote bind_quoted: [mod: mod] do\n      if not Module.defines?(mod, {:filter, 3}) do\n        @impl true\n        def filter(query, index, options) do\n          query\n          |> QueryRepository.score(index, options)\n          |> Enum.filter(&(&1.score > 0))\n        end\n      end\n\n      if not Module.defines?(mod, {:rewrite, 2}) do\n        @impl true\n        def rewrite(query, _index), do: query\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/dsl/query_repository.ex",
    "content": "defmodule Elasticlunr.Dsl.QueryRepository do\n  alias Elasticlunr.Index\n  alias Elasticlunr.Dsl.{BoolQuery, MatchAllQuery, MatchQuery, NotQuery, TermsQuery}\n\n  def get(\"not\"), do: NotQuery\n  def get(\"bool\"), do: BoolQuery\n  def get(\"match\"), do: MatchQuery\n  def get(\"terms\"), do: TermsQuery\n  def get(\"match_all\"), do: MatchAllQuery\n  def get(element), do: raise(\"Unknown query type #{element}\")\n\n  @spec parse(binary(), map(), map(), module()) :: struct()\n  def parse(module, options, query_options \\\\ %{}, repo \\\\ __MODULE__) do\n    module = get(module)\n    module.parse(options, query_options, repo)\n  end\n\n  @spec score(struct(), Index.t(), keyword()) :: list()\n  def score(query, index, options \\\\ []) when is_struct(query) do\n    query.__struct__.score(query, index, options)\n  end\n\n  @spec filter(struct(), Index.t(), keyword()) :: list()\n  def filter(query, index, options \\\\ []) when is_struct(query) do\n    query.__struct__.filter(query, index, options)\n  end\n\n  @spec rewrite(struct(), Index.t()) :: struct()\n  def rewrite(query, index) when is_struct(query) do\n    query.__struct__.rewrite(query, index)\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/manager/index_manager.ex",
    "content": "defmodule Elasticlunr.IndexManager do\n  use GenServer\n\n  alias Elasticlunr.{Index, IndexRegistry, IndexSupervisor, Storage}\n  alias Elasticlunr.Utils.Process\n\n  @spec preload() :: :ok\n  def preload do\n    Storage.all()\n    |> Stream.each(&start/1)\n    |> Stream.run()\n  end\n\n  @spec get(binary()) :: Index.t() | :not_running\n  def get(name) do\n    case loaded?(name) do\n      true -> name |> via |> GenServer.call(:get)\n      false -> :not_running\n    end\n  end\n\n  @spec save(Index.t()) :: {:ok, Index.t()} | {:error, any()}\n  def save(%Index{} = index) do\n    with {:ok, _} <- start(index),\n         :ok <- Storage.write(index) do\n      {:ok, index}\n    end\n  end\n\n  @spec update(Index.t()) :: Index.t() | :not_running\n  def update(%Index{name: name} = index) do\n    with true <- loaded?(name),\n         index <- name |> via |> GenServer.call({:update, index}),\n         :ok <- Storage.write(index) do\n      index\n    else\n      false ->\n        :not_running\n\n      err ->\n        err\n    end\n  end\n\n  @spec remove(Index.t()) :: :ok | :not_running\n  def remove(%Index{name: name}) do\n    with [{pid, _}] <- Registry.lookup(IndexRegistry, name),\n         :ok <- Storage.delete(name),\n         :ok <- DynamicSupervisor.terminate_child(IndexSupervisor, pid) do\n      :ok\n    else\n      _ ->\n        :not_running\n    end\n  end\n\n  @spec loaded?(binary()) :: boolean()\n  def loaded?(name) do\n    loaded_indices()\n    |> Enum.any?(fn\n      ^name ->\n        true\n\n      _ ->\n        false\n    end)\n  end\n\n  @spec loaded_indices :: [binary()]\n  def loaded_indices do\n    Process.active_processes(IndexSupervisor, IndexRegistry, __MODULE__)\n  end\n\n  @spec init(Index.t()) :: {:ok, Index.t()}\n  def init(%Index{} = index) do\n    {:ok, index}\n  end\n\n  @spec start_link(Index.t()) :: :ignore | {:error, any} | {:ok, pid}\n  def start_link(%Index{name: name} = index) do\n    GenServer.start_link(__MODULE__, index, 
name: via(name), hibernate_after: 5_000)\n  end\n\n  @spec child_spec(Index.t()) :: map()\n  def child_spec(%Index{name: id} = index) do\n    %{\n      id: {__MODULE__, id},\n      start: {__MODULE__, :start_link, [index]},\n      restart: :transient\n    }\n  end\n\n  @spec via(binary()) :: {:via, Registry, {IndexRegistry, atom()}}\n  def via(name) do\n    {:via, Registry, {IndexRegistry, name}}\n  end\n\n  def handle_call(:get, _from, index) do\n    {:reply, index, index}\n  end\n\n  def handle_call({:update, index}, _from, _state) do\n    {:reply, index, index}\n  end\n\n  defp start(index) do\n    DynamicSupervisor.start_child(IndexSupervisor, {__MODULE__, index})\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/pipeline/stemmer.ex",
    "content": "defmodule Elasticlunr.Pipeline.Stemmer do\n  alias Elasticlunr.Token\n\n  @behaviour Elasticlunr.Pipeline\n\n  @impl true\n  def call(%Token{token: str} = token) do\n    Token.update(token, token: Stemmer.stem(str))\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/pipeline/stop_word_filter.ex",
    "content": "defmodule Elasticlunr.Pipeline.StopWordFilter do\n  alias Elasticlunr.Token\n\n  @behaviour Elasticlunr.Pipeline\n\n  @default_stop_words [\n    \"a\",\n    \"able\",\n    \"about\",\n    \"across\",\n    \"after\",\n    \"all\",\n    \"almost\",\n    \"also\",\n    \"am\",\n    \"among\",\n    \"an\",\n    \"and\",\n    \"any\",\n    \"are\",\n    \"as\",\n    \"at\",\n    \"be\",\n    \"because\",\n    \"been\",\n    \"but\",\n    \"by\",\n    \"can\",\n    \"cannot\",\n    \"could\",\n    \"dear\",\n    \"did\",\n    \"do\",\n    \"does\",\n    \"either\",\n    \"else\",\n    \"ever\",\n    \"every\",\n    \"for\",\n    \"from\",\n    \"get\",\n    \"got\",\n    \"had\",\n    \"has\",\n    \"have\",\n    \"he\",\n    \"her\",\n    \"hers\",\n    \"him\",\n    \"his\",\n    \"how\",\n    \"however\",\n    \"i\",\n    \"if\",\n    \"in\",\n    \"into\",\n    \"is\",\n    \"it\",\n    \"its\",\n    \"just\",\n    \"least\",\n    \"let\",\n    \"like\",\n    \"likely\",\n    \"may\",\n    \"me\",\n    \"might\",\n    \"most\",\n    \"must\",\n    \"my\",\n    \"neither\",\n    \"no\",\n    \"nor\",\n    \"not\",\n    \"of\",\n    \"off\",\n    \"often\",\n    \"on\",\n    \"only\",\n    \"or\",\n    \"other\",\n    \"our\",\n    \"own\",\n    \"rather\",\n    \"said\",\n    \"say\",\n    \"says\",\n    \"she\",\n    \"should\",\n    \"since\",\n    \"so\",\n    \"some\",\n    \"than\",\n    \"that\",\n    \"the\",\n    \"their\",\n    \"them\",\n    \"then\",\n    \"there\",\n    \"these\",\n    \"they\",\n    \"this\",\n    \"tis\",\n    \"to\",\n    \"too\",\n    \"twas\",\n    \"us\",\n    \"wants\",\n    \"was\",\n    \"we\",\n    \"were\",\n    \"what\",\n    \"when\",\n    \"where\",\n    \"which\",\n    \"while\",\n    \"who\",\n    \"whom\",\n    \"why\",\n    \"will\",\n    \"with\",\n    \"would\",\n    \"yet\",\n    \"you\",\n    \"your\"\n  ]\n\n  @impl true\n  def call(%Token{token: token}) when token in @default_stop_words, do: nil\n  
def call(token), do: token\nend\n"
  },
  {
    "path": "lib/elasticlunr/pipeline/trimmer.ex",
    "content": "defmodule Elasticlunr.Pipeline.Trimmer do\n  alias Elasticlunr.Token\n\n  @behaviour Elasticlunr.Pipeline\n\n  @impl true\n  def call(%Token{token: str} = token) do\n    str = Regex.replace(~r/^\\W+/, str, \"\")\n    str = Regex.replace(~r/\\W+$/, str, \"\")\n\n    Token.update(token, token: str)\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/pipeline.ex",
    "content": "defmodule Elasticlunr.Pipeline do\n  alias Elasticlunr.{Token, Tokenizer}\n  alias Elasticlunr.Pipeline.{Stemmer, StopWordFilter, Trimmer}\n\n  defstruct callback: []\n\n  @type t :: %__MODULE__{\n          callback: list(module() | function())\n        }\n\n  @callback call(Token.t()) :: Token.t() | list(Token.t()) | nil\n\n  @spec new(list(module())) :: struct\n  def new(callbacks \\\\ []) do\n    struct!(__MODULE__, callback: callbacks)\n  end\n\n  @spec add(t(), module()) :: t()\n  def add(%__MODULE__{callback: callback} = pipeline, module) do\n    callback = Enum.uniq([module] ++ callback)\n    %{pipeline | callback: callback}\n  end\n\n  @spec default_runners() :: list(module())\n  def default_runners, do: [Trimmer, StopWordFilter, Stemmer]\n\n  @spec run(Elasticlunr.Pipeline.t(), list(Token.t())) :: list(Token.t())\n  def run(%__MODULE__{} = pipeline, tokens) when not is_list(tokens) do\n    tokens = Tokenizer.tokenize(tokens)\n    run(pipeline, tokens)\n  end\n\n  def run(%__MODULE__{callback: []}, tokens), do: tokens\n\n  def run(%__MODULE__{callback: callback}, tokens) do\n    callback\n    |> Enum.reduce(tokens, fn module, acc ->\n      excute_runner(acc, module)\n    end)\n  end\n\n  @spec insert_before(t(), module(), module()) :: t()\n  def insert_before(%__MODULE__{callback: callback} = pipeline, module, before_module) do\n    case Enum.find_index(callback, &(&1 == before_module)) do\n      nil ->\n        add(pipeline, module)\n\n      index ->\n        callback =\n          callback\n          |> List.insert_at(index, module)\n          |> Enum.uniq()\n\n        %{pipeline | callback: callback}\n    end\n  end\n\n  @spec insert_after(t(), module(), module()) :: t()\n  def insert_after(%__MODULE__{callback: callback} = pipeline, module, before_module) do\n    case Enum.find_index(callback, &(&1 == before_module)) do\n      nil ->\n        add(pipeline, module)\n\n      index ->\n        callback =\n          callback\n          |> 
List.insert_at(index + 1, module)\n          |> Enum.uniq()\n\n        %{pipeline | callback: callback}\n    end\n  end\n\n  @spec remove(t(), module()) :: t()\n  def remove(%__MODULE__{callback: callback} = pipeline, module) do\n    callback = Enum.reject(callback, &(&1 == module))\n    %{pipeline | callback: callback}\n  end\n\n  defp excute_runner(tokens, module) do\n    Enum.reduce(tokens, [], fn token, state ->\n      output = execute(module, token)\n\n      output =\n        case is_list(output) do\n          true ->\n            output\n\n          false ->\n            [output]\n        end\n\n      output = Enum.filter(output, &(not is_nil(&1)))\n\n      state ++ output\n    end)\n  end\n\n  defp execute(callback, token) when is_function(callback), do: callback.(token)\n  defp execute(module, token), do: module.call(token)\nend\n"
  },
  {
    "path": "lib/elasticlunr/protocol_implementations.ex",
    "content": "defimpl Elasticlunr.Serializer, for: Elasticlunr.Pipeline do\n  alias Elasticlunr.Pipeline\n\n  def serialize(%Pipeline{callback: callback}, opts) do\n    cache = Keyword.get(opts, :pipeline, %{})\n\n    Enum.map_join(callback, \",\", &Map.get(cache, &1, &1))\n  end\nend\n\ndefimpl Elasticlunr.Serializer, for: Elasticlunr.Field do\n  alias Elasticlunr.{Field, Serializer}\n\n  def serialize(\n        %Field{\n          pipeline: pipeline,\n          store: store_documents,\n          store_positions: store_positions\n        },\n        opts\n      ) do\n    name = Keyword.get(opts, :name)\n    pipeline = Serializer.serialize(pipeline, opts)\n\n    \"field#name:#{name}|pipeline:#{pipeline}|store_documents:#{store_documents}|store_positions:#{store_positions}\"\n  end\nend\n\ndefimpl Elasticlunr.Serializer, for: Elasticlunr.DB do\n  alias Elasticlunr.DB\n\n  def serialize(%DB{name: name, options: options}, _opts) do\n    options = Enum.map_join(options, \",\", &to_string(&1))\n\n    \"db#name:#{name}|options:#{options}\"\n  end\nend\n\ndefimpl Elasticlunr.Serializer, for: Elasticlunr.Index do\n  alias Elasticlunr.{Index, Serializer}\n\n  def serialize(%Index{db: db, fields: fields, name: name, pipeline: pipeline, ref: ref}, _opts) do\n    pipeline_opt = Serializer.serialize(pipeline)\n    db_settings = Serializer.serialize(db)\n\n    {_, pipeline_map} =\n      Enum.reduce(pipeline.callback, {0, %{}}, fn callback, {index, map} ->\n        {index + 1, Map.put(map, callback, index)}\n      end)\n\n    settings = \"settings#name:#{name}|ref:#{ref}|pipeline:#{pipeline_opt}\"\n\n    fields_settings =\n      Stream.map(fields, fn {name, field} ->\n        Serializer.serialize(field, name: name, pipeline: pipeline_map)\n      end)\n\n    [settings, db_settings, fields_settings]\n    |> Stream.flat_map(fn\n      list when is_list(list) -> list\n      value when is_binary(value) -> [value]\n      value -> value\n    end)\n  end\nend\n\ndefimpl Jason.Encoder, 
for: Tuple do\n  def encode({start_pos, end_pos}, opts) do\n    [start_pos, end_pos]\n    |> Jason.Encode.list(opts)\n  end\nend\n\ndefimpl Elasticlunr.Deserializer, for: Stream do\n  alias Elasticlunr.Deserializer.Parser\n\n  def deserialize(data) do\n    Parser.process(data)\n  end\nend\n\ndefimpl Elasticlunr.Deserializer, for: File.Stream do\n  alias Elasticlunr.Deserializer.Parser\n\n  def deserialize(data) do\n    Parser.process(data)\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/serializer.ex",
    "content": "defprotocol Elasticlunr.Serializer do\n  @spec serialize(struct(), keyword()) :: binary() | function()\n  def serialize(index, opts \\\\ [])\nend\n"
  },
  {
    "path": "lib/elasticlunr/storage/blackhole.ex",
    "content": "defmodule Elasticlunr.Storage.Blackhole do\n  @moduledoc \"\"\"\n  As the name implies, nothing is written anywhere.\n  \"\"\"\n  use Elasticlunr.Storage\n\n  @impl true\n  def load_all, do: []\n\n  @impl true\n  def write(_index), do: :ok\n\n  @impl true\n  def read(_name), do: {:error, \"can't read index from blackhole\"}\n\n  @impl true\n  def delete(_name), do: :ok\nend\n"
  },
  {
    "path": "lib/elasticlunr/storage/disk.ex",
    "content": "defmodule Elasticlunr.Storage.Disk do\n  @moduledoc \"\"\"\n  This storage provider writes data to the local disk of the running application.\n  ```elixir\n  config :elasticlunr,\n    storage: Elasticlunr.Storage.Disk\n  config :elasticlunr, Elasticlunr.Storage.Disk,\n    directory: \"/path/to/project/storage\"\n  ```\n  \"\"\"\n  use Elasticlunr.Storage\n\n  alias Elasticlunr.{DB, Deserializer, Index, Serializer}\n\n  require Logger\n\n  @data_file_ext \"data\"\n  @index_file_ext \"index\"\n\n  @extensions [@data_file_ext, @index_file_ext]\n\n  @impl true\n  def write(%Index{db: db, name: name} = index) do\n    directory = config(:directory, \".\")\n    data = Serializer.serialize(index)\n\n    with %{data: data_file, index: index_file} <- filenames(directory, name),\n         :ok <- DB.to(db, file: data_file) do\n      write_serialized_index_to_file(index_file, data)\n    end\n  end\n\n  @impl true\n  def read(name) do\n    directory = config(:directory, \".\")\n    %{data: data_file, index: index_file} = filenames(directory, name)\n\n    index =\n      File.stream!(index_file, ~w[compressed]a)\n      |> Deserializer.deserialize()\n\n    with %Index{db: db} <- index,\n         {:ok, db} <- DB.from(db, file: data_file) do\n      Index.update_documents_size(%{index | db: db})\n    else\n      false ->\n        Logger.info(\"[elasticlunr] unable to load data for index #{index.name}\")\n        index\n    end\n  end\n\n  @impl true\n  def load_all do\n    files()\n    |> Stream.filter(&String.ends_with?(&1, @index_file_ext))\n    |> Stream.map(fn file ->\n      name = without_ext(file, @index_file_ext)\n      read(name)\n    end)\n  end\n\n  @impl true\n  def delete(name) do\n    directory = config(:directory, \".\")\n    %{data: data_file, index: index_file} = filenames(directory, name)\n\n    with :ok <- File.rm(index_file) do\n      File.rm(data_file)\n    end\n  end\n\n  @spec files() :: list(binary())\n  def files do\n    directory = config(:directory, \".\")\n    extensions = Enum.map_join(@extensions, \",\", & &1)\n    match = Path.join(directory, \"*.{#{extensions}}\")\n\n    Path.wildcard(match)\n    |> Enum.map(&Path.expand/1)\n  end\n\n  @spec write_serialized_index_to_file(binary(), Enum.t()) :: :ok\n  def write_serialized_index_to_file(path, data) do\n    data\n    |> Stream.into(File.stream!(path, ~w[compressed]a), &\"#{&1}\\n\")\n    |> Stream.run()\n  end\n\n  defp filenames(directory, name) do\n    %{\n      index: Path.join(directory, \"#{name}.#{@index_file_ext}\"),\n      data: Path.join(directory, \"#{name}.#{@data_file_ext}\") |> String.to_charlist()\n    }\n  end\n\n  defp without_ext(file, ext), do: Path.basename(file, \".#{ext}\")\nend\n"
  },
  {
    "path": "lib/elasticlunr/storage/provider.ex",
    "content": "defmodule Elasticlunr.Storage.Provider do\n  @moduledoc false\n\n  alias Elasticlunr.Index\n\n  @callback load_all() :: Enum.t()\n  @callback read(name :: binary()) :: Index.t() | {:error, any()}\n  @callback delete(name :: binary()) :: :ok | {:error, any()}\n  @callback write(index :: Index.t()) :: :ok | {:error, any()}\nend\n"
  },
  {
    "path": "lib/elasticlunr/storage.ex",
    "content": "defmodule Elasticlunr.Storage do\n  @moduledoc \"\"\"\n  This is the storage interface that's used by the index manager.\n  ```elixir\n  config :elasticlunr,\n    storage: Elasticlunr.Storage.Blackhole # this is the default provider\n  ```\n  \"\"\"\n  alias Elasticlunr.Index\n  alias Elasticlunr.Storage.Blackhole\n\n  @spec all() :: Enum.t()\n  def all do\n    provider().load_all()\n  end\n\n  @spec write(Index.t()) :: :ok | {:error, any()}\n  def write(%Index{} = index) do\n    provider().write(index)\n  end\n\n  @spec read(binary()) :: Index.t() | {:error, any()}\n  def read(index_name) do\n    provider().read(index_name)\n  end\n\n  @spec delete(binary()) :: :ok | {:error, any()}\n  def delete(index_name) do\n    provider().delete(index_name)\n  end\n\n  defp provider, do: Application.get_env(:elasticlunr, :storage, Blackhole)\n\n  defmacro __using__(_) do\n    quote location: :keep do\n      @behaviour Elasticlunr.Storage.Provider\n\n      defp config(key, default \\\\ nil) do\n        Keyword.get(config_all(), key, default)\n      end\n\n      defp config_all, do: Application.get_env(:elasticlunr, __MODULE__, [])\n    end\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/tokenizer.ex",
    "content": "defmodule Elasticlunr.Tokenizer do\n  alias Elasticlunr.Token\n\n  @default_separator ~r/[\\s\\-]+/\n\n  @spec tokenize(binary() | number(), Regex.t()) :: list(Token.t())\n  def tokenize(str, separator \\\\ @default_separator)\n  def tokenize(str, separator) when is_binary(str), do: split(str, separator)\n\n  def tokenize(num, separator) when is_number(num) do\n    num\n    |> to_string()\n    |> split(separator)\n  end\n\n  defp split(str, separator) do\n    slice_end = 0\n    slice_start = 0\n    str_length = String.length(str)\n\n    str\n    |> String.downcase()\n    |> run_split(separator, slice_start, slice_end, str_length, [])\n  end\n\n  defp run_split(str, separator, slice_start, slice_end, str_length, tokens)\n       when slice_end <= str_length do\n    char = String.at(str, slice_end)\n    slice_length = slice_end - slice_start\n\n    with true <- match_string?(char, separator) || slice_end == str_length,\n         {:s, true} <- {:s, slice_length > 0} do\n      token =\n        str\n        |> String.slice(slice_start, slice_length)\n        |> to_token(slice_start, slice_length)\n\n      tokens = tokens ++ [token]\n      slice_start = slice_end + 1\n      run_split(str, separator, slice_start, slice_end + 1, str_length, tokens)\n    else\n      {:s, false} ->\n        index = slice_end + 1\n        run_split(str, separator, index, index, str_length, tokens)\n\n      false ->\n        run_split(str, separator, slice_start, slice_end + 1, str_length, tokens)\n    end\n  end\n\n  defp run_split(_str, _separator, _slice_start, _slice_end, _str_length, tokens) do\n    tokens\n  end\n\n  defp match_string?(nil, _separator), do: false\n\n  defp match_string?(char, separator) do\n    String.match?(char, separator)\n  end\n\n  defp to_token(str, start_index, end_index) do\n    Token.new(str, %{\n      end: end_index,\n      start: start_index\n    })\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/utils/process.ex",
    "content": "defmodule Elasticlunr.Utils.Process do\n  @spec child_pid?(tuple, atom) :: boolean\n  def child_pid?({:undefined, pid, :worker, [mod]}, mod) when is_pid(pid), do: true\n  def child_pid?(_child, _module), do: false\n\n  @spec id_from_pid(tuple, atom, atom) :: [atom | binary]\n  def id_from_pid({:undefined, pid, :worker, [mod]}, registry, mod),\n    do: Registry.keys(registry, pid)\n\n  @spec active_processes(atom, atom, atom) :: [any()]\n  def active_processes(supervisor, registry, module) do\n    supervisor\n    |> DynamicSupervisor.which_children()\n    |> Enum.filter(&child_pid?(&1, module))\n    |> Enum.flat_map(&id_from_pid(&1, registry, module))\n  end\nend\n"
  },
  {
    "path": "lib/elasticlunr/utlis.ex",
    "content": "defmodule Elasticlunr.Utils do\n  @spec levenshtein_distance(binary, binary) :: integer()\n  def levenshtein_distance(a, b) do\n    ta = String.downcase(a) |> to_charlist |> List.to_tuple()\n    tb = String.downcase(b) |> to_charlist |> List.to_tuple()\n    m = tuple_size(ta)\n    n = tuple_size(tb)\n    costs = Enum.reduce(0..m, %{}, fn i, acc -> Map.put(acc, {i, 0}, i) end)\n    costs = Enum.reduce(0..n, costs, fn j, acc -> Map.put(acc, {0, j}, j) end)\n\n    Enum.reduce(0..(n - 1), costs, fn j, acc ->\n      Enum.reduce(0..(m - 1), acc, fn i, map ->\n        # credo:disable-for-lines:2\n        d =\n          if elem(ta, i) == elem(tb, j) do\n            map[{i, j}]\n          else\n            # deletion\n            Enum.min([\n              map[{i, j + 1}] + 1,\n              # insertion\n              map[{i + 1, j}] + 1,\n              # substitution\n              map[{i, j}] + 1\n            ])\n          end\n\n        Map.put(map, {i + 1, j + 1}, d)\n      end)\n    end)\n    |> Map.get({m, n})\n  end\nend\n"
  },
  {
    "path": "mix.exs",
    "content": "defmodule Elasticlunr.MixProject do\n  use Mix.Project\n\n  @source_url \"https://github.com/heywhy/ex_elasticlunr\"\n\n  def project do\n    [\n      app: :elasticlunr,\n      version: \"0.6.4\",\n      elixir: \"~> 1.11\",\n      elixirc_paths: elixirc_paths(Mix.env()),\n      start_permanent: Mix.env() == :prod,\n      description: description(),\n      package: package(),\n      aliases: aliases(),\n      deps: deps(),\n      source_url: @source_url,\n\n      # Coverage\n      test_coverage: [tool: ExCoveralls],\n      preferred_cli_env: [\n        coveralls: :test,\n        \"coveralls.detail\": :test,\n        \"coveralls.post\": :test,\n        \"coveralls.html\": :test,\n        \"coveralls.json\": :test\n      ],\n\n      # Dialyxir\n      dialyzer: [\n        plt_file: {:no_warn, \"priv/plts/dialyzer.plt\"}\n      ],\n\n      # Docs\n      name: \"Elasticlunr\",\n      homepage_url: \"https://hexdocs.pm/elasticlunr\",\n      docs: [\n        main: \"readme\",\n        extras: [\"README.md\", \"LICENSE\"]\n      ]\n    ]\n  end\n\n  # Run \"mix help compile.app\" to learn about applications.\n  def application do\n    [\n      extra_applications: [:logger, :crypto],\n      mod: {Elasticlunr.Application, []}\n    ]\n  end\n\n  # Specifies which paths to compile per environment.\n  defp elixirc_paths(:test), do: [\"lib\", \"test/support\"]\n  defp elixirc_paths(_), do: [\"lib\"]\n\n  # Run \"mix help deps\" to learn about dependencies.\n  defp deps do\n    [\n      {:credo, \"~> 1.5\", only: [:dev, :test], runtime: false},\n      {:dialyxir, \"~> 1.1\", only: :dev, runtime: false},\n      {:ex_doc, \"~> 0.25\", only: :dev, runtime: false},\n      {:excoveralls, \"~> 0.14\", only: :test},\n      {:faker, \"~> 0.16\", only: :test},\n      {:jason, \"~> 1.3\"},\n      {:mox, \"~> 1.0\", only: :test},\n      {:stemmer, \"~> 1.0\"},\n      {:uniq, \"~> 0.4\"}\n    ]\n  end\n\n  defp aliases do\n    [\n      test: ~w[format credo test]\n    ]\n  
end\n\n  defp description do\n    \"Elasticlunr is a lightweight full-text search engine. It's a port of Elasticlunr.js with more improvements.\"\n  end\n\n  defp package do\n    [\n      files: [\"lib\", \"mix.exs\", \"README.md\"],\n      maintainers: [\"Atanda Rasheed\"],\n      licenses: [\"MIT License\"],\n      links: %{\n        \"GitHub\" => @source_url,\n        \"Docs\" => \"https://hexdocs.pm/elasticlunr\"\n      }\n    ]\n  end\nend\n"
  },
  {
    "path": "test/core/document_store_test.exs",
    "content": "defmodule Elasticlunr.DocumentStoreTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.DocumentStore\n\n  describe \"creating a new document store\" do\n    test \"defaults save attribute to true\" do\n      assert %DocumentStore{documents: %{}, document_info: %{}, length: 0, save: true} =\n               DocumentStore.new()\n    end\n\n    test \"without saving documents\" do\n      assert %DocumentStore{documents: %{}, document_info: %{}, length: 0, save: false} =\n               DocumentStore.new(false)\n    end\n  end\n\n  describe \"adding document to document store\" do\n    test \"adds a new document and save document\" do\n      document = %{id: 10}\n      document_store = DocumentStore.new()\n\n      assert %DocumentStore{documents: %{10 => ^document}} =\n               DocumentStore.add(document_store, 10, document)\n    end\n\n    test \"saves document and update length\" do\n      document_store = DocumentStore.new()\n\n      assert document_store = DocumentStore.add(document_store, 10, %{id: 10})\n      assert %DocumentStore{length: 1} = document_store\n      assert %DocumentStore{length: 2} = DocumentStore.add(document_store, 1, %{id: 1})\n    end\n\n    test \"updates document data and does not update length\" do\n      document_store = DocumentStore.new()\n\n      assert document_store = DocumentStore.add(document_store, 10, %{id: 10})\n      assert %DocumentStore{length: 1, documents: %{10 => %{id: 10}}} = document_store\n\n      assert %DocumentStore{length: 1, documents: %{10 => %{id: 1}}} =\n               DocumentStore.add(document_store, 10, %{id: 1})\n    end\n\n    test \"checks if document exists\" do\n      document_store = DocumentStore.new()\n\n      assert document_store = DocumentStore.add(document_store, 10, %{id: 10})\n      assert DocumentStore.exists?(document_store, 10)\n      refute DocumentStore.exists?(document_store, 100)\n    end\n  end\n\n  describe \"retrieving document from document store\" do\n    test 
\"returns document\" do\n      document = %{id: 10}\n\n      document_store =\n        DocumentStore.new()\n        |> DocumentStore.add(10, document)\n\n      assert ^document = DocumentStore.get(document_store, 10)\n    end\n\n    test \"returns nil for non-existing document\" do\n      document_store = DocumentStore.new()\n\n      assert is_nil(DocumentStore.get(document_store, 10))\n    end\n\n    test \"returns nil for non-persistent store\" do\n      document = %{id: 10}\n\n      document_store =\n        DocumentStore.new(false)\n        |> DocumentStore.add(10, document)\n\n      refute ^document = DocumentStore.get(document_store, 10)\n    end\n  end\n\n  describe \"removing document from document store\" do\n    test \"removes document\" do\n      document = %{id: 10}\n\n      document_store =\n        DocumentStore.new()\n        |> DocumentStore.add(10, document)\n\n      assert %DocumentStore{length: 1, documents: %{10 => %{id: 10}}} = document_store\n      assert %DocumentStore{length: 0, documents: %{}} = DocumentStore.remove(document_store, 10)\n    end\n  end\n\n  describe \"adding field length of document field\" do\n    test \"adds field length\" do\n      document = %{id: 10}\n\n      document_store =\n        DocumentStore.new()\n        |> DocumentStore.add(10, document)\n\n      assert %DocumentStore{\n               length: 1,\n               documents: %{10 => %{id: 10}},\n               document_info: %{10 => %{name: 20}}\n             } = DocumentStore.add_field_length(document_store, 10, :name, 20)\n    end\n\n    test \"updates field length\" do\n      document = %{id: 10}\n\n      document_store =\n        DocumentStore.new()\n        |> DocumentStore.add(10, document)\n\n      assert %DocumentStore{document_info: %{10 => %{name: 20}}} =\n               DocumentStore.add_field_length(document_store, 10, :name, 20)\n\n      assert %DocumentStore{document_info: %{10 => %{name: 36}}} =\n               
DocumentStore.update_field_length(document_store, 10, :name, 36)\n    end\n  end\n\n  describe \"retrieving document field length\" do\n    test \"returns nil\" do\n      document = %{id: 10}\n\n      document_store =\n        DocumentStore.new()\n        |> DocumentStore.add(10, document)\n\n      assert is_nil(DocumentStore.get_field_length(document_store, 10, :name))\n    end\n\n    test \"returns field length\" do\n      document = %{id: 10}\n\n      document_store =\n        DocumentStore.new()\n        |> DocumentStore.add(10, document)\n        |> DocumentStore.add_field_length(10, :name, 20)\n\n      assert 20 = DocumentStore.get_field_length(document_store, 10, :name)\n    end\n  end\n\n  describe \"reset document store\" do\n    test \"clears store attributes\" do\n      document = %{id: 10}\n\n      assert document_store =\n               DocumentStore.new()\n               |> DocumentStore.add(10, document)\n               |> DocumentStore.add_field_length(10, :name, 20)\n\n      assert %DocumentStore{} = document_store\n\n      assert %DocumentStore{documents: %{}, document_info: %{}, length: 0, save: true} =\n               DocumentStore.reset(document_store)\n    end\n  end\nend\n"
  },
  {
    "path": "test/core/field_test.exs",
    "content": "defmodule Elasticlunr.FieldTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{DB, Field, Pipeline, Token}\n\n  setup context do\n    opts = [\n      pipeline: Pipeline.new(),\n      db: DB.init(:field_test, ~w[public]a)\n    ]\n\n    field =\n      Field.new(opts)\n      |> Field.add([%{id: 1, content: \"hello world\"}])\n\n    :ok = on_exit(fn -> true = DB.destroy(field.db) end)\n\n    Map.put(context, :field, field)\n  end\n\n  test \"tokens/1\", %{field: field} do\n    tokens = Field.tokens(field)\n\n    assert %Stream{} = tokens\n    refute Enum.empty?(tokens)\n    assert [%{tf: 1, documents: documents} | _] = Enum.to_list(tokens)\n    assert [1] = Enum.to_list(documents)\n  end\n\n  test \"documents/1\", %{field: field} do\n    assert documents = Field.documents(field)\n    assert [1] = Enum.to_list(documents)\n  end\n\n  test \"term_frequency/2\", %{field: field} do\n    assert tf = Field.term_frequency(field, \"hello\")\n    assert [{1, 1.0}] = Enum.to_list(tf)\n    refute Field.term_frequency(field, \"missing\")\n  end\n\n  test \"has_token/2\", %{field: field} do\n    assert Field.has_token(field, \"hello\")\n    refute Field.has_token(field, \"missing\")\n  end\n\n  test \"get_token/2\", %{field: field} do\n    assert %{term: \"hello\", tf: 1} = Field.get_token(field, \"hello\")\n    refute Field.get_token(field, \"missing\")\n  end\n\n  test \"set_query_pipeline/2\", %{field: field} do\n    pipeline = Pipeline.new()\n    assert %Field{query_pipeline: nil} = field\n    assert %Field{query_pipeline: ^pipeline} = Field.set_query_pipeline(field, pipeline)\n  end\n\n  test \"add/2\", %{field: field} do\n    assert Enum.count(Field.documents(field)) == 1\n    assert field = Field.add(field, [%{id: 10, content: \"testing\"}])\n    assert Enum.count(Field.documents(field)) == 2\n    assert Field.has_token(field, \"testing\")\n  end\n\n  test \"length/2\", %{field: field} do\n    assert Field.length(field, :ids) == 1\n    assert 
Field.length(field, :idf, \"hello\") == 1\n    assert Field.length(field, :term, \"world\") == 1\n    assert Field.length(field, :tf, \"world\") == 1\n  end\n\n  test \"update/2\", %{field: field} do\n    assert field = Field.update(field, [%{id: 1, content: \"worse\"}])\n    assert Field.has_token(field, \"worse\")\n    assert Enum.count(Field.documents(field)) == 1\n  end\n\n  test \"remove/2\", %{field: field} do\n    assert field = Field.remove(field, [1])\n    refute Field.has_token(field, \"worse\")\n    assert Enum.empty?(Field.documents(field))\n  end\n\n  test \"analyze/3\", %{field: field} do\n    assert [%Token{token: \"coming\"}] = Field.analyze(field, \"coming\", [])\n    assert [%Token{token: \"coming\"}] = Field.analyze(field, \"coming\", is_query: true)\n\n    assert [%Token{token: \"foo\"}] =\n             field\n             |> Field.set_query_pipeline(Pipeline.new([fn _ -> Token.new(\"foo\") end]))\n             |> Field.analyze(\"coming\", is_query: true)\n  end\n\n  test \"terms/3\", %{field: field} do\n    assert %{1 => _} = Field.terms(field, terms: [\"hello\"])\n    assert %{1 => _} = Field.terms(field, terms: [~r/hello/])\n    assert %{1 => _} = Field.terms(field, terms: [\"hello\"], fuzziness: 2)\n    assert Enum.empty?(Field.terms(field, terms: [\"missing\"]))\n  end\nend\n"
  },
  {
    "path": "test/core/index_test.exs",
    "content": "defmodule Elasticlunr.IndexTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{Field, Index, Pipeline, Token}\n  alias Faker.Address.En, as: Address\n\n  describe \"creating an index\" do\n    test \"creates a new instance\" do\n      assert %Index{name: name} = Index.new()\n      assert is_binary(name)\n      assert %Index{name: :test_index, ref: \"id\", fields: %{}} = Index.new(name: :test_index)\n\n      assert %Index{name: :test_index, ref: \"name\", fields: %{}} =\n               Index.new(name: :test_index, ref: \"name\")\n    end\n\n    test \"creates a new instance and populate fields\" do\n      assert %Index{fields: %{\"id\" => %Field{}, \"name\" => %Field{}}} =\n               Index.add_field(Index.new(), \"name\")\n    end\n  end\n\n  describe \"modifying an index\" do\n    test \"adds new fields\" do\n      index = Index.new()\n      assert %Index{fields: %{}} = index\n      assert index = Index.add_field(index, \"name\")\n      assert %Index{fields: %{\"name\" => %Field{}}} = index\n\n      assert %Index{fields: %{\"name\" => %Field{}, \"bio\" => %Field{}}} =\n               Index.add_field(index, \"bio\")\n    end\n\n    test \"save document\" do\n      index = Index.add_field(Index.new(), \"name\")\n\n      assert %Index{fields: %{\"name\" => %Field{store: true}}} = index\n      assert %Index{fields: %{\"name\" => %Field{store: false}}} = Index.save_document(index, false)\n    end\n\n    test \"updates a field\" do\n      index = Index.new()\n      assert %Index{fields: %{}} = index\n      assert index = Index.add_field(index, \"name\")\n      assert field = Index.get_field(index, \"name\")\n      assert %Field{query_pipeline: nil} = field\n\n      pipeline = Pipeline.new()\n\n      assert %Field{query_pipeline: ^pipeline} =\n               index\n               |> Index.update_field(\"name\", %{field | query_pipeline: pipeline})\n               |> Index.get_field(\"name\")\n    end\n\n    test \"fails to update missing field\" do\n   
   index = Index.new()\n      assert %Index{fields: %{}} = index\n\n      assert_raise RuntimeError, \"Unknown field address in index\", fn ->\n        Index.update_field(index, \"address\", Field.new([]))\n      end\n    end\n  end\n\n  describe \"fiddling with an index\" do\n    test \"adds document\" do\n      index =\n        Index.new()\n        |> Index.add_field(\"bio\")\n\n      assert index =\n               Index.add_documents(index, [\n                 %{\n                   \"id\" => 10,\n                   \"bio\" => Faker.Lorem.paragraph()\n                 }\n               ])\n\n      assert %Index{documents_size: 1} = index\n\n      assert %Index{documents_size: 2} =\n               Index.add_documents(index, [\n                 %{\n                   \"id\" => 29,\n                   \"bio\" => Faker.Lorem.paragraph()\n                 }\n               ])\n    end\n\n    @tag :skip\n    test \"adds documents and flatten nested attributes\" do\n      index =\n        Index.new()\n        |> Index.add_field(\"name\")\n        |> Index.add_field(\"address\")\n\n      document = %{\n        \"id\" => 20,\n        \"name\" => \"nelson\",\n        \"address\" => %{\n          \"city\" => Address.city(),\n          \"country\" => Address.country_code(),\n          \"line1\" => Address.street_address(),\n          \"line2\" => Address.secondary_address(),\n          \"state\" => Address.state()\n        }\n      }\n\n      index = Index.add_documents(index, [document])\n\n      query = %{\n        \"bool\" => %{\n          \"should\" => %{\n            \"match\" => %{\"address.city\" => get_in(document, ~w[address city])}\n          }\n        }\n      }\n\n      assert %Index{fields: %{\"address.city\" => %Field{}}, documents_size: 1} = index\n      refute Index.search(index, %{\"query\" => query}) |> Enum.empty?()\n    end\n\n    @tag :skip\n    test \"removes documents with nested attributes\" do\n      index =\n        Index.new()\n        |> 
Index.add_field(\"name\")\n        |> Index.add_field(\"address\")\n\n      document = %{\n        \"id\" => 20,\n        \"name\" => \"nelson\",\n        \"address\" => %{\n          \"city\" => Address.city(),\n          \"country\" => Address.country_code(),\n          \"line1\" => Address.street_address(),\n          \"line2\" => Address.secondary_address(),\n          \"state\" => Address.state()\n        }\n      }\n\n      index = Index.add_documents(index, [document])\n\n      assert %Index{fields: %{\"address.city\" => %Field{}}, documents_size: 1} = index\n\n      assert %Index{fields: %{\"address.city\" => %Field{}}, documents_size: 0} =\n               Index.remove_documents(index, [20])\n    end\n\n    test \"allows addition of document with empty field\" do\n      index =\n        Index.new()\n        |> Index.add_field(\"bio\")\n        |> Index.add_field(\"title\")\n\n      assert index = Index.add_documents(index, [%{\"id\" => 10, \"bio\" => \"\", \"title\" => \"test\"}])\n\n      assert term_frequency =\n               index\n               |> Index.get_field(\"title\")\n               |> Field.term_frequency(\"test\")\n\n      assert index\n             |> Index.get_field(\"title\")\n             |> Field.length(:tf, \"test\")\n             |> Kernel.==(1)\n\n      assert term_frequency\n             |> Enum.find(&(elem(&1, 0) == 10))\n             |> Kernel.==({10, 1})\n    end\n\n    @tag :skip\n    test \"fails when adding duplicate document\" do\n      index = Index.add_field(Index.new(), \"bio\")\n\n      document = %{\n        \"id\" => 10,\n        \"bio\" => Faker.Lorem.paragraph()\n      }\n\n      assert index = Index.add_documents(index, [document])\n\n      assert_raise RuntimeError, \"Document id 10 already exists in the index\", fn ->\n        Index.add_documents(index, [document])\n      end\n    end\n\n    test \"removes document\" do\n      index =\n        Index.new()\n        |> Index.add_field(\"id\")\n        |> 
Index.add_field(\"bio\")\n\n      document = %{\n        \"id\" => 10,\n        \"bio\" => \"this is a test\"\n      }\n\n      document_2 = %{\n        \"id\" => 30,\n        \"bio\" => \"this is another test\"\n      }\n\n      assert index = Index.add_documents(index, [document_2, document])\n      assert %Index{documents_size: 2} = index\n      assert index = Index.remove_documents(index, [10])\n      assert %Index{documents_size: 1} = index\n      assert field = Index.get_field(index, \"bio\")\n      refute Field.has_token(field, \"a\")\n      assert Field.has_token(field, \"another\")\n      assert is_nil(Field.get_token(field, \"a\"))\n      assert %{idf: idf} = Field.get_token(field, \"another\")\n      assert idf > 0\n      %{documents: documents} = Field.get_token(field, \"another\")\n      assert [30] = Enum.to_list(documents)\n    end\n\n    test \"does not remove unknown document\" do\n      index = Index.add_field(Index.new(), \"bio\")\n\n      document = %{\n        \"id\" => 10,\n        \"bio\" => Faker.Lorem.paragraph()\n      }\n\n      assert index = Index.add_documents(index, [document])\n      assert %Index{documents_size: 1} = index\n      assert %Index{documents_size: 1} = Index.remove_documents(index, [11])\n    end\n\n    test \"update existing document\" do\n      index = Index.add_field(Index.new(), \"bio\")\n\n      document = %{\n        \"id\" => 10,\n        \"bio\" => Faker.Lorem.paragraph()\n      }\n\n      index = Index.add_documents(index, [document])\n\n      assert %Index{documents_size: 1} = index\n      updated_document = %{document | \"bio\" => Faker.Lorem.paragraph()}\n      assert %Index{documents_size: 1} = Index.update_documents(index, [updated_document])\n    end\n\n    test \"search for a document\" do\n      index = Index.add_field(Index.new(), \"bio\")\n\n      document = %{\n        \"id\" => 10,\n        \"bio\" => \"foo\"\n      }\n\n      index = Index.add_documents(index, [document])\n\n      assert 
Index.search(index, \"foo\") |> Enum.count() == 1\n      updated_document = %{document | \"bio\" => \"bar\"}\n      index = Index.update_documents(index, [updated_document])\n      assert Index.search(index, \"bar\") |> Enum.count() == 1\n      assert Index.search(index, \"foo\") |> Enum.empty?()\n    end\n\n    test \"allows the use of multiple, different pipelines for searching and indexing\" do\n      index = Index.add_field(Index.new(), \"info\")\n\n      callback = fn %Token{token: token} ->\n        tokens = [token]\n\n        case token == \"foo\" do\n          false ->\n            tokens\n\n          true ->\n            ~w[bar baz barry] ++ tokens\n        end\n      end\n\n      query_pipeline = Pipeline.new([callback])\n\n      field =\n        index\n        |> Index.get_field(\"info\")\n        |> Field.set_query_pipeline(query_pipeline)\n\n      index = Index.update_field(index, \"info\", field)\n\n      index =\n        index\n        |> Index.add_documents([\n          %{\"id\" => \"a\", \"info\" => \"Barry had a beer with Fred in the bar\"},\n          %{\"id\" => \"b\", \"info\" => \"the bar is empty\"}\n        ])\n\n      results =\n        Index.search(index, %{\n          \"query\" => %{\n            \"match\" => %{\"info\" => \"foo\"}\n          }\n        })\n\n      assert Enum.count(results) == 2\n      assert [%{score: score_1}, %{score: score_2}] = results\n      assert score_2 < score_1\n\n      results =\n        Index.search(index, %{\n          \"query\" => %{\n            \"match\" => %{\"info\" => \"fred\"}\n          }\n        })\n\n      assert Enum.count(results) == 1\n    end\n  end\nend\n"
  },
  {
    "path": "test/deserializer_test.exs",
    "content": "defmodule Elasticlunr.DeserializerTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{Deserializer, Index}\n\n  test \"deserialize index\" do\n    data = [\n      \"settings#name:index|ref:id|pipeline:\",\n      \"db#name:elasticlunr_index|options:compressed,named_table,set,public\",\n      \"field#name:id|pipeline:Elixir.Elasticlunr.Index.IdPipeline|store_documents:false|store_positions:false\"\n    ]\n\n    index =\n      to_stream(data)\n      |> Deserializer.deserialize()\n\n    assert %Index{name: \"index\"} = index\n  end\n\n  defp to_stream(data) do\n    Stream.iterate(0, &(&1 + 1))\n    |> Stream.map(&Enum.at(data, &1))\n    |> Stream.take(Enum.count(data))\n  end\nend\n"
  },
  {
    "path": "test/dsl_test.exs",
    "content": "defmodule Elasticlunr.DslTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{Index, Pipeline, Token}\n  alias Elasticlunr.Dsl.{BoolQuery, MatchAllQuery, MatchQuery, NotQuery, TermsQuery}\n  alias Elasticlunr.Dsl.QueryRepository\n\n  setup context do\n    callback = fn\n      %Token{} = token ->\n        token\n\n      str ->\n        str\n        |> String.split(\" \")\n        |> String.downcase()\n        |> Enum.map(&Token.new(&1))\n    end\n\n    pipeline = Pipeline.new([callback])\n\n    index =\n      Index.new()\n      |> Index.add_field(\"content\", pipeline: pipeline)\n      |> Index.add_documents([\n        %{\"id\" => 1, \"content\" => \"The quick fox jumped over the lazy dog\"},\n        %{\n          \"id\" => 2,\n          \"content\" =>\n            \"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas viverra enim non purus rutrum porta ut non urna. Nullam eu ante eget nisi laoreet pretium. Curabitur varius velit vel viverra facilisis. Pellentesque et condimentum mauris. Quisque faucibus varius interdum. Fusce cursus pretium tempus. Ut gravida tortor et mi dignissim sagittis. Aliquam ullamcorper dignissim arcu sollicitudin fermentum. Nunc elementum tortor ex, sit amet posuere lectus accumsan quis. Vivamus sit amet eros blandit, sagittis quam at, vulputate felis. Ut faucibus pretium feugiat. Fusce diam felis, euismod ac tellus id, blandit venenatis dolor. Nullam porttitor suscipit diam, a feugiat dui pharetra at.\"\n        },\n        %{\"id\" => 3, \"content\" => \"Lorem dog\"},\n        %{\n          \"id\" => 4,\n          \"content\" => \"livebook is elixir's own jupyter. it's a very impressive impression.\"\n        },\n        %{\n          \"id\" => 5,\n          \"content\" =>\n            \"there are lots of contributors to the elixir project and many cool projects using elixir, ex. 
livebook, elixir_nx and so on\"\n        }\n      ])\n\n    Map.put(context, :index, index)\n  end\n\n  describe \"match_all\" do\n    test \"parses correctly\" do\n      assert %MatchAllQuery{boost: 2.5} = QueryRepository.parse(\"match_all\", %{\"boost\" => 2.5})\n    end\n\n    test \"correctly operates match_all query\", %{index: index} do\n      query = MatchAllQuery.new()\n\n      assert result = MatchAllQuery.score(query, index, [])\n      assert Enum.count(result) == 5\n\n      for %{score: score} <- result do\n        assert score == 1\n      end\n    end\n  end\n\n  describe \"terms\" do\n    test \"parses correctly\" do\n      assert %MatchAllQuery{} = QueryRepository.parse(\"terms\", %{})\n\n      assert %TermsQuery{field: \"name\", terms: [\"nelson\"]} =\n               QueryRepository.parse(\"terms\", %{\"name\" => \"nelson\"})\n\n      assert %TermsQuery{field: \"name\", terms: [\"kim\"]} =\n               QueryRepository.parse(\"terms\", %{\n                 \"name\" => %{\"value\" => \"kim\"}\n               })\n\n      assert %BoolQuery{\n               should: [\n                 %TermsQuery{field: \"country\", terms: [\"us\"], boost: 1},\n                 %TermsQuery{field: \"name\", terms: [\"john\"], boost: 1}\n               ]\n             } = QueryRepository.parse(\"terms\", %{\"name\" => \"john\", \"country\" => \"us\"})\n    end\n\n    test \"performs base functionality\", %{index: index} do\n      query =\n        TermsQuery.new(\n          field: \"content\",\n          terms: [\"fox\"]\n        )\n\n      assert result = TermsQuery.score(query, index, [])\n      assert Enum.count(result) == 1\n      assert [%{ref: 1}] = result\n    end\n\n    test \"boost\", %{index: index} do\n      non_boost_query =\n        TermsQuery.new(\n          field: \"content\",\n          terms: [\"fox\"]\n        )\n\n      boost_query =\n        TermsQuery.new(\n          field: \"content\",\n          terms: [\"fox\"],\n          boost: 2\n        )\n\n   
   assert boost_result = TermsQuery.score(boost_query, index, [])\n      assert non_boost_result = TermsQuery.score(non_boost_query, index, [])\n      assert Enum.count(boost_result) == Enum.count(non_boost_result)\n      assert [%{score: score_1}] = boost_result\n      assert [%{score: score_2}] = non_boost_result\n      assert score_1 == score_2 * 2\n    end\n  end\n\n  describe \"bool\" do\n    test \"parses correctly\" do\n      assert %BoolQuery{must: %TermsQuery{field: \"country\", terms: [\"us\"]}} =\n               QueryRepository.parse(\"bool\", %{\"must\" => %{\"terms\" => %{\"country\" => \"us\"}}})\n\n      assert %BoolQuery{\n               minimum_should_match: 0,\n               must: %TermsQuery{field: \"country\", terms: [\"us\"]},\n               must_not: %TermsQuery{field: \"gender\", terms: [\"male\"]},\n               filter: [%MatchQuery{field: \"balance\", query: 1000}],\n               should: [%MatchQuery{field: \"account_type\", query: \"savings\"}]\n             } =\n               QueryRepository.parse(\"bool\", %{\n                 \"must\" => %{\"terms\" => %{\"country\" => \"us\"}},\n                 \"must_not\" => %{\"terms\" => %{\"gender\" => \"male\"}},\n                 \"filter\" => %{\"match\" => %{\"balance\" => 1000}},\n                 \"should\" => %{\"match\" => %{\"account_type\" => \"savings\"}}\n               })\n    end\n\n    test \"filters via must functionality\", %{index: index} do\n      query =\n        BoolQuery.new(\n          must: TermsQuery.new(field: \"content\", terms: [\"lorem\"]),\n          should: [\n            TermsQuery.new(field: \"content\", terms: [\"dog\"])\n          ]\n        )\n\n      assert BoolQuery.score(query, index, []) |> Enum.count() == 1\n    end\n\n    test \"filters via must_not functionality\", %{index: index} do\n      query =\n        BoolQuery.new(\n          must: TermsQuery.new(field: \"content\", terms: [\"lorem\"]),\n          must_not: TermsQuery.new(field: 
\"content\", terms: [\"ipsum\"]),\n          should: [\n            TermsQuery.new(field: \"content\", terms: [\"dog\"])\n          ]\n        )\n\n      refute BoolQuery.score(query, index, [])\n             |> Enum.empty?()\n    end\n\n    test \"check if document has positions before trying to access it\", %{index: index} do\n      assert Index.search(index, \"me\") |> Enum.empty?()\n    end\n  end\n\n  describe \"match\" do\n    test \"parses correctly\" do\n      assert %MatchAllQuery{boost: 1} = QueryRepository.parse(\"match\", %{})\n\n      assert %MatchQuery{field: \"country\", query: \"us\"} =\n               QueryRepository.parse(\"match\", %{\"country\" => \"us\"})\n\n      assert %MatchQuery{field: \"country\", query: \"us\", operator: \"and\"} =\n               QueryRepository.parse(\"match\", %{\n                 \"country\" => %{\"query\" => \"us\", \"operator\" => \"and\"}\n               })\n\n      assert %BoolQuery{\n               should: [\n                 %MatchQuery{field: \"city\", query: \"arizona\"},\n                 %MatchQuery{field: \"country\", query: \"us\"}\n               ]\n             } =\n               QueryRepository.parse(\"match\", %{\n                 \"city\" => \"arizona\",\n                 \"country\" => \"us\"\n               })\n    end\n\n    test \"performs base functionality\", %{index: index} do\n      query = MatchQuery.new(field: \"content\", query: \"brown fox\")\n\n      assert results = MatchQuery.score(query, index, [])\n      assert Enum.count(results) == 1\n      assert [%{ref: 1}] = results\n    end\n\n    test \"honours minimum_should_match\", %{index: index} do\n      query = MatchQuery.new(field: \"content\", query: \"brown fox quick\", minimum_should_match: 2)\n\n      assert results = MatchQuery.score(query, index, [])\n      assert Enum.count(results) == 1\n      assert [%{ref: 1}] = results\n    end\n\n    test \"honours and operator\", %{index: index} do\n      query =\n        MatchQuery.new(\n  
        field: \"content\",\n          query: \"fox quick\",\n          operator: \"and\"\n        )\n\n      assert results = MatchQuery.score(query, index, [])\n      assert Enum.count(results) == 1\n      assert [%{ref: 1}] = results\n    end\n  end\n\n  describe \"not\" do\n    test \"parses correctly\" do\n      assert %NotQuery{inner_query: %BoolQuery{}} = QueryRepository.parse(\"not\", %{\"bool\" => %{}})\n\n      assert %NotQuery{inner_query: %BoolQuery{}} =\n               QueryRepository.parse(\"not\", %{\n                 \"bool\" => %{\n                   \"should\" => [\n                     %{\"match\" => %{\"name\" => \"john\"}}\n                   ]\n                 }\n               })\n    end\n\n    test \"applies inner query\", %{index: index} do\n      query =\n        NotQuery.new(\n          BoolQuery.new(\n            should: [\n              MatchQuery.new(field: \"content\", query: \"quick\"),\n              MatchQuery.new(field: \"content\", query: \"lorem\")\n            ]\n          )\n        )\n\n      assert results = NotQuery.score(query, index, [])\n      assert Enum.count(results) == 2\n    end\n  end\nend\n"
  },
  {
    "path": "test/manager/index_manager_test.exs",
    "content": "defmodule Elasticlunr.IndexManagerTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{Index, IndexManager}\n\n  describe \"working with index manager\" do\n    test \"saves an index\" do\n      index = Index.new()\n\n      assert {:ok, ^index} = IndexManager.save(index)\n    end\n\n    test \"fails when saving duplicate index\" do\n      index = Index.new()\n\n      assert {:ok, ^index} = IndexManager.save(index)\n      assert {:error, {:already_started, _}} = IndexManager.save(index)\n    end\n\n    test \"updates existing index\" do\n      index = Index.new()\n\n      assert {:ok, ^index} = IndexManager.save(index)\n      assert ^index = IndexManager.update(index)\n    end\n\n    test \"fails update action for non-existent index\" do\n      index = Index.new()\n\n      assert :not_running = IndexManager.update(index)\n    end\n\n    test \"removes an index\" do\n      index = Index.new()\n\n      assert {:ok, ^index} = IndexManager.save(index)\n      assert :ok = IndexManager.remove(index)\n      assert :not_running = IndexManager.get(index.name)\n    end\n\n    test \"fails to remove a non-existent index\" do\n      index = Index.new()\n\n      assert :not_running = IndexManager.remove(index)\n    end\n\n    test \"return a running instance\" do\n      index = Index.new()\n\n      {:ok, _} = IndexManager.save(index)\n      assert ^index = IndexManager.get(index.name)\n      assert :not_running = IndexManager.get(\"unknown-index\")\n    end\n  end\nend\n"
  },
  {
    "path": "test/pipeline/stemmer_test.exs",
    "content": "defmodule Elasticlunr.Pipeline.StemmerTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.Token\n  alias Elasticlunr.{Pipeline, Pipeline.Stemmer}\n\n  import Elasticlunr.Test.Fixture\n\n  describe \"running stemmer against tokens\" do\n    test \"works as expected\" do\n      stemmer_fixture()\n      |> Enum.each(fn {word, stemmed_word} ->\n        token = Token.new(word)\n        assert Stemmer.call(token) == Token.new(stemmed_word)\n      end)\n    end\n\n    test \"is a default runner for default pipeline\" do\n      assert Pipeline.default_runners()\n             |> Enum.any?(fn\n               Stemmer -> true\n               _ -> false\n             end)\n    end\n  end\nend\n"
  },
  {
    "path": "test/pipeline/stop_word_filter_test.exs",
    "content": "defmodule Elasticlunr.Pipeline.StopWordFilterTest do\n  @moduledoc false\n  use ExUnit.Case\n\n  alias Elasticlunr.{Pipeline, Token}\n  alias Elasticlunr.Pipeline.StopWordFilter\n\n  describe \"running stop_word_filter against tokens\" do\n    test \"is a default runner for default pipeline\" do\n      assert Pipeline.default_runners()\n             |> Enum.any?(fn\n               StopWordFilter -> true\n               _ -> false\n             end)\n    end\n\n    test \"removes stop words\" do\n      stop_words = ~w[the and but than when]\n\n      assert [] =\n               stop_words\n               |> Enum.map(&Token.new/1)\n               |> Enum.reject(&is_nil(StopWordFilter.call(&1)))\n    end\n  end\nend\n"
  },
  {
    "path": "test/pipeline/trimmer_test.exs",
    "content": "defmodule Elasticlunr.Pipeline.TrimmerTest do\n  @moduledoc false\n  use ExUnit.Case\n\n  alias Elasticlunr.{Pipeline, Token}\n  alias Elasticlunr.Pipeline.Trimmer\n\n  describe \"running trimmer against tokens\" do\n    test \"is a default runner for default pipeline\" do\n      assert Pipeline.default_runners()\n             |> Enum.any?(fn\n               Trimmer -> true\n               _ -> false\n             end)\n    end\n\n    test \"passes through latin characters\" do\n      assert %Token{token: \"hello\"} = Token.new(\"hello\")\n    end\n\n    test \"removes leading and trailing punctuation\" do\n      assert %Token{token: \"hello\"} = Token.new(\"hello.\") |> Trimmer.call()\n      assert %Token{token: \"it's\"} = Token.new(\"it's\") |> Trimmer.call()\n      assert %Token{token: \"james\"} = Token.new(\"james'\") |> Trimmer.call()\n      assert %Token{token: \"stop\"} = Token.new(\"stop!'\") |> Trimmer.call()\n      assert %Token{token: \"first\"} = Token.new(\"first'\") |> Trimmer.call()\n      assert %Token{token: \"\"} = Token.new(\"\") |> Trimmer.call()\n      assert %Token{token: \"tag\"} = Token.new(\"[tag]\") |> Trimmer.call()\n      assert %Token{token: \"tag\"} = Token.new(\"[[[tag]]]\") |> Trimmer.call()\n      assert %Token{token: \"hello\"} = Token.new(\"[[!@#@!hello]]]}}}\") |> Trimmer.call()\n      assert %Token{token: \"hello\"} = Token.new(\"~!@@@hello***()()()]]\") |> Trimmer.call()\n    end\n  end\nend\n"
  },
  {
    "path": "test/pipeline_test.exs",
    "content": "defmodule Elasticlunr.PipelineTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{Pipeline, Token, Tokenizer}\n  alias Elasticlunr.Pipeline.{Stemmer, StopWordFilter, Trimmer}\n\n  describe \"creating pipeline\" do\n    test \"adds a runner to the queue\" do\n      assert pipeline = Pipeline.new([])\n      assert %Pipeline{callback: []} = pipeline\n      assert %Pipeline{callback: [Trimmer]} = Pipeline.add(pipeline, Trimmer)\n    end\n\n    test \"ignores duplicate runner in the queue\" do\n      pipeline = Pipeline.new([])\n      assert %Pipeline{callback: []} = pipeline\n      assert %Pipeline{callback: [Trimmer]} = Pipeline.add(pipeline, Trimmer)\n      assert %Pipeline{callback: [Trimmer]} = Pipeline.add(pipeline, Trimmer)\n    end\n  end\n\n  describe \"updating pipeline\" do\n    test \"removes runner from queue\" do\n      pipeline = Pipeline.new([Stemmer, Trimmer])\n\n      assert %Pipeline{callback: [Stemmer, Trimmer]} = pipeline\n      assert %Pipeline{callback: [Stemmer]} = Pipeline.remove(pipeline, Trimmer)\n    end\n\n    test \"inserts runner at position\" do\n      pipeline = Pipeline.new([Stemmer, Trimmer])\n\n      assert %Pipeline{callback: [Stemmer, Trimmer]} = pipeline\n\n      assert pipeline = Pipeline.insert_before(pipeline, StopWordFilter, Trimmer)\n      assert %Pipeline{callback: [Stemmer, StopWordFilter, Trimmer]} = pipeline\n      assert pipeline = Pipeline.remove(pipeline, Stemmer)\n      assert %Pipeline{callback: [StopWordFilter, Trimmer]} = pipeline\n\n      assert %Pipeline{callback: [StopWordFilter, Stemmer, Trimmer]} =\n               Pipeline.insert_after(pipeline, Stemmer, StopWordFilter)\n    end\n  end\n\n  describe \"running pipeline\" do\n    test \"executes runners in the queue\" do\n      pipeline = Pipeline.new(Pipeline.default_runners())\n      tokens = Tokenizer.tokenize(\"consignment worlds\")\n\n      assert [\n               %Token{token: \"consign\"},\n               %Token{token: \"world\"}\n          
   ] = Pipeline.run(pipeline, tokens)\n    end\n\n    test \"runs a custom function\" do\n      pipeline = Pipeline.new([& &1])\n      tokens = Tokenizer.tokenize(\"consignment worlds\")\n\n      assert ^tokens = Pipeline.run(pipeline, tokens)\n    end\n  end\nend\n"
  },
  {
    "path": "test/serializer_test.exs",
    "content": "defmodule Elasticlunr.SerializerTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{Index, Serializer}\n\n  test \"serialize index without documents\" do\n    index = Index.new(name: \"index\")\n\n    structure = [\n      \"settings#name:index|ref:id|pipeline:\",\n      \"db#name:elasticlunr_index|options:compressed,named_table,ordered_set,public\",\n      \"field#name:id|pipeline:Elixir.Elasticlunr.Index.IdPipeline|store_documents:false|store_positions:false\"\n    ]\n\n    data = Serializer.serialize(index) |> Enum.into([])\n\n    assert structure == data\n  end\n\n  test \"serialize index with documents\" do\n    index =\n      Index.new(name: \"index\")\n      |> Index.add_field(\"body\")\n      |> Index.add_documents([%{\"id\" => 1, \"body\" => \"hello world\"}])\n\n    structure = [\n      \"settings#name:index|ref:id|pipeline:\",\n      \"db#name:elasticlunr_index|options:compressed,named_table,ordered_set,public\",\n      \"field#name:body|pipeline:|store_documents:true|store_positions:true\",\n      \"field#name:id|pipeline:Elixir.Elasticlunr.Index.IdPipeline|store_documents:false|store_positions:false\"\n    ]\n\n    data = Serializer.serialize(index) |> Enum.into([])\n\n    assert structure == data\n  end\nend\n"
  },
  {
    "path": "test/storage/disk_test.exs",
    "content": "defmodule Elasticlunr.Storage.DiskTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.Index\n  alias Elasticlunr.Pipeline\n  alias Elasticlunr.Storage.Disk\n\n  @otp_app :elasticlunr\n\n  setup do\n    storage_path = Path.join(__DIR__, \"../../storage\")\n\n    Application.put_env(@otp_app, Disk, directory: storage_path)\n\n    on_exit(fn ->\n      Enum.each(Disk.files(), &File.rm!/1)\n\n      Application.delete_env(@otp_app, Disk)\n    end)\n  end\n\n  defp fixture_storage(_context) do\n    opts = Application.get_env(@otp_app, Disk)\n    storage_path = Path.join(__DIR__, \"../support/fixture\")\n    Application.put_env(@otp_app, Disk, directory: storage_path)\n\n    on_exit(fn ->\n      Application.put_env(@otp_app, Disk, opts)\n    end)\n  end\n\n  describe \"serializing an index\" do\n    test \"writes to disk\" do\n      index = Index.new()\n      options = Application.get_env(@otp_app, Disk)\n      file = Path.join(options[:directory], \"#{index.name}.index\")\n\n      assert :ok = Disk.write(index)\n      assert File.exists?(file)\n      assert {:ok, %File.Stat{size: size}} = File.stat(file)\n      assert size > 0\n    end\n  end\n\n  describe \"unserializing an index\" do\n    test \"reads from disk\" do\n      pipeline = Pipeline.new(Pipeline.default_runners())\n\n      document = %{\n        \"id\" => Faker.UUID.v4(),\n        \"last_name\" => Faker.Person.last_name(),\n        \"first_name\" => Faker.Person.first_name()\n      }\n\n      index =\n        Index.new(pipeline: pipeline)\n        |> Index.add_field(\"first_name\")\n        |> Index.add_field(\"last_name\")\n        |> Index.add_documents([document])\n\n      :ok = Disk.write(index)\n\n      assert index == Disk.read(index.name)\n    end\n  end\n\n  describe \"getting all serialized indexes\" do\n    setup [:fixture_storage]\n\n    test \"loads and deserialize indexes\" do\n      assert [%Index{name: \"users\"} = index] =\n               Disk.load_all()\n               |> 
Enum.to_list()\n\n      assert [_] = Index.search(index, \"rose\")\n    end\n  end\n\n  describe \"deleting index from storage\" do\n    test \"works successfully\" do\n      index = Index.new()\n      options = Application.get_env(@otp_app, Disk)\n      file = Path.join(options[:directory], \"#{index.name}.index\")\n\n      :ok = Disk.write(index)\n      assert :ok = Disk.delete(index.name)\n      refute File.exists?(file)\n    end\n\n    test \"fails for missing index\" do\n      assert {:error, :enoent} = Disk.delete(\"missing\")\n    end\n  end\nend\n"
  },
  {
    "path": "test/storage_test.exs",
    "content": "defmodule Elasticlunr.StorageTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{Index, Storage}\n  alias Elasticlunr.Storage.{Blackhole, Mock}\n\n  import Mox\n\n  setup do\n    Mox.stub_with(Mock, Blackhole)\n    Application.put_env(:elasticlunr, :storage, Mock)\n\n    on_exit(fn ->\n      Application.delete_env(:elasticlunr, :storage)\n    end)\n  end\n\n  test \"preload/0\" do\n    index = Index.new()\n\n    expect(Mock, :load_all, fn -> [index] end)\n\n    assert [^index] = Storage.all()\n  end\n\n  test \"write/1\" do\n    index = Index.new()\n\n    expect(Mock, :write, 2, fn\n      ^index -> :ok\n      %{name: nil} -> {:error, \"invalid index\"}\n    end)\n\n    assert :ok = Storage.write(index)\n    assert {:error, \"invalid index\"} = Storage.write(Index.new(name: nil))\n  end\n\n  test \"read/1\" do\n    expect(Mock, :read, 2, fn\n      \"missing\" -> {:error, \"missing index\"}\n      name -> Index.new(name: name)\n    end)\n\n    assert {:error, \"missing index\"} = Storage.read(\"missing\")\n    assert %Index{name: \"users\"} = Storage.read(\"users\")\n  end\n\n  test \"delete/1\" do\n    expect(Mock, :delete, 2, fn\n      \"unknown-index\" -> :error\n      _ -> :ok\n    end)\n\n    assert :error = Storage.delete(\"unknown-index\")\n    assert :ok = Storage.delete(\"users\")\n  end\nend\n"
  },
  {
    "path": "test/support/fixture/fixture.ex",
    "content": "defmodule Elasticlunr.Test.Fixture do\n  @moduledoc false\n\n  @spec stemmer_fixture() :: map()\n  def stemmer_fixture do\n    with path <- Path.join(__DIR__, \"./stemmer_fixture.json\"),\n         {:ok, content} <- File.read(path),\n         {:ok, map} <- Jason.decode(content) do\n      map\n    end\n  end\nend\n"
  },
  {
    "path": "test/support/fixture/stemmer_fixture.json",
    "content": "{\n  \"consign\": \"consign\",\n  \"consigned\": \"consign\",\n  \"consigning\": \"consign\",\n  \"consignment\": \"consign\",\n  \"consist\": \"consist\",\n  \"consisted\": \"consist\",\n  \"consistency\": \"consist\",\n  \"consistent\": \"consist\",\n  \"consistently\": \"consist\",\n  \"consisting\": \"consist\",\n  \"consists\": \"consist\",\n  \"consolation\": \"consol\",\n  \"consolations\": \"consol\",\n  \"consolatory\": \"consolatori\",\n  \"console\": \"consol\",\n  \"consoled\": \"consol\",\n  \"consoles\": \"consol\",\n  \"consolidate\": \"consolid\",\n  \"consolidated\": \"consolid\",\n  \"consolidating\": \"consolid\",\n  \"consoling\": \"consol\",\n  \"consols\": \"consol\",\n  \"consonant\": \"conson\",\n  \"consort\": \"consort\",\n  \"consorted\": \"consort\",\n  \"consorting\": \"consort\",\n  \"conspicuous\": \"conspicu\",\n  \"conspicuously\": \"conspicu\",\n  \"conspiracy\": \"conspiraci\",\n  \"conspirator\": \"conspir\",\n  \"conspirators\": \"conspir\",\n  \"conspire\": \"conspir\",\n  \"conspired\": \"conspir\",\n  \"conspiring\": \"conspir\",\n  \"constable\": \"constabl\",\n  \"constables\": \"constabl\",\n  \"constance\": \"constanc\",\n  \"constancy\": \"constanc\",\n  \"constant\": \"constant\",\n  \"knack\": \"knack\",\n  \"knackeries\": \"knackeri\",\n  \"knacks\": \"knack\",\n  \"knag\": \"knag\",\n  \"knave\": \"knave\",\n  \"knaves\": \"knave\",\n  \"knavish\": \"knavish\",\n  \"kneaded\": \"knead\",\n  \"kneading\": \"knead\",\n  \"knee\": \"knee\",\n  \"kneel\": \"kneel\",\n  \"kneeled\": \"kneel\",\n  \"kneeling\": \"kneel\",\n  \"kneels\": \"kneel\",\n  \"knees\": \"knee\",\n  \"knell\": \"knell\",\n  \"knelt\": \"knelt\",\n  \"knew\": \"knew\",\n  \"knick\": \"knick\",\n  \"knif\": \"knif\",\n  \"knife\": \"knife\",\n  \"knight\": \"knight\",\n  \"knights\": \"knight\",\n  \"knit\": \"knit\",\n  \"knits\": \"knit\",\n  \"knitted\": \"knit\",\n  \"knitting\": \"knit\",\n  \"knives\": \"knive\",\n  \"knob\": 
\"knob\",\n  \"knobs\": \"knob\",\n  \"knock\": \"knock\",\n  \"knocked\": \"knock\",\n  \"knocker\": \"knocker\",\n  \"knockers\": \"knocker\",\n  \"knocking\": \"knock\",\n  \"knocks\": \"knock\",\n  \"knopp\": \"knopp\",\n  \"knot\": \"knot\",\n  \"knots\": \"knot\",\n  \"lay\": \"lay\",\n  \"try\": \"tri\"\n}"
  },
  {
    "path": "test/test_helper.exs",
    "content": "ExUnit.start()\nFaker.start()\n\nMox.defmock(Elasticlunr.Storage.Mock, for: Elasticlunr.Storage.Provider)\n"
  },
  {
    "path": "test/tokenizer_test.exs",
    "content": "defmodule Elasticlunr.TokenizerTest do\n  use ExUnit.Case\n\n  alias Elasticlunr.{Token, Tokenizer}\n\n  describe \"tokenizing string\" do\n    test \"splits to list of tokens\" do\n      str = \"the man came home\"\n\n      tokenized_str = [\n        Token.new(\"the\", %{start: 0, end: 3}),\n        Token.new(\"man\", %{start: 4, end: 3}),\n        Token.new(\"came\", %{start: 8, end: 4}),\n        Token.new(\"home\", %{start: 13, end: 4})\n      ]\n\n      assert ^tokenized_str = Tokenizer.tokenize(str)\n    end\n\n    test \"downcase tokens\" do\n      assert ~w[foo bar] =\n               Tokenizer.tokenize(\"FOO BAR\")\n               |> Enum.map(& &1.token)\n    end\n\n    test \"removes whitespace and hyphens\" do\n      assert ~w[foo bar] =\n               Tokenizer.tokenize(\"  FOO    BAR   \")\n               |> Enum.map(& &1.token)\n\n      assert ~w[take the new york san francisco flight] =\n               Tokenizer.tokenize(\"take the New York-San Francisco flight\")\n               |> Enum.map(& &1.token)\n\n      assert ~w[solve for a b] =\n               Tokenizer.tokenize(\"Solve for A - B\")\n               |> Enum.map(& &1.token)\n    end\n\n    test \"with custom separator\" do\n      assert ~w[hello world i love] =\n               Tokenizer.tokenize(\"hello/world/I/love\", ~r/\\/+/)\n               |> Enum.map(& &1.token)\n\n      assert ~w[hello world i love] =\n               Tokenizer.tokenize(\"hello\\\\world\\\\I\\\\love\", ~r/[\\\\]+/)\n               |> Enum.map(& &1.token)\n\n      assert ~w[hello world apple pie] =\n               Tokenizer.tokenize(\"hello/world/%%%apple%pie\", ~r/[\\/\\%]+/)\n               |> Enum.map(& &1.token)\n    end\n  end\nend\n"
  }
]