[
  {
    "path": ".dockerignore",
    "content": "/html/assets/js/lib/loadAnalytics.js\n/html/assets/svg\n/html/audio\n/html/*.html\n!/html/docs.html\n/target\nout\n*.old\n.env\nipadic/\nunidic/\nunidic-mecab/\ndata/\ndata\n/resources/\n/suggestions\n/suggestions_\naudio_old\n*.bat\n=======\nindexes/\nindexes_old\nmassiv.out\n*/*/target\ntmp/\nimg_scan_tmp/\n/news\n.gitignore\n/*.zip\n"
  },
  {
    "path": ".github/FUNDING.yml",
    "content": "# These are supported funding model platforms\n\ncustom: [\"https://paypal.me/JojiiOfficial\", \"https://paypal.me/yukaru1\"]\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "content": "---\nname: Bug report\nabout: Create a report to help us improve. Check the trello board first!\ntitle: ''\nlabels: ''\nassignees: ''\n\n---\n\nBefore you file a bug report, check the [trello board](https://trello.com/b/nmG0xgaW/jotoba-roadmap) if this has already added on the road map.\n\n**Describe the bug**\nA clear and concise description of what the bug is.\n\n**To Reproduce**\nSteps to reproduce the behavior:\n1. Go to '...'\n2. Click on '....'\n3. Scroll down to '....'\n4. See error\n\n**Expected behavior**\nA clear and concise description of what you expected to happen.\n\n**Screenshots**\nIf applicable, add screenshots to help explain your problem.\n\n**Desktop (please complete the following information):**\n - OS: [e.g. iOS]\n - Browser [e.g. chrome, safari]\n\n**Smartphone (please complete the following information):**\n - Device: [e.g. iPhone6]\n - OS: [e.g. iOS8.1]\n - Browser [e.g. stock browser, safari]\n\n**Additional context**\nAdd any other context about the problem here.\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.md",
    "content": "---\nname: Feature request\nabout: Suggest an idea for this project\ntitle: ''\nlabels: ''\nassignees: ''\n\n---\n\n**Is your feature request related to a problem? Please describe.**\nA clear and concise description of what the problem is. Ex. I'm always frustrated when [...]\n\n**Describe the solution you'd like**\nA clear and concise description of what you want to happen.\n\n**Describe alternatives you've considered**\nA clear and concise description of any alternative solutions or features you've considered.\n\n**Additional context**\nAdd any other context or screenshots about the feature request here.\n"
  },
  {
    "path": ".github/workflows/docker-image.yml",
    "content": "name: Docker Image CI\n\non:\n  push:\n    branches: [ master ]\n  workflow_dispatch:\n\njobs:\n  build:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v2\n    - name: Build the Docker image\n      run: docker build . --file Dockerfile --tag ghcr.io/wedontpanic/jotoba:latest\n    - name: Export image\n      run: |\n        echo ${{ secrets.GH_TOKEN }} | docker login ghcr.io -u ${{ secrets.GH_USER }} --password-stdin\n        docker push ghcr.io/wedontpanic/jotoba:latest\n    # - name: Update server\n    #   uses: garygrossgarten/github-action-ssh@release\n    #   with:\n    #     command: /home/jotoba/update.sh\n    #     host: ${{ secrets.HOST }}\n    #     port: ${{ secrets.PORT }}\n    #     username: ${{ secrets.SSH_USER }}\n    #     privateKey: ${{ secrets.SSH_KEY}}\n"
  },
  {
    "path": ".gitignore",
    "content": "/html/assets/js/lib/loadAnalytics.js\n/html/assets/svg\n/html/audio\n/html/assets/sitemap.xml\n/html/assets/*.html\n!/html/assets/docs.html\n/target\nout\n*.old\n.env\nipadic/\nunidic/\nunidic-mecab/\ndata/\ndata\n/resources/\n/suggestions\n/suggestions_\naudio_old\n*.bat\n=======\n/indexes/\nindexes_old\nmassiv.out\n*/*/target\ntmp/\nimg_scan_tmp/\n/news\ncluster_find\n.idea\n/resources_src/\n/*.zip\n"
  },
  {
    "path": "Cargo.toml",
    "content": "[workspace]\n\nmembers = [\"jotoba_bin\", \"lib/*\"]\n\n[profile.dev]\nopt-level = 2\nincremental = true\nlto = false\nstrip = false\n\n[profile.release]\nlto = \"fat\"\nstrip = true\n"
  },
  {
    "path": "Dockerfile",
    "content": "FROM rust:1.70.0-bullseye as build\n\nWORKDIR app\n\nCOPY ./lib ./lib\nCOPY ./.git ./.git\nCOPY ./locales ./locales\nCOPY ./Cargo.lock ./\nCOPY ./Cargo.toml ./\nCOPY ./tests ./tests\nCOPY ./scripts ./scripts\nCOPY ./jotoba_bin ./jotoba_bin\nCOPY ./LICENSE ./\n\nRUN apt clean\nRUN apt-get update --allow-releaseinfo-change -y\nRUN apt upgrade -y\nRUN apt install build-essential cmake pkg-config libssl-dev libleptonica-dev libtesseract-dev clang tesseract-ocr-jpn -y\n\n# Build your program for release\nRUN cargo build --release\n\nRUN mv target/release/jotoba .\n\nFROM debian:bullseye\n\nWORKDIR app\n\nRUN apt-get update --allow-releaseinfo-change -y\nRUN apt upgrade -y\nRUN apt install build-essential pkg-config cmake libssl-dev libleptonica-dev libtesseract-dev clang tesseract-ocr-jpn -y\n\nCOPY --from=build /app/jotoba .\nCOPY --from=build /app/locales ./locales\n\nRUN useradd -s /bin/bash runuser\nUSER runuser\n\n# Run the binary\nCMD [\"./jotoba\",\"-s\"]\n"
  },
  {
    "path": "LICENSE",
    "content": "                    GNU AFFERO GENERAL PUBLIC LICENSE\n                       Version 3, 19 November 2007\n\n Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n                            Preamble\n\n  The GNU Affero General Public License is a free, copyleft license for\nsoftware and other kinds of works, specifically designed to ensure\ncooperation with the community in the case of network server software.\n\n  The licenses for most software and other practical works are designed\nto take away your freedom to share and change the works.  By contrast,\nour General Public Licenses are intended to guarantee your freedom to\nshare and change all versions of a program--to make sure it remains free\nsoftware for all its users.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthem if you wish), that you receive source code or can get it if you\nwant it, that you can change the software or use pieces of it in new\nfree programs, and that you know you can do these things.\n\n  Developers that use our General Public Licenses protect your rights\nwith two steps: (1) assert copyright on the software, and (2) offer\nyou this License which gives you legal permission to copy, distribute\nand/or modify the software.\n\n  A secondary benefit of defending all users' freedom is that\nimprovements made in alternate versions of the program, if they\nreceive widespread use, become available for other developers to\nincorporate.  Many developers of free software are heartened and\nencouraged by the resulting cooperation.  
However, in the case of\nsoftware used on network servers, this result may fail to come about.\nThe GNU General Public License permits making a modified version and\nletting the public access it on a server without ever releasing its\nsource code to the public.\n\n  The GNU Affero General Public License is designed specifically to\nensure that, in such cases, the modified source code becomes available\nto the community.  It requires the operator of a network server to\nprovide the source code of the modified version running there to the\nusers of that server.  Therefore, public use of a modified version, on\na publicly accessible server, gives the public access to the source\ncode of the modified version.\n\n  An older license, called the Affero General Public License and\npublished by Affero, was designed to accomplish similar goals.  This is\na different license, not a version of the Affero GPL, but Affero has\nreleased a new version of the Affero GPL which permits relicensing under\nthis license.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\n                       TERMS AND CONDITIONS\n\n  0. Definitions.\n\n  \"This License\" refers to version 3 of the GNU Affero General Public License.\n\n  \"Copyright\" also means copyright-like laws that apply to other kinds of\nworks, such as semiconductor masks.\n\n  \"The Program\" refers to any copyrightable work licensed under this\nLicense.  Each licensee is addressed as \"you\".  \"Licensees\" and\n\"recipients\" may be individuals or organizations.\n\n  To \"modify\" a work means to copy from or adapt all or part of the work\nin a fashion requiring copyright permission, other than the making of an\nexact copy.  
The resulting work is called a \"modified version\" of the\nearlier work or a work \"based on\" the earlier work.\n\n  A \"covered work\" means either the unmodified Program or a work based\non the Program.\n\n  To \"propagate\" a work means to do anything with it that, without\npermission, would make you directly or secondarily liable for\ninfringement under applicable copyright law, except executing it on a\ncomputer or modifying a private copy.  Propagation includes copying,\ndistribution (with or without modification), making available to the\npublic, and in some countries other activities as well.\n\n  To \"convey\" a work means any kind of propagation that enables other\nparties to make or receive copies.  Mere interaction with a user through\na computer network, with no transfer of a copy, is not conveying.\n\n  An interactive user interface displays \"Appropriate Legal Notices\"\nto the extent that it includes a convenient and prominently visible\nfeature that (1) displays an appropriate copyright notice, and (2)\ntells the user that there is no warranty for the work (except to the\nextent that warranties are provided), that licensees may convey the\nwork under this License, and how to view a copy of this License.  If\nthe interface presents a list of user commands or options, such as a\nmenu, a prominent item in the list meets this criterion.\n\n  1. Source Code.\n\n  The \"source code\" for a work means the preferred form of the work\nfor making modifications to it.  
\"Object code\" means any non-source\nform of a work.\n\n  A \"Standard Interface\" means an interface that either is an official\nstandard defined by a recognized standards body, or, in the case of\ninterfaces specified for a particular programming language, one that\nis widely used among developers working in that language.\n\n  The \"System Libraries\" of an executable work include anything, other\nthan the work as a whole, that (a) is included in the normal form of\npackaging a Major Component, but which is not part of that Major\nComponent, and (b) serves only to enable use of the work with that\nMajor Component, or to implement a Standard Interface for which an\nimplementation is available to the public in source code form.  A\n\"Major Component\", in this context, means a major essential component\n(kernel, window system, and so on) of the specific operating system\n(if any) on which the executable work runs, or a compiler used to\nproduce the work, or an object code interpreter used to run it.\n\n  The \"Corresponding Source\" for a work in object code form means all\nthe source code needed to generate, install, and (for an executable\nwork) run the object code and to modify the work, including scripts to\ncontrol those activities.  However, it does not include the work's\nSystem Libraries, or general-purpose tools or generally available free\nprograms which are used unmodified in performing those activities but\nwhich are not part of the work.  
For example, Corresponding Source\nincludes interface definition files associated with source files for\nthe work, and the source code for shared libraries and dynamically\nlinked subprograms that the work is specifically designed to require,\nsuch as by intimate data communication or control flow between those\nsubprograms and other parts of the work.\n\n  The Corresponding Source need not include anything that users\ncan regenerate automatically from other parts of the Corresponding\nSource.\n\n  The Corresponding Source for a work in source code form is that\nsame work.\n\n  2. Basic Permissions.\n\n  All rights granted under this License are granted for the term of\ncopyright on the Program, and are irrevocable provided the stated\nconditions are met.  This License explicitly affirms your unlimited\npermission to run the unmodified Program.  The output from running a\ncovered work is covered by this License only if the output, given its\ncontent, constitutes a covered work.  This License acknowledges your\nrights of fair use or other equivalent, as provided by copyright law.\n\n  You may make, run and propagate covered works that you do not\nconvey, without conditions so long as your license otherwise remains\nin force.  You may convey covered works to others for the sole purpose\nof having them make modifications exclusively for you, or provide you\nwith facilities for running those works, provided that you comply with\nthe terms of this License in conveying all material for which you do\nnot control copyright.  Those thus making or running the covered works\nfor you must do so exclusively on your behalf, under your direction\nand control, on terms that prohibit them from making any copies of\nyour copyrighted material outside their relationship with you.\n\n  Conveying under any other circumstances is permitted solely under\nthe conditions stated below.  Sublicensing is not allowed; section 10\nmakes it unnecessary.\n\n  3. 
Protecting Users' Legal Rights From Anti-Circumvention Law.\n\n  No covered work shall be deemed part of an effective technological\nmeasure under any applicable law fulfilling obligations under article\n11 of the WIPO copyright treaty adopted on 20 December 1996, or\nsimilar laws prohibiting or restricting circumvention of such\nmeasures.\n\n  When you convey a covered work, you waive any legal power to forbid\ncircumvention of technological measures to the extent such circumvention\nis effected by exercising rights under this License with respect to\nthe covered work, and you disclaim any intention to limit operation or\nmodification of the work as a means of enforcing, against the work's\nusers, your or third parties' legal rights to forbid circumvention of\ntechnological measures.\n\n  4. Conveying Verbatim Copies.\n\n  You may convey verbatim copies of the Program's source code as you\nreceive it, in any medium, provided that you conspicuously and\nappropriately publish on each copy an appropriate copyright notice;\nkeep intact all notices stating that this License and any\nnon-permissive terms added in accord with section 7 apply to the code;\nkeep intact all notices of the absence of any warranty; and give all\nrecipients a copy of this License along with the Program.\n\n  You may charge any price or no price for each copy that you convey,\nand you may offer support or warranty protection for a fee.\n\n  5. Conveying Modified Source Versions.\n\n  You may convey a work based on the Program, or the modifications to\nproduce it from the Program, in the form of source code under the\nterms of section 4, provided that you also meet all of these conditions:\n\n    a) The work must carry prominent notices stating that you modified\n    it, and giving a relevant date.\n\n    b) The work must carry prominent notices stating that it is\n    released under this License and any conditions added under section\n    7.  
This requirement modifies the requirement in section 4 to\n    \"keep intact all notices\".\n\n    c) You must license the entire work, as a whole, under this\n    License to anyone who comes into possession of a copy.  This\n    License will therefore apply, along with any applicable section 7\n    additional terms, to the whole of the work, and all its parts,\n    regardless of how they are packaged.  This License gives no\n    permission to license the work in any other way, but it does not\n    invalidate such permission if you have separately received it.\n\n    d) If the work has interactive user interfaces, each must display\n    Appropriate Legal Notices; however, if the Program has interactive\n    interfaces that do not display Appropriate Legal Notices, your\n    work need not make them do so.\n\n  A compilation of a covered work with other separate and independent\nworks, which are not by their nature extensions of the covered work,\nand which are not combined with it such as to form a larger program,\nin or on a volume of a storage or distribution medium, is called an\n\"aggregate\" if the compilation and its resulting copyright are not\nused to limit the access or legal rights of the compilation's users\nbeyond what the individual works permit.  Inclusion of a covered work\nin an aggregate does not cause this License to apply to the other\nparts of the aggregate.\n\n  6. 
Conveying Non-Source Forms.\n\n  You may convey a covered work in object code form under the terms\nof sections 4 and 5, provided that you also convey the\nmachine-readable Corresponding Source under the terms of this License,\nin one of these ways:\n\n    a) Convey the object code in, or embodied in, a physical product\n    (including a physical distribution medium), accompanied by the\n    Corresponding Source fixed on a durable physical medium\n    customarily used for software interchange.\n\n    b) Convey the object code in, or embodied in, a physical product\n    (including a physical distribution medium), accompanied by a\n    written offer, valid for at least three years and valid for as\n    long as you offer spare parts or customer support for that product\n    model, to give anyone who possesses the object code either (1) a\n    copy of the Corresponding Source for all the software in the\n    product that is covered by this License, on a durable physical\n    medium customarily used for software interchange, for a price no\n    more than your reasonable cost of physically performing this\n    conveying of source, or (2) access to copy the\n    Corresponding Source from a network server at no charge.\n\n    c) Convey individual copies of the object code with a copy of the\n    written offer to provide the Corresponding Source.  This\n    alternative is allowed only occasionally and noncommercially, and\n    only if you received the object code with such an offer, in accord\n    with subsection 6b.\n\n    d) Convey the object code by offering access from a designated\n    place (gratis or for a charge), and offer equivalent access to the\n    Corresponding Source in the same way through the same place at no\n    further charge.  You need not require recipients to copy the\n    Corresponding Source along with the object code.  
If the place to\n    copy the object code is a network server, the Corresponding Source\n    may be on a different server (operated by you or a third party)\n    that supports equivalent copying facilities, provided you maintain\n    clear directions next to the object code saying where to find the\n    Corresponding Source.  Regardless of what server hosts the\n    Corresponding Source, you remain obligated to ensure that it is\n    available for as long as needed to satisfy these requirements.\n\n    e) Convey the object code using peer-to-peer transmission, provided\n    you inform other peers where the object code and Corresponding\n    Source of the work are being offered to the general public at no\n    charge under subsection 6d.\n\n  A separable portion of the object code, whose source code is excluded\nfrom the Corresponding Source as a System Library, need not be\nincluded in conveying the object code work.\n\n  A \"User Product\" is either (1) a \"consumer product\", which means any\ntangible personal property which is normally used for personal, family,\nor household purposes, or (2) anything designed or sold for incorporation\ninto a dwelling.  In determining whether a product is a consumer product,\ndoubtful cases shall be resolved in favor of coverage.  For a particular\nproduct received by a particular user, \"normally used\" refers to a\ntypical or common use of that class of product, regardless of the status\nof the particular user or of the way in which the particular user\nactually uses, or expects or is expected to use, the product.  
A product\nis a consumer product regardless of whether the product has substantial\ncommercial, industrial or non-consumer uses, unless such uses represent\nthe only significant mode of use of the product.\n\n  \"Installation Information\" for a User Product means any methods,\nprocedures, authorization keys, or other information required to install\nand execute modified versions of a covered work in that User Product from\na modified version of its Corresponding Source.  The information must\nsuffice to ensure that the continued functioning of the modified object\ncode is in no case prevented or interfered with solely because\nmodification has been made.\n\n  If you convey an object code work under this section in, or with, or\nspecifically for use in, a User Product, and the conveying occurs as\npart of a transaction in which the right of possession and use of the\nUser Product is transferred to the recipient in perpetuity or for a\nfixed term (regardless of how the transaction is characterized), the\nCorresponding Source conveyed under this section must be accompanied\nby the Installation Information.  But this requirement does not apply\nif neither you nor any third party retains the ability to install\nmodified object code on the User Product (for example, the work has\nbeen installed in ROM).\n\n  The requirement to provide Installation Information does not include a\nrequirement to continue to provide support service, warranty, or updates\nfor a work that has been modified or installed by the recipient, or for\nthe User Product in which it has been modified or installed.  
Access to a\nnetwork may be denied when the modification itself materially and\nadversely affects the operation of the network or violates the rules and\nprotocols for communication across the network.\n\n  Corresponding Source conveyed, and Installation Information provided,\nin accord with this section must be in a format that is publicly\ndocumented (and with an implementation available to the public in\nsource code form), and must require no special password or key for\nunpacking, reading or copying.\n\n  7. Additional Terms.\n\n  \"Additional permissions\" are terms that supplement the terms of this\nLicense by making exceptions from one or more of its conditions.\nAdditional permissions that are applicable to the entire Program shall\nbe treated as though they were included in this License, to the extent\nthat they are valid under applicable law.  If additional permissions\napply only to part of the Program, that part may be used separately\nunder those permissions, but the entire Program remains governed by\nthis License without regard to the additional permissions.\n\n  When you convey a copy of a covered work, you may at your option\nremove any additional permissions from that copy, or from any part of\nit.  (Additional permissions may be written to require their own\nremoval in certain cases when you modify the work.)  
You may place\nadditional permissions on material, added by you to a covered work,\nfor which you have or can give appropriate copyright permission.\n\n  Notwithstanding any other provision of this License, for material you\nadd to a covered work, you may (if authorized by the copyright holders of\nthat material) supplement the terms of this License with terms:\n\n    a) Disclaiming warranty or limiting liability differently from the\n    terms of sections 15 and 16 of this License; or\n\n    b) Requiring preservation of specified reasonable legal notices or\n    author attributions in that material or in the Appropriate Legal\n    Notices displayed by works containing it; or\n\n    c) Prohibiting misrepresentation of the origin of that material, or\n    requiring that modified versions of such material be marked in\n    reasonable ways as different from the original version; or\n\n    d) Limiting the use for publicity purposes of names of licensors or\n    authors of the material; or\n\n    e) Declining to grant rights under trademark law for use of some\n    trade names, trademarks, or service marks; or\n\n    f) Requiring indemnification of licensors and authors of that\n    material by anyone who conveys the material (or modified versions of\n    it) with contractual assumptions of liability to the recipient, for\n    any liability that these contractual assumptions directly impose on\n    those licensors and authors.\n\n  All other non-permissive additional terms are considered \"further\nrestrictions\" within the meaning of section 10.  If the Program as you\nreceived it, or any part of it, contains a notice stating that it is\ngoverned by this License along with a term that is a further\nrestriction, you may remove that term.  
If a license document contains\na further restriction but permits relicensing or conveying under this\nLicense, you may add to a covered work material governed by the terms\nof that license document, provided that the further restriction does\nnot survive such relicensing or conveying.\n\n  If you add terms to a covered work in accord with this section, you\nmust place, in the relevant source files, a statement of the\nadditional terms that apply to those files, or a notice indicating\nwhere to find the applicable terms.\n\n  Additional terms, permissive or non-permissive, may be stated in the\nform of a separately written license, or stated as exceptions;\nthe above requirements apply either way.\n\n  8. Termination.\n\n  You may not propagate or modify a covered work except as expressly\nprovided under this License.  Any attempt otherwise to propagate or\nmodify it is void, and will automatically terminate your rights under\nthis License (including any patent licenses granted under the third\nparagraph of section 11).\n\n  However, if you cease all violation of this License, then your\nlicense from a particular copyright holder is reinstated (a)\nprovisionally, unless and until the copyright holder explicitly and\nfinally terminates your license, and (b) permanently, if the copyright\nholder fails to notify you of the violation by some reasonable means\nprior to 60 days after the cessation.\n\n  Moreover, your license from a particular copyright holder is\nreinstated permanently if the copyright holder notifies you of the\nviolation by some reasonable means, this is the first time you have\nreceived notice of violation of this License (for any work) from that\ncopyright holder, and you cure the violation prior to 30 days after\nyour receipt of the notice.\n\n  Termination of your rights under this section does not terminate the\nlicenses of parties who have received copies or rights from you under\nthis License.  
If your rights have been terminated and not permanently\nreinstated, you do not qualify to receive new licenses for the same\nmaterial under section 10.\n\n  9. Acceptance Not Required for Having Copies.\n\n  You are not required to accept this License in order to receive or\nrun a copy of the Program.  Ancillary propagation of a covered work\noccurring solely as a consequence of using peer-to-peer transmission\nto receive a copy likewise does not require acceptance.  However,\nnothing other than this License grants you permission to propagate or\nmodify any covered work.  These actions infringe copyright if you do\nnot accept this License.  Therefore, by modifying or propagating a\ncovered work, you indicate your acceptance of this License to do so.\n\n  10. Automatic Licensing of Downstream Recipients.\n\n  Each time you convey a covered work, the recipient automatically\nreceives a license from the original licensors, to run, modify and\npropagate that work, subject to this License.  You are not responsible\nfor enforcing compliance by third parties with this License.\n\n  An \"entity transaction\" is a transaction transferring control of an\norganization, or substantially all assets of one, or subdividing an\norganization, or merging organizations.  If propagation of a covered\nwork results from an entity transaction, each party to that\ntransaction who receives a copy of the work also receives whatever\nlicenses to the work the party's predecessor in interest had or could\ngive under the previous paragraph, plus a right to possession of the\nCorresponding Source of the work from the predecessor in interest, if\nthe predecessor has it or can get it with reasonable efforts.\n\n  You may not impose any further restrictions on the exercise of the\nrights granted or affirmed under this License.  
For example, you may\nnot impose a license fee, royalty, or other charge for exercise of\nrights granted under this License, and you may not initiate litigation\n(including a cross-claim or counterclaim in a lawsuit) alleging that\nany patent claim is infringed by making, using, selling, offering for\nsale, or importing the Program or any portion of it.\n\n  11. Patents.\n\n  A \"contributor\" is a copyright holder who authorizes use under this\nLicense of the Program or a work on which the Program is based.  The\nwork thus licensed is called the contributor's \"contributor version\".\n\n  A contributor's \"essential patent claims\" are all patent claims\nowned or controlled by the contributor, whether already acquired or\nhereafter acquired, that would be infringed by some manner, permitted\nby this License, of making, using, or selling its contributor version,\nbut do not include claims that would be infringed only as a\nconsequence of further modification of the contributor version.  For\npurposes of this definition, \"control\" includes the right to grant\npatent sublicenses in a manner consistent with the requirements of\nthis License.\n\n  Each contributor grants you a non-exclusive, worldwide, royalty-free\npatent license under the contributor's essential patent claims, to\nmake, use, sell, offer for sale, import and otherwise run, modify and\npropagate the contents of its contributor version.\n\n  In the following three paragraphs, a \"patent license\" is any express\nagreement or commitment, however denominated, not to enforce a patent\n(such as an express permission to practice a patent or covenant not to\nsue for patent infringement).  
To \"grant\" such a patent license to a\nparty means to make such an agreement or commitment not to enforce a\npatent against the party.\n\n  If you convey a covered work, knowingly relying on a patent license,\nand the Corresponding Source of the work is not available for anyone\nto copy, free of charge and under the terms of this License, through a\npublicly available network server or other readily accessible means,\nthen you must either (1) cause the Corresponding Source to be so\navailable, or (2) arrange to deprive yourself of the benefit of the\npatent license for this particular work, or (3) arrange, in a manner\nconsistent with the requirements of this License, to extend the patent\nlicense to downstream recipients.  \"Knowingly relying\" means you have\nactual knowledge that, but for the patent license, your conveying the\ncovered work in a country, or your recipient's use of the covered work\nin a country, would infringe one or more identifiable patents in that\ncountry that you have reason to believe are valid.\n\n  If, pursuant to or in connection with a single transaction or\narrangement, you convey, or propagate by procuring conveyance of, a\ncovered work, and grant a patent license to some of the parties\nreceiving the covered work authorizing them to use, propagate, modify\nor convey a specific copy of the covered work, then the patent license\nyou grant is automatically extended to all recipients of the covered\nwork and works based on it.\n\n  A patent license is \"discriminatory\" if it does not include within\nthe scope of its coverage, prohibits the exercise of, or is\nconditioned on the non-exercise of one or more of the rights that are\nspecifically granted under this License.  
You may not convey a covered\nwork if you are a party to an arrangement with a third party that is\nin the business of distributing software, under which you make payment\nto the third party based on the extent of your activity of conveying\nthe work, and under which the third party grants, to any of the\nparties who would receive the covered work from you, a discriminatory\npatent license (a) in connection with copies of the covered work\nconveyed by you (or copies made from those copies), or (b) primarily\nfor and in connection with specific products or compilations that\ncontain the covered work, unless you entered into that arrangement,\nor that patent license was granted, prior to 28 March 2007.\n\n  Nothing in this License shall be construed as excluding or limiting\nany implied license or other defenses to infringement that may\notherwise be available to you under applicable patent law.\n\n  12. No Surrender of Others' Freedom.\n\n  If conditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot convey a\ncovered work so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you may\nnot convey it at all.  For example, if you agree to terms that obligate you\nto collect a royalty for further conveying from those to whom you convey\nthe Program, the only way you could satisfy both those terms and this\nLicense would be to refrain entirely from conveying the Program.\n\n  13. 
Remote Network Interaction; Use with the GNU General Public License.\n\n  Notwithstanding any other provision of this License, if you modify the\nProgram, your modified version must prominently offer all users\ninteracting with it remotely through a computer network (if your version\nsupports such interaction) an opportunity to receive the Corresponding\nSource of your version by providing access to the Corresponding Source\nfrom a network server at no charge, through some standard or customary\nmeans of facilitating copying of software.  This Corresponding Source\nshall include the Corresponding Source for any work covered by version 3\nof the GNU General Public License that is incorporated pursuant to the\nfollowing paragraph.\n\n  Notwithstanding any other provision of this License, you have\npermission to link or combine any covered work with a work licensed\nunder version 3 of the GNU General Public License into a single\ncombined work, and to convey the resulting work.  The terms of this\nLicense will continue to apply to the part which is the covered work,\nbut the work with which it is combined will remain governed by version\n3 of the GNU General Public License.\n\n  14. Revised Versions of this License.\n\n  The Free Software Foundation may publish revised and/or new versions of\nthe GNU Affero General Public License from time to time.  Such new versions\nwill be similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\n  Each version is given a distinguishing version number.  If the\nProgram specifies that a certain numbered version of the GNU Affero General\nPublic License \"or any later version\" applies to it, you have the\noption of following the terms and conditions either of that numbered\nversion or of any later version published by the Free Software\nFoundation.  
If the Program does not specify a version number of the\nGNU Affero General Public License, you may choose any version ever published\nby the Free Software Foundation.\n\n  If the Program specifies that a proxy can decide which future\nversions of the GNU Affero General Public License can be used, that proxy's\npublic statement of acceptance of a version permanently authorizes you\nto choose that version for the Program.\n\n  Later license versions may give you additional or different\npermissions.  However, no additional obligations are imposed on any\nauthor or copyright holder as a result of your choosing to follow a\nlater version.\n\n  15. Disclaimer of Warranty.\n\n  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY\nAPPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT\nHOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY\nOF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,\nTHE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\nPURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM\nIS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF\nALL NECESSARY SERVICING, REPAIR OR CORRECTION.\n\n  16. Limitation of Liability.\n\n  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS\nTHE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY\nGENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE\nUSE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF\nDATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD\nPARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),\nEVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF\nSUCH DAMAGES.\n\n  17. 
Interpretation of Sections 15 and 16.\n\n  If the disclaimer of warranty and limitation of liability provided\nabove cannot be given local legal effect according to their terms,\nreviewing courts shall apply local law that most closely approximates\nan absolute waiver of all civil liability in connection with the\nProgram, unless a warranty or assumption of liability accompanies a\ncopy of the Program in return for a fee.\n\n                     END OF TERMS AND CONDITIONS\n\n            How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  It is safest\nto attach them to the start of each source file to most effectively\nstate the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) <year>  <name of author>\n\n    This program is free software: you can redistribute it and/or modify\n    it under the terms of the GNU Affero General Public License as published\n    by the Free Software Foundation, either version 3 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n    GNU Affero General Public License for more details.\n\n    You should have received a copy of the GNU Affero General Public License\n    along with this program.  
If not, see <https://www.gnu.org/licenses/>.\n\nAlso add information on how to contact you by electronic and paper mail.\n\n  If your software can interact with users remotely through a computer\nnetwork, you should also make sure that it provides a way for users to\nget its source.  For example, if your program is a web application, its\ninterface could display a \"Source\" link that leads users to an archive\nof the code.  There are many ways you could offer source, and different\nsolutions will be better for different programs; see section 13 for the\nspecific requirements.\n\n  You should also get your employer (if you work as a programmer) or school,\nif any, to sign a \"copyright disclaimer\" for the program, if necessary.\nFor more information on this, and how to apply and follow the GNU AGPL, see\n<https://www.gnu.org/licenses/>.\n\n"
  },
  {
    "path": "README.md",
    "content": "# Jotoba <img width=\"30\" align=\"center\" src=\"/html/assets/jotokun/JotoBook.svg\">\nJotoba is a free online multi-language japanese dictionary based on lots of various free resources.<br>\nPublic instance: [jotoba.de](https://jotoba.de)<br>\n\n<a href=\"https://discord.gg/ysSkFFxmjr\"><img src=\"https://img.shields.io/discord/854657468867936267?style=for-the-badge\" alt=\"Discord\"></a>\n<br>\n\n### Get the new [Android App](https://play.google.com/store/apps/details?id=com.jotoba.mobile) now!\n\n# Team\n<table>\n     <tr align=\"center\">\n          <td><a href=\"https://github.com/JojiiOfficial\">JojiiOfficial</a></td>\n          <td><a href=\"https://github.com/Yukaru-san\">Yukaru</a></td>\n     </tr>\n     <tr align=\"center\">\n          <td><a href=\"https://github.com/JojiiOfficial\"><img src=\"https://avatars.githubusercontent.com/u/15957865?v=4\" width=\"100\" height=\"100\"></a></td>\n          <td><a href=\"https://github.com/Yukaru-san\"><img src=\"https://avatars.githubusercontent.com/u/57414313?v=4\" width=\"100\" height=\"100\"></a></td>\n     </tr>\n     <tr align=\"center\">\n          <td>Backend dev</td>\n          <td>Frontend dev</td>\n     </tr>\n</table>\n\n# Dictionary licenses\nAlmost all of the data used by [jotoba.de](https://jotoba.de) comes from external sources like [edrdg](http://www.edrdg.org/) \nfor Words, Kanji, Names and Radicales or [WaniKani](https://www.wanikani.com/) and [Kanjialive](https://kanjialive.com/) for audio sources.\nFor a detailed list of used resources and their licenses please visit [jotoba.de/about](https://jotoba.de/about).\n\n# Roadmap\nPlease refer to our [Trello board](https://trello.com/b/nmG0xgaW/jotoba-roadmap) for a roadmap and the developing progress.\n\n# Developing\nJotoba is open source. 
Contributions are highly welcome and can be made by anyone who wants to help Jotoba grow.<br>\nThat being said, all API endpoints exposed by Jotoba are documented and allowed to be used (within a fair amount).<br>\nRefer to [API-Docs](https://jotoba.de/docs.html) for the API documentation and to [CONTRIBUTION](https://github.com/WeDontPanic/Jotoba/wiki/Contributing) for an introduction on how to contribute code to Jotoba.\n\n# Translations\nJotoba is aimed to be a multi-language dictionary, thus the website is aimed to be fully translated into all available languages.<br>\nHowever, the main developers of this project don't speak ~10 languages.\nIf you're interested in contributing to this project we are thankful for each translation contribution.<br>\nFor a guide on how to add translations please refer to the [wiki](https://github.com/WeDontPanic/Jotoba/wiki/Translate-%5BPage%5D).\n\n# License\nJotoba itself is licensed under AGPL 3.0 or later. \nPlease refer to the [license file](https://github.com/WeDontPanic/Jotoba/blob/master/LICENSE) for further information.\nJoto-kun (including all of his variants) is licensed under [CC BY-NC-ND 4.0](https://creativecommons.org/licenses/by-nc-nd/4.0/).\n"
  },
  {
    "path": "deny.toml",
    "content": "# This template contains all of the possible sections and their default values\n\n# Note that all fields that take a lint level have these possible values:\n# * deny - An error will be produced and the check will fail\n# * warn - A warning will be produced, but the check will not fail\n# * allow - No warning or error will be produced, though in some cases a note\n# will be\n\n# The values provided in this template are the default values that will be used\n# when any section or field is not specified in your own configuration\n\n# If 1 or more target triples (and optionally, target_features) are specified,\n# only the specified targets will be checked when running `cargo deny check`.\n# This means, if a particular package is only ever used as a target specific\n# dependency, such as, for example, the `nix` crate only being used via the\n# `target_family = \"unix\"` configuration, that only having windows targets in\n# this list would mean the nix crate, as well as any of its exclusive\n# dependencies not shared by any other crates, would be ignored, as the target\n# list here is effectively saying which targets you are building for.\ntargets = [\n    # The triple can be any string, but only the target triples built in to\n    # rustc (as of 1.40) can be checked against actual config expressions\n    #{ triple = \"x86_64-unknown-linux-musl\" },\n    # You can also specify which target_features you promise are enabled for a\n    # particular target. 
target_features are currently not validated against\n    # the actual valid features supported by the target architecture.\n    #{ triple = \"wasm32-unknown-unknown\", features = [\"atomics\"] },\n]\n\n# This section is considered when running `cargo deny check advisories`\n# More documentation for the advisories section can be found here:\n# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html\n[advisories]\n# The path where the advisory database is cloned/fetched into\ndb-path = \"~/.cargo/advisory-db\"\n# The url(s) of the advisory databases to use\ndb-urls = [\"https://github.com/rustsec/advisory-db\"]\n# The lint level for security vulnerabilities\nvulnerability = \"deny\"\n# The lint level for unmaintained crates\nunmaintained = \"warn\"\n# The lint level for crates that have been yanked from their source registry\nyanked = \"warn\"\n# The lint level for crates with security notices. Note that as of\n# 2019-12-17 there are no security notice advisories in\n# https://github.com/rustsec/advisory-db\nnotice = \"warn\"\n# A list of advisory IDs to ignore. Note that ignored advisories will still\n# output a note when they are encountered.\nignore = [\n    #\"RUSTSEC-0000-0000\",\n]\n# Threshold for security vulnerabilities, any vulnerability with a CVSS score\n# lower than the range specified will be ignored. 
Note that ignored advisories\n# will still output a note when they are encountered.\n# * None - CVSS Score 0.0\n# * Low - CVSS Score 0.1 - 3.9\n# * Medium - CVSS Score 4.0 - 6.9\n# * High - CVSS Score 7.0 - 8.9\n# * Critical - CVSS Score 9.0 - 10.0\n#severity-threshold =\n\n# If this is true, then cargo deny will use the git executable to fetch advisory database.\n# If this is false, then it uses a built-in git library.\n# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support.\n# See Git Authentication for more information about setting up git authentication.\n#git-fetch-with-cli = true\n\n# This section is considered when running `cargo deny check licenses`\n# More documentation for the licenses section can be found here:\n# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html\n[licenses]\n# The lint level for crates which do not have a detectable license\nunlicensed = \"warn\"\n# List of explicitly allowed licenses\n# See https://spdx.org/licenses/ for list of possible licenses\n# [possible values: any SPDX 3.11 short identifier (+ optional exception)].\nallow = [\n    \"MIT\",\n    \"AGPL-3.0\",\n    \"GPL-3.0\",\n    \"Apache-2.0\",\n    \"BSD-3-Clause\",\n    \"MPL-2.0\",\n    \"BSD-2-Clause\",\n    \"CC0-1.0\",\n    \"Unicode-DFS-2016\",\n    \"ISC\",\n    #\"Apache-2.0 WITH LLVM-exception\",\n]\n# List of explicitly disallowed licenses\n# See https://spdx.org/licenses/ for list of possible licenses\n# [possible values: any SPDX 3.11 short identifier (+ optional exception)].\ndeny = [\n    #\"Nokia\",\n]\n# Lint level for licenses considered copyleft\ncopyleft = \"warn\"\n# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses\n# * both - The license will be approved if it is both OSI-approved *AND* FSF\n# * either - The license will be approved if it is either OSI-approved *OR* FSF\n# * osi-only - The license will be approved if is OSI-approved *AND NOT* FSF\n# * 
fsf-only - The license will be approved if is FSF *AND NOT* OSI-approved\n# * neither - This predicate is ignored and the default lint level is used\nallow-osi-fsf-free = \"neither\"\n# Lint level used when no other predicates are matched\n# 1. License isn't in the allow or deny lists\n# 2. License isn't copyleft\n# 3. License isn't OSI/FSF, or allow-osi-fsf-free = \"neither\"\ndefault = \"deny\"\n# The confidence threshold for detecting a license from license text.\n# The higher the value, the more closely the license text must be to the\n# canonical license text of a valid SPDX license file.\n# [possible values: any between 0.0 and 1.0].\nconfidence-threshold = 0.8\n# Allow 1 or more licenses on a per-crate basis, so that particular licenses\n# aren't accepted for every possible crate as with the normal allow list\nexceptions = [\n    # Each entry is the crate and version constraint, and its specific allow\n    # list\n    #{ allow = [\"Zlib\"], name = \"adler32\", version = \"*\" },\n]\n\n# Some crates don't have (easily) machine readable licensing information,\n# adding a clarification entry for it allows you to manually specify the\n# licensing information\n#[[licenses.clarify]]\n# The name of the crate the clarification applies to\n#name = \"ring\"\n# The optional version constraint for the crate\n#version = \"*\"\n# The SPDX expression for the license requirements of the crate\n#expression = \"MIT AND ISC AND OpenSSL\"\n# One or more files in the crate's source used as the \"source of truth\" for\n# the license expression. 
If the contents match, the clarification will be used\n# when running the license check, otherwise the clarification will be ignored\n# and the crate will be checked normally, which may produce warnings or errors\n# depending on the rest of your configuration\n#license-files = [\n    # Each entry is a crate relative path, and the (opaque) hash of its contents\n    #{ path = \"LICENSE\", hash = 0xbd0eed23 }\n#]\n\n[licenses.private]\n# If true, ignores workspace crates that aren't published, or are only\n# published to private registries.\n# To see how to mark a crate as unpublished (to the official registry),\n# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field.\nignore = false\n# One or more private registries that you might publish crates to, if a crate\n# is only published to private registries, and ignore is true, the crate will\n# not have its license(s) checked\nregistries = [\n    #\"https://sekretz.com/registry\n]\n\n# This section is considered when running `cargo deny check bans`.\n# More documentation about the 'bans' section can be found here:\n# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html\n[bans]\n# Lint level for when multiple versions of the same crate are detected\nmultiple-versions = \"warn\"\n# Lint level for when a crate version requirement is `*`\nwildcards = \"allow\"\n# The graph highlighting used when creating dotgraphs for crates\n# with multiple versions\n# * lowest-version - The path to the lowest versioned duplicate is highlighted\n# * simplest-path - The path to the version with the fewest edges is highlighted\n# * all - Both lowest-version and simplest-path are used\nhighlight = \"all\"\n# List of crates that are allowed. Use with care!\nallow = [\n    #{ name = \"ansi_term\", version = \"=0.11.0\" },\n]\n# List of crates to deny\ndeny = [\n    # Each entry the name of a crate and a version range. 
If version is\n    # not specified, all versions will be matched.\n    #{ name = \"ansi_term\", version = \"=0.11.0\" },\n    #\n    # Wrapper crates can optionally be specified to allow the crate when it\n    # is a direct dependency of the otherwise banned crate\n    #{ name = \"ansi_term\", version = \"=0.11.0\", wrappers = [] },\n]\n# Certain crates/versions that will be skipped when doing duplicate detection.\nskip = [\n    #{ name = \"ansi_term\", version = \"=0.11.0\" },\n]\n# Similarly to `skip` allows you to skip certain crates during duplicate\n# detection. Unlike skip, it also includes the entire tree of transitive\n# dependencies starting at the specified crate, up to a certain depth, which is\n# by default infinite\nskip-tree = [\n    #{ name = \"ansi_term\", version = \"=0.11.0\", depth = 20 },\n]\n\n# This section is considered when running `cargo deny check sources`.\n# More documentation about the 'sources' section can be found here:\n# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html\n[sources]\n# Lint level for what to happen when a crate from a crate registry that is not\n# in the allow list is encountered\nunknown-registry = \"warn\"\n# Lint level for what to happen when a crate from a git repository that is not\n# in the allow list is encountered\nunknown-git = \"warn\"\n# List of URLs for allowed crate registries. Defaults to the crates.io index\n# if not specified. If it is specified but empty, no registries are allowed.\nallow-registry = [\"https://github.com/rust-lang/crates.io-index\"]\n# List of URLs for allowed Git repositories\nallow-git = []\n\n[sources.allow-org]\n# 1 or more github.com organizations to allow git sources for\ngithub = [\"\"]\n# 1 or more gitlab.com organizations to allow git sources for\ngitlab = [\"\"]\n# 1 or more bitbucket.org organizations to allow git sources for\nbitbucket = [\"\"]\n"
  },
  {
    "path": "docker-compose.yaml",
    "content": "version: \"3.7\"\n\nservices:\n  app:\n    image: ghcr.io/wedontpanic/jotoba:latest\n    restart: always\n    ports:\n      - 8080:8080\n    working_dir: /app\n    volumes:\n      - ./html:/app/html\n      - ./resources:/app/resources\n      - ./data:/app/data\n"
  },
  {
    "path": "html/assets/css/main.css",
    "content": "/* ----------------- Color Themes ----------------- */\r\n\r\n:root,\r\n:root.light {\r\n    --background: #f2f1f0;\r\n    --overlay: #f3f3f3;\r\n\r\n    --primaryColor: #34a83c;\r\n    --bgPrimaryColor: #50c058;\r\n    --secondaryColor: #909dc0;\r\n\r\n    --primaryTextColor: #222222;\r\n    --secondaryTextColor: #ffffff;\r\n\r\n    --searchBackground: #ffffff;\r\n    --searchTextColor: #555555;\r\n    --shadowColor: #222222;\r\n\r\n    --tagColor: #808080;\r\n    --itemBG: #d3d3d3;\r\n\r\n    --alert: #ff4254;\r\n    --danger: #dc3545;\r\n    --danger2: #dd4c5b;\r\n\r\n    /* Special */\r\n    --itemBG_075: rgb(211, 211, 211, 0.75);\r\n    --langSep: rgba(50, 103, 51, 0.1);\r\n    --lineColor: rgba(0, 0, 0, 0.1);\r\n    --backgroundShadow: rgba(34, 34, 34, 0.1);\r\n\r\n    /* Used by Radical Picker */\r\n    --borderColor: var(--searchTextColor);\r\n    --disabledColor: #bdbdbd;\r\n\r\n    /* Used by overlays */\r\n    --headerColor: var(--borderColor);\r\n    --headerScrollBar: var(--borderColor);\r\n\r\n    /* Overlay Button */\r\n    --buttonText: #1f1f1f;\r\n    --buttonBg: #dedede;\r\n    --buttonBgActive: #e6e6e6;\r\n\r\n    /* Overlay Graph */\r\n    --graphLink: #d1d1d1;\r\n    --graphCircle: var(--bgPrimaryColor);\r\n    --graphStroke: rgb(116 116 116 / 6%);\r\n    --graphPath: white;\r\n    --graphText: white;\r\n}\r\n\r\n:root.dark {\r\n    --background: #202324;\r\n    --overlay: #1f2123;\r\n\r\n    --primaryColor: #2d9034;\r\n    --bgPrimaryColor: #338f4f;\r\n    --secondaryColor: #435993;\r\n\r\n    --primaryTextColor: #d3cfc9;\r\n    --secondaryTextColor: #e8e6e3;\r\n\r\n    --searchBackground: #181a1b;\r\n    --searchTextColor: #b2aca2;\r\n    --shadowColor: #9d9488;\r\n\r\n    --tagColor: #787878;\r\n    --itemBG: #7a7a7a;\r\n\r\n    --itemBG_075: rgba(122, 122, 122, 0.75);\r\n    --lineColor: rgba(211, 207, 201, 0.1);\r\n    --backgroundShadow: rgba(34, 34, 34, 0.2);\r\n\r\n    --borderColor: var(--itemBG_075);\r\n    
--disabledColor: #3c3c3c;\r\n\r\n    --headerColor: var(--lineColor);\r\n    --headerScrollBar: #434344;\r\n\r\n    --buttonText: #fff;\r\n    --buttonBg: #404040;\r\n    --buttonBgActive: #515151;\r\n\r\n    --alert: #e93849;\r\n}\r\n\r\n:root.dark ::-moz-selection {\r\n    background: var(--secondaryColor);\r\n}\r\n\r\n:root.dark ::selection {\r\n    background: var(--secondaryColor);\r\n}\r\n\r\n/* ------------------- Scrollbar Adjustments ------------------- */\r\n\r\n* {\r\n    scrollbar-width: thin;\r\n}\r\n\r\n::-webkit-scrollbar {\r\n    width: 20px;\r\n}\r\n\r\n::-webkit-scrollbar-track {\r\n    background-color: transparent;\r\n}\r\n\r\n::-webkit-scrollbar-thumb {\r\n    background-color: #c1c1c1;\r\n    border-radius: 20px;\r\n    border: 6px solid transparent;\r\n    background-clip: content-box;\r\n}\r\n\r\n::-webkit-scrollbar-thumb:hover {\r\n    background-color: #a8a8a8;\r\n}\r\n\r\n:root.dark ::-webkit-scrollbar-thumb {\r\n    background-color: var(--itemBG);\r\n}\r\n\r\n/* ----------------- Overall Page Adjustments ----------------- */\r\n\r\nhtml,\r\nbody {\r\n    font-size: 100%;\r\n    background: var(--background) !important;\r\n}\r\n\r\nbody {\r\n    color: var(--primaryTextColor);\r\n    cursor: auto;\r\n    font-family: \"Helvetica Neue\", Helvetica, Arial, \"Source Han Sans\",\r\n        \"源ノ角ゴシック\", \"Hiragino Sans\", \"HiraKakuProN-W3\",\r\n        \"Hiragino Kaku Gothic ProN W3\", \"Hiragino Kaku Gothic ProN\",\r\n        \"ヒラギノ角ゴ ProN W3\", \"Noto Sans\", \"Noto Sans JP\", \"Noto Sans CJK JP\",\r\n        \"メイリオ\", Meiryo, \"游ゴシック\", YuGothic, \"ＭＳ Ｐゴシック\",\r\n        \"MS PGothic\", \"ＭＳ ゴシック\", \"MS Gothic\", sans-serif;\r\n    font-style: normal;\r\n    font-weight: normal;\r\n    line-height: 1.5;\r\n    margin: 0;\r\n    padding: 0;\r\n    position: relative;\r\n    -webkit-font-smoothing: auto;\r\n}\r\n\r\nh3,\r\nh4 {\r\n    font-family: \"Helvetica\", \"Arial\", sans-serif;\r\n}\r\n\r\nbody {\r\n    min-height: 100vh;\r\n    
height: 100vh;\r\n}\r\n\r\nbody.index {\r\n    display: grid;\r\n    grid-template-rows: 1fr auto;\r\n    overflow-x: hidden;\r\n}\r\n\r\n.noselect,\r\n.tags,\r\n.clickable,\r\n.entry-count,\r\n.no-drag {\r\n    -webkit-touch-callout: none; /* iOS Safari */\r\n    -webkit-user-select: none; /* Safari */ /* Konqueror HTML */\r\n    -moz-user-select: none; /* Old versions of Firefox */\r\n    -ms-user-select: none; /* Internet Explorer/Edge */\r\n    -o-user-select: none; /* Opera */\r\n    user-select: none; /* Non-prefixed version, currently supported by Chrome, Edge, Opera and Firefox */\r\n}\r\n\r\n#backdrop {\r\n    position: fixed;\r\n    display: flex;\r\n    flex-direction: column;\r\n    align-items: center;\r\n    justify-content: center;\r\n    cursor: pointer;\r\n    z-index: 999999;\r\n    top: 0;\r\n    left: 0;\r\n    width: 100vw;\r\n    height: 100vh;\r\n    background-color: var(--backgroundShadow);\r\n}\r\n\r\nbutton {\r\n    display: flex;\r\n    place-content: center;\r\n    place-items: center;\r\n}\r\n\r\n.btn-danger {\r\n    color: white !important;\r\n    background-color: var(--danger) !important;\r\n    border-color: var(--danger) !important;\r\n}\r\n\r\n.btn-danger:not(:disabled):not(.disabled).active:focus,\r\n.btn-danger:not(:disabled):not(.disabled):active:focus {\r\n    box-shadow: unset;\r\n}\r\n\r\n.btn-danger:hover {\r\n    background-color: var(--danger2) !important;\r\n}\r\n\r\n.close:focus {\r\n    outline: 0;\r\n}\r\n\r\nobject {\r\n    pointer-events: none;\r\n}\r\n\r\n.vl {\r\n    border-left: 1px solid var(--searchTextColor);\r\n}\r\n\r\n:root.dark hr {\r\n    border-top: 1px solid rgba(255,255,255,.1);\r\n}\r\n\r\nh3 {\r\n    font-size: 22px;\r\n    text-align: center;\r\n    text-align: -webkit-center;\r\n    font-weight: bold;\r\n}\r\n\r\nh4 {\r\n    font-size: 11px;\r\n    color: var(--searchTextColor);\r\n    margin: 2px 0 0 0;\r\n}\r\n\r\n.hidden {\r\n    display: none !important;\r\n}\r\n\r\n.highlight {\r\n    color: 
var(--primaryColor);\r\n}\r\n\r\n.indented {\r\n    margin-left: 5%;\r\n}\r\n\r\n.clickable {\r\n    color: var(--primaryColor);\r\n    text-align: center;\r\n    text-align: -webkit-center;\r\n    cursor: pointer;\r\n}\r\n\r\n.clickable.title {\r\n    font-size: 22px;\r\n    font-weight: bold;\r\n}\r\n\r\n.clickable.fat {\r\n    font-size: 20px;\r\n}\r\n\r\n.clickable:hover {\r\n    text-decoration: underline;\r\n    color: var(--primaryColor);\r\n}\r\n\r\n.no-margin {\r\n    margin: 0px 0px 0px 0px !important;\r\n}\r\n\r\n.no-align {\r\n    text-align: unset;\r\n}\r\n\r\n.text-left {\r\n    text-align: left;\r\n}\r\n\r\n.top-padding-05-rem {\r\n    padding-top: 0.5rem;\r\n}\r\n\r\n.right-padding-10 {\r\n    padding-right: 10px;\r\n}\r\n\r\n.right-padding-20 {\r\n    padding-right: 20px;\r\n}\r\n\r\n.d-flex.wrap {\r\n    flex-wrap: wrap;\r\n}\r\n\r\n.no-highlight {\r\n    color: var(--primaryTextColor);\r\n}\r\n\r\n.no-highlight:hover {\r\n    text-decoration: none;\r\n}\r\n\r\na:hover {\r\n    color: unset;\r\n    text-decoration: unset !important;\r\n}\r\n\r\n.black {\r\n    color: var(--primaryTextColor);\r\n}\r\n\r\n.fat {\r\n    font-weight: bold;\r\n}\r\n\r\n.center-text {\r\n    text-align: center;\r\n    text-align: -webkit-center;\r\n}\r\n\r\n/* ----------------- Commonly Used CSS ----------------- */\r\n\r\n.search-suggestion {\r\n    color: inherit;\r\n}\r\n\r\n.search-suggestion:focus,\r\n.search-suggestion:link,\r\n.search-suggestion:visited,\r\n.search-suggestion:hover {\r\n    text-decoration: none;\r\n}\r\n\r\n#page-container {\r\n    padding-top: 10px;\r\n    padding-left: 10px;\r\n    padding-right: 10px;\r\n}\r\n\r\n.main-container {\r\n    width: 100%;\r\n    max-width: 1145px;\r\n    height: -webkit-max-content;\r\n    height: -moz-max-content;\r\n    height: max-content;\r\n}\r\n\r\n.main-info {\r\n    width: 100%;\r\n    height: -webkit-max-content;\r\n    height: -moz-max-content;\r\n    height: max-content;\r\n    padding-bottom: 
10px;\r\n}\r\n\r\n.secondary-info {\r\n    height: -webkit-max-content;\r\n    height: -moz-max-content;\r\n    height: max-content;\r\n    width: 35%;\r\n    padding-bottom: 10px;\r\n    padding-left: 10px;\r\n}\r\n\r\n@media only screen and (max-width: 600px) {\r\n    .secondary-info {\r\n        padding-left: 0px;\r\n    }\r\n}\r\n\r\n.tags {\r\n    color: var(--tagColor);\r\n    font-size: 12px;\r\n    margin-top: 6px;\r\n}\r\n\r\n.tags.fat {\r\n    font-size: 20px;\r\n    font-weight: bold;\r\n    color: var(--primaryTextColor);\r\n}\r\n\r\n.tags.slim {\r\n    font-size: 20px;\r\n    font-weight: 400;\r\n    color: var(--primaryTextColor);\r\n    margin-top: -5px !important;\r\n}\r\n\r\n.tags.no-margin {\r\n    margin-top: 0px;\r\n}\r\n\r\n.d-flex .row-tag-entry + .row-tag-entry {\r\n    padding-left: 10px;\r\n}\r\n\r\n.entry-count {\r\n    color: var(--tagColor);\r\n    line-height: 30px;\r\n    margin-right: 5px;\r\n    position: relative;\r\n    height: 100%;\r\n}\r\n\r\n/* --- Slider adjustments --- */\r\n\r\n.slider-parent {\r\n    padding-top: 30px;\r\n    padding-right: 20px;\r\n    width: 150px;\r\n}\r\n\r\n.slider-output {\r\n    font-size: 13px;\r\n    padding-top: 10px;\r\n    color: var(--primaryColor);\r\n}\r\n\r\n/* The slider itself */\r\n.slider {\r\n    -webkit-appearance: none;\r\n    width: 130px;\r\n    height: 15px;\r\n    border-radius: 10px;\r\n    background: var(--itemBG);\r\n    outline: none;\r\n    opacity: 0.7;\r\n    transition: opacity 0.2s;\r\n}\r\n\r\n/* Mouse-over effects */\r\n.slider:hover {\r\n    opacity: 1;\r\n}\r\n:root.dark .slider:hover {\r\n    opacity: 0.8;\r\n}\r\n\r\n/* The slider handle for webkit and mozilla with its extra shit */\r\n.slider::-webkit-slider-thumb {\r\n    -webkit-appearance: none;\r\n    appearance: none;\r\n    width: 25px;\r\n    height: 25px;\r\n    border-radius: 50%;\r\n    border-color: var(--bgPrimaryColor);\r\n    background: var(--bgPrimaryColor);\r\n    cursor: 
pointer;\r\n}\r\n\r\n:root.dark .slider::-webkit-slider-thumb {\r\n    border-color: #3ace67;\r\n    background: #3ace67;\r\n}\r\n\r\n.slider::-webkit-slider-thumb:hover {\r\n    background-color: var(--primaryColor);\r\n}\r\n\r\n:root.dark .slider::-webkit-slider-thumb:hover {\r\n    border-color: #2eeb67;\r\n}\r\n\r\n.slider::-moz-range-thumb {\r\n    width: 25px;\r\n    height: 25px;\r\n    border-radius: 50%;\r\n    background: var(--bgPrimaryColor);\r\n    cursor: pointer;\r\n}\r\n\r\n:root.dark .slider::-moz-range-thumb:hover {\r\n    background: #3ace67;\r\n}\r\n\r\n.slider::-moz-range-thumb:hover {\r\n    background-color: var(--primaryColor);\r\n}\r\n\r\n:root.dark .slider::-moz-range-thumb:hover {\r\n    border-color: #2eeb67;\r\n}\r\n\r\n.res-separator {\r\n    border-top: 2px solid var(--lineColor);\r\n    margin-right: 5px;\r\n}\r\n\r\n.res-separator.sentence {\r\n    width: 100%;\r\n}\r\n\r\n/* Useful stuff */\r\n\r\n.flex-center {\r\n    display: flex;\r\n    place-content: center;\r\n}\r\n\r\n#loading-screen {\r\n    visibility: hidden;\r\n    background-color: #000;\r\n    opacity: 0;\r\n    transition: opacity 0.15s linear;\r\n    z-index: 2000;\r\n    position: fixed;\r\n    top: 0;\r\n    left: 0;\r\n    width: 100vw;\r\n    height: 100vh;\r\n    display: flex;\r\n    justify-content: center;\r\n    align-items: center;\r\n}\r\n\r\n#loading-screen.show {\r\n    display: block;\r\n    opacity: 0.5;\r\n}\r\n\r\n.loading-animation {\r\n    border: 16px solid var(--itemBG);\r\n    border-radius: 50%;\r\n    border-top: 16px solid var(--primaryColor);\r\n    width: 100px;\r\n    height: 100px;\r\n    -webkit-animation: spin 2s linear infinite; /* Safari */\r\n    animation: spin 2s linear infinite;\r\n}\r\n\r\n/* Safari */\r\n@-webkit-keyframes spin {\r\n    0% {\r\n        -webkit-transform: rotate(0deg);\r\n    }\r\n    100% {\r\n        -webkit-transform: rotate(360deg);\r\n    }\r\n}\r\n\r\n@keyframes spin {\r\n    0% {\r\n        transform: 
rotate(0deg);\r\n    }\r\n    100% {\r\n        transform: rotate(360deg);\r\n    }\r\n}\r\n\r\n/* SVG Colors */\r\n\r\n:root.dark .mobile-nav-btn > div,\r\n:root.dark .mobile-nav-inner-btn > div:not(.jumpSvg) {\r\n    background-color: var(--searchTextColor) !important;\r\n    color: var(--searchTextColor) !important;\r\n}\r\n\r\n:root.dark .mobile-nav-inner-btn > span {\r\n    color: var(--searchTextColor) !important;\r\n}\r\n\r\n.searchSvg,\r\n.settingsSvg,\r\n.clearSvg,\r\n.voiceSvg {\r\n    mask-size: cover !important;\r\n    -webkit-mask-size: cover !important;\r\n}\r\n\r\n.searchSvg {\r\n    height: 18px;\r\n    width: 18px;\r\n    background-color: var(--primaryColor);\r\n    mask: url(\"/assets/svg/ui/search.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/search.svg\") no-repeat center;\r\n}\r\n\r\n.searchSvg.index {\r\n    height: 16px;\r\n    width: 16px;\r\n    margin-top: 3px;\r\n    margin-left: 5px;\r\n    background-color: var(--secondaryTextColor);\r\n}\r\n\r\n.settingsSvg {\r\n    height: 30px;\r\n    width: 30px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/settings.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/settings.svg\") no-repeat center;\r\n}\r\n\r\n.infoSvg {\r\n    scale: 1.1;\r\n    height: 30px;\r\n    width: 30px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/info.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/info.svg\") no-repeat center;\r\n}\r\n\r\n.notificationSvg {\r\n    scale: 1.1;\r\n    height: 30px;\r\n    width: 30px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/notification.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/notification.svg\") no-repeat center;\r\n}\r\n\r\n.settingsSvg.mobile {\r\n    height: 26px;\r\n    width: 26px;\r\n}\r\n\r\n.clearSvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    margin-top: 2px;\r\n    background-color: 
var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/clear.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/clear.svg\") no-repeat center;\r\n}\r\n\r\n.voiceSvg {\r\n    margin-top: 2px;\r\n    height: 24px;\r\n    width: 24px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/voice.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/voice.svg\") no-repeat center;\r\n}\r\n\r\n.voiceSvg.mobile {\r\n    height: 30px;\r\n    width: 30px;\r\n    background-color: var(--tagColor);\r\n}\r\n\r\n.voiceSvg.index {\r\n    margin-top: -7px;\r\n    height: 30px;\r\n    width: 30px;\r\n}\r\n\r\n.voiceSvg.active {\r\n    background-color: var(--primaryColor) !important;\r\n}\r\n\r\n.cameraSvg {\r\n    height: 28px;\r\n    width: 28px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/camera.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/camera.svg\") no-repeat center;\r\n}\r\n\r\n.cameraSvg.index {\r\n    margin-top: -5px;\r\n    margin-right: 33px;\r\n}\r\n\r\n.jumpSvg {\r\n    margin-left: -1px;\r\n    height: 26px;\r\n    width: 26px;\r\n    background-color: var(--primaryColor) !important;\r\n    mask: url(\"/assets/svg/ui/jump.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/jump.svg\") no-repeat center;\r\n}\r\n\r\n.menuSvg {\r\n    height: 28px;\r\n    width: 28px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/menu.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/menu.svg\") no-repeat center;\r\n}\r\n\r\n.undoSvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/undo.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/undo.svg\") no-repeat center;\r\n    cursor: pointer;\r\n}\r\n\r\n.imgUploadSvg {\r\n    margin: 11px 14px 0px -35px;\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--tagColor);\r\n    
mask: url(\"/assets/svg/ui/upload.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/upload.svg\") no-repeat center;\r\n    cursor: pointer;\r\n}\r\n\r\n.downloadSvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/download.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/download.svg\") no-repeat center;\r\n    cursor: pointer;\r\n    pointer-events: none;\r\n}\r\n\r\n.conjugationSvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/conjugation.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/conjugation.svg\") no-repeat center;\r\n    cursor: pointer;\r\n    pointer-events: none;\r\n}\r\n\r\n.sentenceSvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/sentence.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/sentence.svg\") no-repeat center;\r\n    cursor: pointer;\r\n    pointer-events: none;\r\n}\r\n\r\n.transitivitySvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/transitivity.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/transitivity.svg\") no-repeat center;\r\n    cursor: pointer;\r\n    pointer-events: none;\r\n}\r\n\r\n.linkSvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/link.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/link.svg\") no-repeat center;\r\n    cursor: pointer;\r\n    pointer-events: none;\r\n}\r\n\r\n.copySvg {\r\n    height: 21px;\r\n    width: 21px;\r\n    background-color: var(--primaryColor);\r\n    mask: url(\"/assets/svg/ui/copy.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/copy.svg\") no-repeat center;\r\n    cursor: pointer;\r\n    pointer-events: 
none;\r\n}\r\n\r\n.tooltipSvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--searchTextColor);\r\n    mask: url(\"/assets/svg/ui/3dot.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/3dot.svg\") no-repeat center;\r\n    cursor: pointer;\r\n}\r\n\r\n.shareSvg {\r\n    height: 20px;\r\n    width: 20px;\r\n    background-color: var(--disabledColor);\r\n    mask: url(\"/assets/svg/ui/share.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/share.svg\") no-repeat center;\r\n    cursor: pointer;\r\n}\r\n\r\n.discordSvg {\r\n    height: 35px;\r\n    width: 35px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/_discord.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/_discord.svg\") no-repeat center;\r\n    cursor: pointer;\r\n}\r\n\r\n.githubSvg {\r\n    height: 35px;\r\n    width: 35px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/_github.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/_github.svg\") no-repeat center;\r\n    cursor: pointer;\r\n}\r\n\r\n.donationSvg {\r\n    height: 35px;\r\n    width: 35px;\r\n    margin-top: -1px;\r\n    background-color: var(--tagColor);\r\n    mask: url(\"/assets/svg/ui/_donation.svg\") no-repeat center;\r\n    -webkit-mask: url(\"/assets/svg/ui/_donation.svg\") no-repeat center;\r\n    cursor: pointer;\r\n}"
  },
  {
    "path": "html/assets/css/mobile.css",
    "content": "/*\n    Used to make mobile stuff less ugly. Will be transferred into the others file bit-by-bit.\n*/\n\n@media only screen and (max-width: 600px) {\n\t\n    .modal-open {\n        margin-right: 0px;\n    }\n\n    /* -------- Index -------- */\n   \n    .circle {\n        width: 1.25em;\n        height: 1.25em;\n        border-radius: 50%;\n        font-size: 50px;\n        text-align: center;\n        text-align: -webkit-center;\n        background-color: var(--itemBG_075);\n        line-height: 110%;\n        position: fixed;\n        bottom: .5em;\n        right: .5em;\n    }\n\n    article {\n        padding-left: 0.5em;\n        padding-right: 1em;\n    }\n\n    .voiceSvg.index {\n        margin-top: -4px;\n    }\n\n\t/* -------- Word Search -------- */\n    \n    .search-embedded-btn {\n        right: 5px;\n        margin-top: 9px;\n    }\n\n    .search-embedded-btn.search {\n        right: 7px;\n        margin-top: 10px;\n    }\n\n    .search-embedded-btn.radical {\n        display: none;\n    }\n\n    #voiceBtn > svg {\n        margin-top: 3px;\n    }\n\n    #search-vl {\n        position: absolute;\n        right: 39px;\n    }    \n\n    #emptyInput {\n        right: 31px;\n        margin-top: 7px;\n    }\n\n    .choices__item--selectable.selected {\n        color: var(--primaryColor) !important;\n    }\n    \n    .form-main > div > div > div > div .choices__list.choices__list--dropdown {\n        width: -webkit-max-content;\n        width: -moz-max-content;\n        width: max-content;\n    }\n\n    .searchDivInner form .inner-form .input-field.first-wrap .choices__list.choices__list--dropdown {\n        padding-right: 20px;\n    }\n\n    .overlay.suggestion {\n        width: 98%;\n        margin-left: 1%;\n    }\n    \n    .image-search-input {\n        width: 100%;\n    }\n\n    #shadow-text {\n        padding: 0px;\n        margin: 13px 15px;\n    }\n\t\n    #content-container {\n        flex-direction: column!important;\n        
min-width: 350px;\n    }\n    \n    body {\n        height: -webkit-max-content;\n        height: -moz-max-content;\n        height: max-content;\n    }\n\n    .searchDivInner form .inner-form .input-field.first-wrap .choices__inner .choices__list--single .choices__item {\n        display: none;\n    }\n\n    .searchDivInner form .inner-form .input-field.first-wrap {\n        width: 40px;\n    }\n\n    .searchDivInner form .inner-form .input-field input {\n        height: 100%;\n        background: transparent;\n        border: 0;\n        display: block;\n        width: 88%;\n        padding: 25px 0px 27px 15px;\n        font-size: 1em;\n        color: var(--searchTextColor);\n    }\n\n    #searchDiv {\n        margin: 0px;\n        width: 100%;\n        max-width: 1000px;\n    }\n\n    h3 {\n        margin-bottom: 2rem;\n    }\n\n    .title-div {\n        width: 100%;\n    }\n    \n    .btn-container, .main-tab-select {\n        display: none !important;\n    }\n    \n    .title-div {\n        padding-top: 10px;\n        width: 100%;\n    }\n    \n    .main-info {\n        height: -webkit-max-content;\n        height: -moz-max-content;\n        height: max-content;\n        width: 100%;\n    }\n\n    .mdl-menu__container {\n        margin-right: 15px;\n    }\n\t\n\t.main-info > .d-flex.center {\n  flex-direction: column!important;\n\t\talign-self: center;\n\t\talign-items: center;\n        width: 100%;\n\t}\n\n    .main-info > .d-flex.flex-row {\n        align-self: center;\n\t\talign-items: center;\n        margin-left: -10px;\n    }\n    \n    .definition-wrapper.d-flex.flex-row {\n        padding-bottom: 5px;\n    }\n    \n    .secondary-info {\n        height: -webkit-max-content;\n        height: -moz-max-content;\n        height: max-content;\n        align-self: center;\n        width: 100%;\n       /* padding-left: 15%; */\n    }\n\t\n    .secondary-info > div > .kanji-entry {\n        justify-content: center;\n    }\n\t\n\t.kanji-entry.left.fixed > 
.d-flex {\n\t\tpadding-bottom: 5px;\n\t}\n\n    .kanji-entry.right {\n        padding-left: 2vw;\n        padding-right: 2vw;\n        width: 100%;\n\t}\n    \n    .kanji-preview.large {\n        position: absolute;\n    }\n    \n    .translation.kanji {\n        padding-top: 70px !important;\n\t\tpadding-left: 0px !important;\n\t\tpadding-right: 0px !important;\n        text-align: center;\n        text-align: -webkit-center;\n    }\n\t\n\t.kanji-parent > .tags {\n\t\tpadding-bottom: 5px;\n        text-align: center;\n        text-align: -webkit-center;\n\t}\n\n    .list-entry > .tags, .kanji-entry.right, .tags.no-margin {\n        text-align: left;\n    }\n\n    .entry-min-height-1 {\n        min-height: unset;\n    }\n    \n    .entry-min-height-2 {\n        min-height: unset;\n    }\n\n    /* ----------- Overlay ----------- */\n\n    .clickable.collocation {\n        width: 70%;\n        margin-left: 20%;\n    }\n\n    .table.conjugation {\n        margin-left: 0px;\n        width: 101%;\n    }\n\n    /* -------- Radical Picker -------- */\n    \n    .rad-picker-icon {\n        font-size: 30px;\n    }\n\n    /* -------- Image Upload -------- */\n\n    .cropping-target-border {\n        width: calc(100% - 30px);\n        left: 15px;\n    }\n\n    .croppie-container {\n        width: calc(100% - 60px);\n        left: 30px;\n    }\n\n    .croppie-container .cr-boundary {\n        height: 485px;\n        overflow: scroll;\n    }\n\n    /* -------- Help Page --------- */\n\n    .help-joto {\n        display: none;\n    }\n  \n    /* -------- Mobile only -------- */\n\n    .mobile-nav {\n        overflow-y: hidden;\n        position: fixed;\n        bottom: 75px;\n        right: 24px;\n        z-index: 100;\n        height: 50%;\n        width: 60px;\n    }\n\n    .mobile-nav-btn {\n        place-content: center;\n        place-items: center;\n        display: flex;\n        position: fixed;\n        bottom: 20px;\n        right: 30px; \n        z-index: 99;  \n      
  border: none;\n        outline: none; \n        background-color: var(--itemBG_075);\n        border-radius: 50%;\n        width: 50px;\n        height: 50px;\n        text-align: center;\n        text-align: -webkit-center;\n    }\n\n    .mobile-nav-inner-btn {\n        width: 45px;\n        height: 45px;\n        border: none;\n        outline: none;\n        background: var(--itemBG_075);\n        border-radius: 50%;\n        margin-bottom: 5px;\n        margin-left: 6px;\n        transition: all .2s linear;\n    }\n\n    .mobile-nav.hidden > .mobile-nav-inner-btn {\n        margin-bottom: -50px;\n    }\n\n    #jmp-btn {\n        padding-bottom: 5px;\n        transform: rotate(180deg);\n    }\n\n    #jmp-btn > svg > polygon {\n        fill: var(--primaryColor);\n    }\n\n    .kanji-jump {\n        text-align: center;\n        text-align: -webkit-center;\n        margin-bottom: 20px;\n    }\n\n    .kanji-jump.parent {\n        text-align: center;\n        text-align: -webkit-center;\n        width: 100%;\n    }\n\n    .word-title {\n        display: none;\n    }\n\n    .info-h3 {\n        margin-bottom: 0px;\n    }\n\n    /* Radical Picker */\n    .rad-result-preview {\n        display: none;\n    }\n\n    .mobile-nav-btn:focus, .mobile-nav-inner-btn:focus {\n        outline: none;\n        box-shadow: 0 0 3pt 2pt var(--primaryColor);\n    }\n\n}\n\n/* Mobile only */\n@media only screen and (min-width: 600px) {\n    .circle, .mobile-nav, .mobile-nav-btn, .mobile-nav-inner-btn, .kanji-jump, .desktop-br {\n        display: none;\n    }\n}"
  },
  {
    "path": "html/assets/css/overlay/croppingOverlay.css",
    "content": ".modal.fade .modal-dialog.crop {\n    transition: unset;\n    transform: unset;\n}\n\n.cropping-target-border {\n    position: fixed;\n    width: 52vw;\n    height: 596px;\n    background: var(--itemBG);\n    left: 25%;\n    top: 25px;\n}\n\n.croppie-container {\n    position: fixed;\n    width: 50%;\n    height: 500px;\n    left: 26%;\n    top: 50px\n}\n\n.croppie-container .cr-boundary {\n    height: 525px;\n}\n\n.btn-search.crop {\n    width: 110px;\n    height: 30px;\n    display: inline;\n    position: absolute;\n    right: 25px;\n    top: 93.75%;\n    background: var(--bgPrimaryColor);\n}\n\n.btn-danger.crop {\n    left: 25px;\n}\n\n.croppie-container .cr-slider-wrap {\n    margin: 10px auto;\n}\n\n.cr-slider {\n    padding: 0;\n}\n\n.cr-slider::-webkit-slider-thumb {\n    -webkit-appearance: none; \n    appearance: none;\n    margin-top: -9px;\n    width: 25px; \n    height: 25px; \n    background: var(--primaryColor); \n    cursor: pointer;\n}\n\n.cr-slider::-moz-range-thumb {\n    width: 25px;\n    height: 25px; \n    margin-top: -9px;\n    background: var(--primaryColor); \n    cursor: pointer; \n}\n\n.cr-slider::-webkit-slider-runnable-track{\n\twidth:100%;\n\theight:7px;\n\tbackground:rgba(0,0,0,.5);\n\tborder:0;\n\tborder-radius:3px\n}\n\n.cr-slider::-moz-range-track{\n\twidth:100%;\n\theight:7px;\n\tbackground:rgba(0,0,0,.5);\n\tborder:0;\n\tborder-radius:3px\n}"
  },
  {
    "path": "html/assets/css/overlay/footerOverlay.css",
    "content": ".cookie-footer {\n    display: block;\n    position: fixed;\n    bottom: 5%;\n    padding: 10px;\n    background: var(--overlay);\n    border: 2px solid var( --tagColor);\n    border-radius: 10px;\n    z-index: 100;\n    max-width: -webkit-max-content;\n    max-width: -moz-max-content;\n    max-width: max-content;\n    width: inherit;\n    width: 90%;\n    left: 50%;\n    transform: translate(-50%, 0);\n}\n\n.cookie-footer > .res-separator {\n    margin-top: 5px;\n    margin-bottom: 5px;\n}\n\n.cookie-btn {\n    margin-top: 10px;\n    margin-right: 10px;\n}\n\n.joto-cookie {\n    width: 85px;\n    margin-right: 25px;\n}\n\n/* Mobile adjustments */\n\n@media only screen and (max-width: 600px) {   \n    .cookie-footer span, .cookie-btn {\n        font-size: 12px;\n    }\n\n    .joto-cookie {\n        width: 125px;\n        margin-right: 25px;\n    }\n}"
  },
  {
    "path": "html/assets/css/overlay/imgUploadOverlay.css",
    "content": ".image-search-input {\n    width: 50%;\n    padding: 0px 39px 0px 8px;\n    margin-top: 4px;\n    display: block;\n    border: 1px solid var(--lineColor);\n    background-color: var(--searchBackground);\n}\n\n.image-search-input.disabled {\n    cursor: not-allowed;\n    background: var(--backgroundShadow);\n}\n\n.image-search-input:focus {\n    outline: none;\n}\n\n.image-search-upload {\n    opacity: 0;   \n    margin-left: -40px;\n    margin-right: 10px;\n    width: 30px;\n    margin-top: 5px;\n    cursor: pointer;\n}\n\n.image-search-upload-btn {\n    border: 1px solid var(--lineColor);\n    background: var(--overlay);\n    color: var(--primaryTextColor);\n    padding: 5px;\n    font-size: small;\n    margin-top: 4px;\n}\n"
  },
  {
    "path": "html/assets/css/overlay/notificationOverlay.css",
    "content": "#notifications-container {\n    position: absolute;\n    top: 60px;\n    right: 55px;\n    width: 16rem;\n    z-index: 10;\n    overflow: hidden;\n    border-radius: .5rem;\n    background-color: var(--overlay);\n    color: var(--primaryTextColor);\n    box-shadow: 0px 8px 20px 0px var(--backgroundShadow);\n}\n\n.notifications-info-container {\n    display: flex;\n    flex-flow: column;\n    width: 100%;\n    padding: 1rem;\n    overflow: hidden;\n}\n\n.notification-title {\n    padding: .5rem;\n    margin: -1rem -1rem .5rem;\n    background-color: var(--bgPrimaryColor);\n    background: linear-gradient(45deg,var(--bgPrimaryColor),var(--primaryColor));\n    color: var(--secondaryTextColor);\n    text-align: center;\n    text-transform: uppercase;\n    font-size: large;\n}\n\n:root.dark .notification-title {\n    background: var(--bgPrimaryColor);\n}\n\n#notification-content {\n    margin: .5rem 0;\n    opacity: .85;\n    font-weight: 400;\n}\n\n#no-result {\n    padding-bottom: 10px;\n    text-align: center;\n}\n\n.notification-entry {\n    border-bottom: 1px solid var(--borderColor);\n    padding: 5px;\n    cursor: pointer;\n}\n\n.notification-entry:not(#no-result):hover {\n    background: rgba(0, 0, 0, 0.02);\n}\n\n:root.dark .notification-entry:not(#no-result):hover {\n    background: rgba(255, 255, 255, 0.02);\n}\n\n.entry-title {\n    font-weight: bold;\n    font-size: 16px;\n    text-overflow: ellipsis;\n    overflow: hidden;\n    max-width: 60%;\n}\n\n.date-tag {\n    position: absolute;\n    right: 25px;\n    margin-top: -20px;\n    font-size: 10px;\n}\n\n.content > li {\n    line-height: 15px;\n}\n\n.button-container {\n    display: flex;\n    grid-gap: .5rem;\n    gap: .5rem;\n    margin-top: 10px;\n    margin-bottom: -10px;\n    justify-content: flex-end;\n}\n\n#notificationModal h1 {\n    font-size: 1.5rem;\n}\n\n#notificationModal h2 {\n    font-size: 1.25rem;\n}"
  },
  {
    "path": "html/assets/css/overlay/overlayBase.css",
    "content": ".overlay {\n    z-index: 1;\n    margin-top: 2px;\n    position: absolute;\n    width: 100%;\n    height: -webkit-max-content;\n    height: -moz-max-content;\n    height: max-content;\n    border: 2px solid var(--searchBackground);\n    border-radius: 20px;\n    border-top: none;\n    background-color: var(--searchBackground);\n    box-shadow: 0px 8px 20px 0px var(--backgroundShadow)\n}\n\n.overlay > * {\n    z-index: 2;\n}\n\n.overlay > .flex-column {\n    margin: 5px 10px;\n}\n\n.x-button {\n    position: absolute;\n    right: 10px;\n    font-size: larger;\n}\n\n.x-button:hover {\n    cursor: pointer;\n}\n\n.modal-header {\n    border-bottom-color: var(--lineColor);\n}\n\n:root.dark .modal-header > .close {\n    color: #fff;\n}\n\n:root.dark .modal-header > .close:hover {\n    color: #97a495;\n}\n\n.modal-footer {\n    border-top-color: var(--lineColor);\n}\n\n.modal-content {\n    background-color: var(--overlay);\n}\n\n#default_lang_settings {\n    margin-top: 0.25rem !important;\n}\n\n.form-control.small {\n    width: 100%;\n    height: 20px;\n    padding: 0;\n    padding-left: 1%;\n    display: inline;\n    color: var(--searchTextColor);\n    background: var(--searchBackground);\n    border-color: var(--lineColor);\n}\n\n.form-control.small:focus {\n    box-shadow: unset;\n}\n\n.overlay-button {\n    position: relative;\n    display: inline-flex;\n    flex: auto 0 0;\n    justify-content: center;\n    align-items: center;\n    grid-gap: 4px;\n    gap: 4px;\n    padding: 0 0.625rem;\n    margin: 0;\n    border: none;\n    height: 2rem;\n    font-size: 1rem;\n    border-radius: 4px;\n    cursor: pointer;\n}\n\n.overlay-button {\n    color: var(--buttonText);\n    background-color: var(--buttonBg);\n    flex-grow: 1;\n    outline: 0px solid var(--primaryColor);\n    transition: all .05s ease;\n}\n\n.overlay-button:hover, .overlay-button:active {\n    background-color: var(--buttonBgActive);\n}\n\n.overlay-button:active {\n    outline: 2px solid 
var(--primaryColor);\n}\n"
  },
  {
    "path": "html/assets/css/overlay/radicalOverlay.css",
    "content": "/*\n    Used by the radical picker only.\n*/\n\n.rad-results {\n    padding-left: 5px;\n    padding-top: 10px;\n    min-height: 90px;\n    max-height: 25vh;\n    overflow-y: scroll;\n}\n\n.rad-suggestion-wrapper {\n    width: 100%;\n    border-top: 1px solid var(--borderColor);\n    margin-top: 10px;\n    overflow-x: scroll;\n    scrollbar-width: none;\n    background: var(--itemBG_075);\n}\n\n.rad-suggestion-wrapper::-webkit-scrollbar {\n    width: 0;\n    height: 0;\n}\n\n#suggestion-container-rad {\n    overflow-y: auto;\n    width: -webkit-max-content;\n    width: -moz-max-content;\n    width: max-content;\n}\n\n#suggestion-container-rad > .search-suggestion:first-child {\n    padding-top: 0px !important;\n}\n\n#suggestion-container-rad > .search-suggestion {\n    width: -webkit-max-content !important;\n    width: -moz-max-content !important;\n    width: max-content !important;\n    padding-bottom: 0px !important;\n    padding-left: 10px !important;\n    padding-right: 10px !important;\n}\n\n.rad-page-footer {\n    height: 50px;\n    border-top: 1px solid var(--borderColor);\n}\n\n.overlay.radical > .clickable {\n    padding-left: 5px;\n}\n\n.overlay.radical > .x-button {\n    margin: 1px 5px 5px 5px;\n}\n\n.rad-page-toggle {\n    display: flex;\n    flex-direction: row;\n    height: 28px;\n    margin-top: 10px;\n    margin-bottom: -2px;\n    cursor: default;\n}\n\n.rad-page-toggle:hover {\n    cursor: pointer;\n}\n\n.rad-page-toggle > span {\n    margin-left: 5px;\n    margin-right: 5px;\n    padding-bottom: 1px;\n    font-size: 18px;\n    width: 45px;\n    text-align: center;\n    cursor: pointer;\n}\n\n#r-tc {\n    display: none;\n    text-align: center;\n    text-align: -webkit-center;\n}\n\n#r-tc.show {\n    display: block;\n}\n\n#r-tc > .searchSvg {\n    background-color: var(--searchTextColor);\n    width: 15px;\n    height: 15px;\n    margin-top: 4px;\n    margin-left: auto;\n    margin-right: auto;\n}\n\n.rad-page-toggle > 
span:first-child {\n    margin-left: 20px;\n}\n\n.rad-page-toggle > span.disabled {\n    color: var(--disabledColor);\n}\n\n.rad-page-toggle > span.highlighted {\n    color: var(--primaryColor);\n}\n\n.rad-page-toggle span.selected {\n    color: var(--primaryColor);\n    border-color: var(--primaryColor);\n    border: 2px solid var(--borderColor);\n    border-bottom: unset;\n    background: var(--background);\n    border-radius: 6px 6px 0px 0px;\n}\n\n.rad-kanji-wrapper {\n    border: 2px solid var(--borderColor);\n    margin: 0px -2px -2px -2px;\n    border-radius: 15px;\n    overflow-x: auto;\n}\n\n.rad-kanji-title {\n    font-size: 17px;\n    color: var(--searchTextColor);\n    margin: 5px;\n}\n\n.rad-wrapper {\n    background-color: var(--background);\n}\n\n.kanji-wrapper {\n    background-color: var(--background);\n    margin-top: 10px;\n    margin-bottom: -10px;\n    border-top: 1px solid var(--borderColor);\n    border-top-width: 1px;\n    width: 100%;\n}\n\n.rad-picker {\n    overflow-y: scroll;\n    margin-bottom: -10px;\n    padding: 5px;\n    height: 86px;\n    scrollbar-width: none;\n}\n\n.rad-picker::-webkit-scrollbar {\n    width: 0px;\n}\n\n.rad-btn {\n    display: inline-block;\n    border-radius: 2px;\n    font-size: 24px;\n    text-align: center;\n    text-align: -webkit-center;\n    margin: 1px;\n    height: 36px;\n    width: 36px;\n    padding: 2px 4px;\n    line-height: 1.4;\n}\n\n.rad-btn.picker.selected {\n    color: var(--secondaryTextColor);\n    background-color: var(--bgPrimaryColor);\n    border-radius: 5px;\n}\n\n.rad-btn.picker.disabled {\n    color: var(--lineColor);\n}\n\n.rad-btn.picker.disabled:hover {\n    cursor: unset;\n}\n\n.rad-btn.picker {\n    background-color: none;\n}\n\n.rad-btn.num {\n    color: var(--secondaryTextColor);\n    background: none;\n    min-width: 32px;\n    line-height: 36px;\n    font-weight: bold;\n    font-size: 20px;\n    padding: 0 5px;\n}\n\n.rad-btn.num {\n    background-color: var(--borderColor);\n} 
\n \n.rad-btn:hover:not(.num) {\n    cursor: pointer;\n}\n\n.kanji-search-wrapper > .searchSvg{\n    display: inline-block;\n    margin: 15px 0px 0px 18px;\n    width: 20px;\n    height: 20px;\n    background-color: var(--tagColor);\n}\n\n.kanji-search-wrapper > .btn-search{\n    position: absolute;\n    height: 30px;\n    width: 100px;\n    right: 20px;\n    margin-top: -29px;\n    border-radius: 15px;\n}\n\n#kanji-search {\n    background-color: var(--searchBackground);\n    border: none;\n    border-bottom: 2px solid var(--backgroundShadow);\n    color: var(--searchTextColor);\n    position: absolute;\n    padding-left: 40px;\n    margin-top: 8px;\n    margin-left: 10px;\n    height: 35px;\n    width: 70%;\n    max-width: calc(100% - 175px);\n}\n\n.undoSvg {\n    position: absolute;\n    scale: 1.2;\n    left: calc(70% + 20px);\n    margin-top: 18px;\n}\n\n#kanji-search:focus-visible, #kanji-search:focus {\n    outline: unset;\n    border-bottom: 2px solid var(--tagColor);\n}\n\n/* ---------- Scrollbar changes ---------- */\n\n/* Firefox */\n.rad-results, .rad-picker, .overlay.radical, .rad-page-toggle {\n    scrollbar-width: none;\n}\n\n/* Literally everyone else */\n.rad-results::-webkit-scrollbar, .rad-picker::-webkit-scrollbar, .overlay.radical::-webkit-scrollbar, .rad-page-toggle::-webkit-scrollbar {\n    width: 0px;\n    height: 0px;\n}\n\n/* Mobile Adjustments */\n@media only screen and (max-width: 600px) {\n    \n    body {\n        min-height: 650px;\n    }\n\n    .rad-results {\n        max-height: 20vh !important;\n    }\n\n    .rad-page-toggle {\n        height: 30px;\n        overflow-x: auto;\n        white-space: nowrap;\n        max-width: 90%;\n    }\n\n    .rad-page-toggle > span:first-child {\n        margin-left: 10px;\n        margin-right: 0px;\n    }\n\n    .rad-page-toggle > span:not(:first-child) {\n        width: -webkit-max-content;\n        width: -moz-max-content;\n        width: max-content;\n        padding: 0px 2px 0px 2px;\n    
}\n\n    .rad-page-toggle > span {\n        margin-left: 2%;\n        margin-right: 2%;\n    }\n\n    .undoSvg {\n        scale: 1;\n        right: 132px;\n        left: unset;\n        margin-top: 16px;\n    }\n}"
  },
  {
    "path": "html/assets/css/overlay/settingsOverlay.css",
    "content": "#settingsModal .modal-body {\n    height: 600px;\n    margin: 0;\n    padding: 0;\n}\n\n#settingsModal .choices__list--dropdown .choices__list {\n    max-height: 350px;\n    overflow-y: scroll;\n}\n\n#settingsModal .close {\n    margin: -1rem -1rem -1rem auto;\n    color: var(--secondaryTextColor);\n}\n\n#settingsModal .close:hover {\n    color: #a7a7a7;\n}\n\n#settingsModal .choices:after {\n    right: 15px;\n    margin-left: unset;\n}\n\n#show_anim_speed_settings_slider {\n    font-size: 14px;\n}\n\n.mdl-layout__header-row {\n    padding: 0 40px 0 25px !important;\n}\n\n.mdl-layout__tab {\n    padding: 0 24px !important;\n}\n\n.mdl-layout__tab-bar {\n    width: calc(100% - 56px);\n}\n\n.mdl-layout__header, .mdl-layout__tab-bar {\n    background-color: var(--headerColor);\n}\n\n.mdl-layout__tab-bar-button {\n    background-color: var(--headerScrollBar);\n}\n\n.mdl-layout.is-upgraded .mdl-layout__tab.is-active::after {\n    background-color: var(--bgPrimaryColor);\n}\n\n.page-content {\n    margin: 15px;\n}\n\n.settings-entry {\n    display: flex;\n    flex-direction: row;\n}\n\n.settings-entry.ex {\n    font-size: 0.8em;\n}\n\n.settings-entry.txt-input {\n    margin-top: 5px !important;\n    margin-bottom: -15px !important;\n}\n\n.settings-entry:not(:first-child, .no-gap)  {\n    margin-top: 10px;\n}\n\n.inner-header {\n    font-weight: bold;\n    text-decoration: underline;\n}\n\n.inner-header:first-child {\n    margin-bottom: 5px;\n}\n\n.inner-title {\n    width: 60%;\n    font-size: 15px;\n    align-self: center;\n}\n\n.inner-header:not(:first-child) {\n    margin-bottom: -5px;\n    margin-top: 10px;\n}\n\n.inner-title.display {\n    width: 72%;\n}\n\n.inner-title.txt-input {\n    margin-top: -10px;\n}\n\n.settings-entry.sub > .inner-title {\n    width: 55%;\n    margin-left: 5%;\n}\n\n.settings-entry.sub > .inner-title::before {\n    content: \"↪\";\n    margin-left: -5px;\n    margin-right: 5px;\n}\n\n.mdl-checkbox {\n    width: 
0%;\n}\n\n.mdl-textfield.mdl-js-textfield.is-upgraded {\n    width: 65%;\n    margin: -20px 0;\n    padding-right: 65px;\n}\n\n.mdl-textfield__input {\n    text-align: center;\n}\n\n.mdl-textfield.is-focused .mdl-textfield__label:after {\n    width: calc(100% - 65px);\n}\n\n.mdl-textfield__error {\n    width: 150%;\n    margin-left: -50px;\n}\n\n:root.dark .mdl-textfield__label {\n    color: var(--borderColor);\n}\n\n.mdl-textfield__label:after {\n    background-color: var(--bgPrimaryColor);\n}\n\n#show_anim_speed_settings {\n    margin-top: 10px;\n}\n\n.mdl-textfield__label:after {\n    left: 0px;\n}\n\n.mdl-checkbox.is-checked .mdl-checkbox__box-outline {\n    border-color: var(--primaryColor);\n}\n\n.mdl-checkbox.is-checked .mdl-checkbox__tick-outline {\n    background-color: var(--primaryColor);\n}\n\n:root.dark .mdl-checkbox__box-outline {\n    border-color: var(--borderColor);\n}\n\n:root.dark .mdl-textfield__input {\n    border-color: var(--itemBG);\n}\n\n@media only screen and (max-width: 600px) {\n\t\n    .inner-title {\n        width: 70%;\n    }\n\n    .settings-entry.sub > .inner-title {\n        width: 65%;\n    }\n\n    .inner-title.big {\n        width: 80%;\n    }\n\n    .inner-title.display {\n        width: 76%;\n    }\n\n    .mdl-textfield.mdl-js-textfield {\n        padding-right: 0px !important;\n        left: 7px !important;\n    }\n\n    .mdl-textfield.is-focused .mdl-textfield__label:after {\n        width: 100%;\n    }\n\n    .mdl-textfield__label:after {\n        left: 45%;\n    }\n\n    .slidercontainer.settings {\n        text-align: center;\n    }\n}"
  },
  {
    "path": "html/assets/css/overlay/suggestionOverlay.css",
    "content": "\n#search::-moz-selection {\n    color: var(--primaryTextColor);\n    background-color: var(--secondaryColor);\n}\n\n#search::selection {\n    color: var(--primaryTextColor);\n    background-color: var(--secondaryColor);\n}\n\n.overlay.suggestion {\n    border: unset;\n    width: 76%;\n    margin-left: 17.5%;\n}\n\n.search-suggestion {\n    padding-left: 30px;\n    padding-bottom: 5px;\n    font-size: 16px;\n    cursor: pointer;\n}\n\n.search-suggestion:first-child {\n    padding-top: 5px;\n}\n\n.search-suggestion.selected, .search-suggestion:hover {\n    background: var(--lineColor);\n    font-weight: bold;\n}\n\n.secondary-suggestion {\n    color: var(--tagColor);\n    position: absolute;\n    font-size: 14px;\n    padding-left: 10px;\n    margin-top: 1px;\n}\n\n#shadow-text {\n    position: absolute;\n    pointer-events: none;\n    height: 100%;\n    background: transparent;\n    border: 0;\n    display: block;\n    width: 88%;\n    padding: 13px 32px;\n    font-size: 16px;\n    color: var(--searchTextColor);\n    z-index: 4;\n    opacity: 0.4;\n    overflow: hidden;\n}"
  },
  {
    "path": "html/assets/css/page/aboutPage.css",
    "content": "article {\n    padding-left: 10px;\n}\n\n.about-title {\n    font-size: 22px;\n    font-weight: bold;\n    color: var(--primaryColor); \n}\n\n.joto-wizard {\n    width: 60px;\n    position: relative;\n    margin-top: -37px;\n    margin-left: 10px;\n}\n"
  },
  {
    "path": "html/assets/css/page/errorPage.css",
    "content": "body {\n\tbackground: #f2f1f0;\n\tcolor: #222222;\n\tfont-family: 'Open Sans', sans-serif;\n\tmax-height: 700px;\n\toverflow: hidden;\n}\n\n.err-parent {\n\ttext-align: center;\n\ttext-align: -webkit-center;\n\tdisplay: block;\n\tposition: relative;\n\twidth: 80%;\n\tmargin: 100px auto;\n}\n\n.err-parent > *:not(:first-child) {\n\tmargin-bottom: 10px;\n}\n\n.err-code {\n\tfont-size: 220px;\n\tposition: relative;\n\tdisplay: inline-block;\n\tz-index: 2;\n\theight: 250px;\n\tletter-spacing: 15px;\n}\n\n.txt-primary {\n\ttext-align: center;\n\ttext-align: -webkit-center;\n\tdisplay: block;\n\tposition: relative;\n    padding-bottom: 5px;\n\tletter-spacing: 8px;\n\tfont-size: 3em;\n\tline-height: 80%;\n    text-transform: uppercase;\n}\n\n.txt-secondary {\n\ttext-align: center;\n\ttext-align: -webkit-center;\n\tdisplay: block;\n\tposition: relative;\n\tfont-size: 20px;\n    text-transform: uppercase;\n    padding-bottom: 5px;\n}\n\n.back-btn {\n\tbackground-color: #50c058;\n\tposition: relative;\n\tdisplay: inline-block;\n\twidth: 358px;\n\tpadding: 5px;\n\tz-index: 5;\n\tfont-size: 25px;\n\tmargin: 0 auto;\n\tcolor: white;\n\ttext-decoration: none;\n}\n\n.issue-btn {\n\tposition: relative;\n\tdisplay: inline-block;\n\twidth: 308px;\n\tpadding: 5px;\n\tz-index: 5;\n\tfont-size: 25px;\n\tmargin: 0 auto;\n\tcolor: white;\n\ttext-decoration: none;\n}\n\n.git-logo {\n\tfill: black;\n}\n\na:hover {\n    text-decoration: none;\n    color: white;\n}\n\nhr {\n\tpadding: 0;\n\tborder: none;\n\tborder-top: 5px solid #50c058;\n\tcolor: #50c058;\n\ttext-align: center;\n\ttext-align: -webkit-center;\n\tmargin: 0px auto;\n\twidth: 420px;\n\theight: 10px;\n\tz-index: -10;\n}\n\n/* Mobile adjustments */\n@media only screen and (max-width: 600px) {\n\t\n\t.err-code {\n\t\tfont-size: 150px;\n\t\theight: 150px;\n\t\tmargin-left: 1vw;\n\t}\n\n\t.txt-primary {\n\t\tfont-size: 2.5em;\n\t}\n\n\t.back-btn, hr {\n\t\twidth: 100%;\n\t}\n\n\t.git-logo {\n\t\twidth: 
100%;\n\t}\n}\n\n/* Mobile adjustments */\n@media only screen and (max-width: 300px) {\n\t\n\t.err-code {\n\t\tfont-size: 120px;\n\t\tmargin-top: -30px;\n\t}\n}"
  },
  {
    "path": "html/assets/css/page/footer.css",
    "content": "footer {\n    margin: auto;\n    width: 50%;\n    padding-top: 50px;\n    padding-bottom: 15px;\n    max-width: 1150px;\n    width: 100%;\n    height: -webkit-max-content;\n    height: -moz-max-content;\n    height: max-content;\n}\n\n.ref-row {\n    display: flex;\n    place-content: center;\n    gap: 2em;\n}\n\n.ref-row > .discordSvg, .ref-row > .githubSvg , .ref-row .donationSvg {\n    background-color: var(--searchTextColor);\n}\n\n:root.dark .ref-row > .discordSvg, :root.dark .ref-row > .githubSvg, :root.dark .ref-row .donationSvg {\n    background-color: var(--tagColor);\n}\n\n.ref-row > .donation {\n    position: relative;\n}\n\n.ref-row > .donation > .tooltip {\n    position: absolute;\n    visibility: hidden;\n    width: max-content;\n    max-width: 33vw;\n    text-align: center;\n    border-radius: 6px;\n    padding: 0.5em;\n    z-index: 1;\n    color: #fff;\n    background-color: var(--bgPrimaryColor);\n    box-shadow: 0 0 10px 8px var(--backgroundShadow);\n}\n\n.ref-row > .donation:hover .tooltip {\n    visibility: visible;\n}\n\n.footer-hr {\n    border-top: 1px solid var(--primaryColor);\n    padding-bottom: 10px;\n}\n\n.footer-hr:before {\n    content: '';\n    border-radius: 100%;\n    position: absolute;\n    height: 20px;\n    width: 120px;\n    background: var(--background);\n    margin: -10px;\n    /* left: calc(50% - 50px); */\n    margin-left: -60px;\n    box-shadow: inherit\n}\n\n.footer-hr:after {\n    content: '';\n    border-radius: 100%;\n    position: absolute;\n    height: 10px;\n    width: 10px;\n    background: var(--bgPrimaryColor);\n    margin: -5px;\n    box-shadow: inherit\n}"
  },
  {
    "path": "html/assets/css/page/helpPage.css",
    "content": ".help-joto {\n    width: 100px;\n    position: relative;\n    margin-top: -320px;\n    margin-left: 530px;\n}\n\ndiv > .fat:not(:first-child) {\n    margin-top: 1rem;\n}\n\narticle .fat {\n    margin-top: 1rem;\n}"
  },
  {
    "path": "html/assets/css/page/indexPage.css",
    "content": "/* ----------------- Index Page ----------------- */\n\n.title {\n    padding-top: 5%;\n    padding-bottom: 1.25%;\n    margin: auto;\n    width: 30%;\n}\n\n.titleImg {\n    width: 30vw;\n}\n\nform {\n    -webkit-margin-after: 1em;\n            margin-block-end: 1em;\n}\n\n#searchDiv {\n    max-width: 1000px;\n}\n\n.overlay {\n    margin-top: -14px;\n}\n\n.overlay.suggestion {\n    width: 81%;\n    margin-left: 17.5%;\n}\n\n.x-button {\n    margin-right: 5px;\n}\n\n.index-btn-container {\n    display: flex;\n    place-content: center;\n}\n\n.settingsBtn, .infoBtn, .notificationBtn {\n    position: absolute !important;\n    cursor: pointer;\n}\n\n.settingsBtn {\n    top: 20px;\n    left: 20px;\n}\n\n.notificationBtn {\n    top: 29px;\n    right: 25px;\n}\n\n.notificationPoint {\n    display: none;\n    position: absolute;\n    pointer-events: none;\n    top: 32px;\n    right: 30px;\n    padding: 0.25rem;\n    border-radius: 2rem;\n    z-index: 500;\n    width: 9px;\n    height: 9px;\n    background-color: var(--alert);\n}\n\n.notificationBtn.update ~ .notificationPoint {\n    display: block;\n}\n\n.infoBtn {\n    top: 29px;\n    right: 60px;\n}\n\n.notificationBtn.update, .infoBtn.new {\n    background-color: var(--primaryColor);\n}\n\n.inner-form {\n    border-radius: 20px !important;\n}\n\n.input-field.third-wrap {\n    width: 120px;\n    height: 45px;\n    margin: 11px 4px;\n}\n\n.input-field.third-wrap.rad {\n    width: 180px;\n    height: 45px;\n    margin: 11px 4px;\n}\n\n.btn-search {\n    color: white !important;\n}\n\n.input-field.third-wrap.rad > .btn-search {\n    background: var(--secondaryColor);\n}\n\n.rad-picker-icon {\n    color: white;\n    font-size: 22px;\n    margin-left: 5px;\n}\n\n.searchDivInner form .inner-form .input-field input {\n    width: 96%;\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap {\n    display: unset !important;\n}\n  \n/* Mobile View */\n@media only screen and (max-width: 600px) {\n\n    
@-webkit-keyframes dropdownAnim {\n        from {height: 0px;}\n        to { height: 360%;}\n    }\n\n    @keyframes dropdownAnim {\n        from {height: 0px;}\n        to { height: 360%;}\n    }   \n\n\t.title {\n        padding-top: 15%;\n        width: 72%;\n    }\n    \n    .titleImg {\n        width: 70vw;\n    }\n\n    .choices.main[data-type*=\"select-one\"]:after {\n        border-color: var(--secondaryTextColor) transparent transparent transparent;\n    }\n\n    .choices.main[data-type*=\"select-one\"].is-open:after {\n        border-color: transparent transparent var(--secondaryTextColor) transparent !important;\n    }\n\n    .inner-form > .index-btn-container {\n        position: absolute;\n        top: 75px;\n        width: 50%;\n        place-content: flex-start;\n        z-index: -1;\n    }\n\n    .choices__item.index {\n        color: white !important;\n    }\n\n    .searchDivInner form .inner-form .input-field.first-wrap .choices__inner .choices__list--single .choices__item {\n        display: unset !important;\n        margin-top: 4px;\n    }\n\n    .input-field.first-wrap {\n        background: var(--bgPrimaryColor);\n        border-radius: 3px;\n        height: 45px !important;\n        width: 100% !important;\n        margin-left: 10px;\n    }\n\n    .overlay {\n        z-index: 1;\n    }\n\n    .overlay.suggestion {\n        width: 100%;\n        margin-left: 0%;\n    }\n\n    #suggestion-container {\n        margin: 5px -5px;\n    }\n    \n    .rad-picker {\n        max-height: 44vh;\n    }\n\n    #settingsModal {\n        display: none;\n    }\n\n    main > .index-btn-container {\n        flex-direction: row-reverse;\n        place-content: flex-start;\n    }\n\n    .settingsBtn {\n        display: unset !important;\n        left: 10px !important;\n    }\n\n    .btn-container {\n        display: block !important;\n    }\n\n    .infoBtn, .notificationBtn {\n        display: unset !important;\n    }\n\n    .infoBtn {\n        top: 20px;\n      
  right: 50px;\n    }\n\n    .notificationBtn {\n        top: 20px;\n        right: 15px;\n    }\n\n    .notificationPoint {\n        top: 20px;\n        right: 15px;\n    }\n\n    .input-field.third-wrap:not(.rad) {\n        display: none;\n    }\n    \n    .input-field.third-wrap.rad {\n        width: -webkit-max-content;\n        width: -moz-max-content;\n        width: max-content;\n        margin-top: 9px;\n        padding-right: 15px;\n    }\n\n    .btn-search {\n        border-radius: 3px;\n        padding-left: 10px;\n        padding-right: 10px;\n    }\n\n    .mobile-nav-btn {\n        display: none;\n    }\n\n    footer > div > span {\n        font-size: 15px;\n    }\n\n}"
  },
  {
    "path": "html/assets/css/page/infoPage.css",
    "content": ".help-cat {\n    padding-top: 25px;\n}\n\n.table {\n    display: flex;\n    flex-direction: column;\n    margin-left: 25px;\n}\n\n.row {\n    display: flex;\n    flex-direction: row;\n}\n\n.row span:first-child {\n    color: var(--primaryColor);\n    flex: 0 0 150px;\n}\n\narticle {\n    max-width: 700px;\n}"
  },
  {
    "path": "html/assets/css/page/kanjiPage.css",
    "content": ".kanji-entry.left.detail {\n    width: 155px;\n    padding-left: 25px;\n}\n\n.kanji-entry.right.detail {\n    overflow-x: hidden;\n    padding-left: 35px;\n    width: 100%;\n    max-width: 1000px;\n}\n\n.kanji-preview.x-large {\n    cursor: pointer;\n    font-size: 100px;\n}\n\n.kanji-preview-info {\n    padding-left: 5px;\n}\n\n.kanji-preview-right {\n    max-width: 350px;\n    text-align: right;\n}\n\n.translation.big {\n    font-size: 25px;\n}\n\n.kanji-preview-left {\n    width: 63%;\n    padding-right: 10%;\n}\n\n.kun-reading {\n    padding-left: 10px;\n}\n\n.kun-reading,\n.on-reading {\n    width: 100%;\n}\n\n.speed-tag {\n    gap: 1em;\n    align-items: center;\n}\n\n.kanji-img {\n    position: absolute;\n    pointer-events: none;\n    margin-bottom: -130px;\n    opacity: 0;\n}\n\n.animation-container {\n    padding: 1em 0 1.5em 0 !important;\n    place-content: center;\n}\n\n.animation-controller {\n    margin: 0 1em -1em 0;\n    gap: 0.5em;\n    width: 35%;\n}\n\n.animation-controller .slider {\n    width: 100%;\n}\n\n.animation-group > .l {\n    border-top-left-radius: 15px;\n    border-bottom-left-radius: 15px;\n}\n\n.animation-group > .m {\n    margin-left: 2px;\n    margin-right: 2px;\n    width: 60%;\n}\n\n.animation-group > .m > span {\n    pointer-events: none;\n}\n\n.animation-group > .r {\n    border-top-right-radius: 15px;\n    border-bottom-right-radius: 15px;\n}\n\n.animation-group > button {\n    border: 0px;\n    background: var(--bgPrimaryColor);\n    color: white;\n    width: 20%;\n    height: 2em;\n    line-height: 2;\n}\n\n.reset-btn {\n    width: 24px;\n    position: relative;\n    left: 95px;\n    top: -25px;\n}\n\n.animation-group > button > img {\n    width: 45%;\n}\n\n.compounds-dropdown-parent {\n    width: 97%;\n}\n\n.compounds-dropdown:after {\n    position: absolute;\n    content: \"\";\n    height: 0;\n    width: 0;\n    border-style: solid;\n    border-width: 5px;\n    border-color: var(--tagColor) transparent 
transparent transparent;\n    pointer-events: none;\n    transition: linear 0.2s;\n    right: 0;\n    margin-top: -3px;\n}\n\n.compounds-dropdown.closed:after {\n    border-color: transparent transparent var(--tagColor) transparent;\n    margin-top: -8px;\n}\n\n.compounds-click-area {\n    position: absolute;\n    width: 100%;\n    height: 20px;\n    margin-top: -25px;\n}\n\n.anim-container svg {\n    user-select: none;\n}\n\n.anim-container text {\n    font-size: 8px;\n}\n\n.tree-parent {\n    position: relative;\n    cursor: initial;\n    width: 90%;\n    max-width: 1145px;\n}\n\n#tree-toggle {\n    position: absolute;\n    user-select: none;\n    cursor: pointer;\n    right: 1em;\n    top: 1em;\n    height: 26px;\n    width: 26px;\n    zoom: 1.2; \n}\n\n@media only screen and (max-width: 600px) {\n    #tree-toggle {\n        zoom: 1; \n    }\n}\n\n#tree-toggle {\n    background-color: var(--primaryColor) !important;\n    mask: url(\"/assets/svg/ui/graph_filled.svg\") no-repeat center;\n    -webkit-mask: url(\"/assets/svg/ui/graph_filled.svg\") no-repeat center;\n}\n\n#tree-toggle.detailed {\n    mask: url(\"/assets/svg/ui/graph_empty.svg\") no-repeat center;\n    -webkit-mask: url(\"/assets/svg/ui/graph_empty.svg\") no-repeat center;\n}\n\n#tree-target {\n    align-items: center;\n    text-align: center;\n    padding: 5%;\n    box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 20px 0 rgba(0, 0, 0, 0.19);\n    background-color: var(--overlay);\n}\n\n#tree-target > svg {\n    max-height: 80vh;\n    max-width: 80vw;\n}\n\n#tree-target .link {\n    fill: none;\n    stroke: var(--graphLink);\n    stroke-width: 3px;\n}\n\n#tree-target circle {\n    fill: var(--graphCircle) !important;\n    stroke: var(--graphStroke);\n    stroke-width: 2px;\n}\n\n#tree-target circle.clickable {\n    cursor: pointer;\n}\n\n#tree-target text {\n    fill: var(--graphText);\n    font: 20px sans-serif;\n}\n\n#tree-target path {\n    pointer-events: none;\n    stroke: none;\n    fill-rule: 
nonzero;\n    fill: var(--graphPath);\n    fill-opacity: 1;\n    transform: translateX(-15px) translateY(-15px) scale(0.03);\n}\n\n/* Kanji dark mode */\n:root.dark .stroke-container > svg > path.active {\n    stroke: rgb(211, 207, 201) !important;\n}\n\n:root.dark .stroke-container > svg > path.not.active {\n    stroke: rgb(105, 105, 105) !important;\n}\n\n:root.dark .stroke-container > svg > line {\n    stroke: rgb(92, 92, 92) !important;\n}\n\n:root.dark .stroke-container > svg > circle {\n    fill: rgb(95, 241, 96) !important;\n    opacity: 0.75 !important;\n}\n\n:root.dark .anim-container > svg path:not(.bg) {\n    stroke: var(--primaryTextColor);\n}\n\n:root.dark .anim-container text {\n    fill: var(--primaryTextColor);\n}\n\n/* Everything lower than max size */\n@media only screen and (max-width: 1150px) {\n    .animation-group > button > img {\n        width: 1.5em;\n    }\n\n    .animation-controller {\n        width: 50%;\n    }\n}\n\n/* Small screens */\n@media only screen and (max-width: 600px) {\n    .compounds-parent {\n        flex-direction: column !important;\n    }\n\n    .compounds-dropdown:after {\n        right: -8px;\n    }\n\n    .kun-reading {\n        padding-left: 0px;\n        padding-top: 20px;\n    }\n\n    .main-container > .d-flex {\n        flex-direction: column;\n    }\n\n    .kanji-entry.left.detail {\n        width: 100%;\n        align-self: center;\n        padding-left: 0px;\n    }\n\n    .kanji-preview.x-large {\n        align-self: center;\n    }\n\n    .translation.big {\n        font-size: 20px;\n        padding-bottom: 10px;\n    }\n\n    .kanji-preview-info {\n        padding-left: 0px;\n        text-align: left;\n    }\n\n    .kanji-entry.right.detail {\n        width: unset;\n        padding-left: 0px;\n    }\n\n    .kanji-entry.right.detail > .kanji-entry {\n        flex-direction: column !important;\n    }\n\n    .kanji-preview-left {\n        padding: 0;\n        width: 100%;\n        text-align: center;\n        
text-align: -webkit-center;\n    }\n\n    .rad-parts-parent {\n        display: flex !important;\n        flex-direction: row !important;\n    }\n\n    .notes.stroke {\n        text-align: center;\n        text-align: -webkit-center;\n    }\n\n    .notes.rad {\n        width: 50%;\n    }\n\n    .notes.parts {\n        width: 50%;\n        padding-right: 20px;\n        text-align: right;\n    }\n\n    .kanji-preview-left > .d-flex {\n        padding-left: 10px;\n    }\n\n    .kanji-preview-right {\n        max-width: unset;\n        padding-top: 10px;\n        text-align: left;\n    }\n\n    .notes {\n        padding-left: 10px;\n    }\n\n    .tags.fat {\n        font-size: 15px;\n    }\n\n    .clickable.fat {\n        font-size: 15px;\n    }\n\n    .stroke-container {\n        max-width: 99vw;\n        padding-left: 1vw;\n    }\n\n    .on-reading {\n        padding-top: 10px;\n    }\n\n    .slider {\n        width: 100px;\n        height: 10px;\n    }\n\n    .slider::-webkit-slider-thumb {\n        width: 20px;\n        height: 20px;\n        border-radius: 50%;\n    }\n\n    .slider::-moz-range-thumb {\n        width: 20px;\n        height: 20px;\n        border-radius: 50%;\n    }\n\n    .slider-output {\n        margin-left: -10px;\n    }\n}\n\n/* Very small screens */\n@media only screen and (max-width: 400px) {\n    .animation-group > button > img {\n        width: 1.25em;\n    }\n}\n"
  },
  {
    "path": "html/assets/css/page/multiPage/kana.css",
    "content": ".inline-kana-preview {\n    line-height: 1.1;\n    font-size: xx-large;\n    padding-top: 24px;\n}"
  },
  {
    "path": "html/assets/css/page/multiPage/kanji.css",
    "content": ".kanji-entry {\n    height: -webkit-fit-content;\n    height: -moz-fit-content;\n    height: fit-content;\n    padding-bottom: 10px;\n}\n\n.kanji-entry.right {\n    position: relative;\n    width: 100%;\n}\n\n.kanji-preview {\n    line-height: 1.1;\n    font-size: xx-large;\n}\n\n.translation {\n    font-size: 19px;\n}\n\n.stroke-container {\n    max-width: 80vw;\n    overflow-x: auto;\n}\n\n.animation-container {\n    padding-left: 30%;\n    padding-top: 10px;\n}\n\n.kanji-entry > d-flex.flex-row {\n    flex-flow: row wrap;\n}\n\n.furigana-kanji-container {\n    text-align: center;\n    text-align: -webkit-center;\n    word-spacing: 10px;\n}\n\n.furigana-preview {\n    margin-bottom: 15px;\n}\n\n.draw2 {\n    stroke-dasharray: 1000;\n    stroke-dashoffset: 1000;\n    -webkit-animation: dash2 10s linear forwards;\n            animation: dash2 10s linear forwards;\n}\n\n.kanjisvg:hover {\n    cursor: pointer;\n}\n\n@-webkit-keyframes dash2 {\n    to { stroke-dashoffset: 0;}\n}\n\n@keyframes dash2 {\n    to { stroke-dashoffset: 0;}\n}"
  },
  {
    "path": "html/assets/css/page/multiPage/markdown.css",
    "content": "h1 {\n    font-size: 1.5rem;\n}\n\nh2 {\n    font-size: 1rem;\n}\n\np {\n    font-size: 14px;\n}\n\n.md-center {\n    display: flex;\n    justify-content: center;\n    margin-top: 10px;\n}"
  },
  {
    "path": "html/assets/css/page/namePage.css",
    "content": ".kanji-preview.small {\n    font-size: x-large;\n}"
  },
  {
    "path": "html/assets/css/page/newsPage.css",
    "content": "#news-list {\n    display: flex;\n    flex-direction: column;\n    align-items: center;\n}\n\n.news-container {\n    display: block;\n    width: 80%;\n    margin-top: 2rem;\n    border-radius: 1rem;\n    background-color: rgba(249, 249, 249, 0.75);\n    box-shadow: 0px 8px 20px 0px var(--backgroundShadow); \n}\n\n:root.dark .news-container {\n    background-color: rgba(47, 49, 55, 0.75);\n}\n\n.news-head {\n    background-color: var(--bgPrimaryColor);\n    background: linear-gradient(45deg,var(--bgPrimaryColor),var(--primaryColor));\n    color: var(--secondaryTextColor);\n    width: 100%;\n    height: 50px;\n    border-top-left-radius: 1rem;\n    border-top-right-radius: 1rem;\n    text-align: center;\n    font-size: 25px;\n}\n\n.news-head > span {\n    position: relative;\n    top: 5px;\n}\n\n.news-date {\n    position: relative;\n    text-align: right;\n    margin: 10px 10px -30px;\n    color: var(--tagColor);\n}\n\n@media only screen and (max-width: 600px) {\n    .news-date {\n        margin: 5px 10px -20px;\n    }\n}\n\n.news-body {\n    padding: 1.125rem;\n}"
  },
  {
    "path": "html/assets/css/page/sentencePage.css",
    "content": ".furigana-kanji-container {\n    text-align: left;\n}\n\n.inline-kana-preview.small, .kanji-preview.small, .inline-kana-preview.small {\n    font-size: x-large;\n}\n\n.sentence-translation.original {\n    font-size: 19px;\n    padding-top: 0.2rem;\n}   \n\n.sentence-toggle {\n    width: 100%;\n    margin-top: -10px;\n    margin-bottom: -10px;\n    color: var(--primaryColor);\n    font-family: \"Roboto\";\n    text-align: end;\n    font-size: small;\n    cursor: pointer;\n}\n\n.sentence-share {\n    display: flex;\n    flex-direction: column;\n    gap: 0.5em;\n    width: 100%;\n    font-size: 12px;\n    margin-top: -10px;\n    margin-bottom: -12px;\n    padding-top: 5px;\n    padding-right: 5px;\n    color: var(--primaryColor);\n    -webkit-writing-mode: tb;\n        -ms-writing-mode: tb;\n            writing-mode: tb;\n}\n\n.sentence-share > .searchSvg {\n    cursor: pointer;\n    color: var(--disabledColor);\n    background-color: var(--disabledColor);\n}"
  },
  {
    "path": "html/assets/css/page/wordExtensions/searchAnnotation.css",
    "content": ".search-annotation {\n    color: var(--tagColor);\n    max-height: 150px;\n    margin-bottom: 15px;\n    overflow: auto;\n    display: flex;\n    justify-content: center;\n}\n\n.search-annotation .kanji-preview {\n    line-height: unset;\n}\n\n.search-annotation.no-center {\n    justify-content: unset;\n}\n\n.search-annotation::-webkit-scrollbar {\n    width: 0px;\n}\n\n.search-inflection {\n    padding: 0.5rem 1rem 0.5rem 1rem;\n}\n\n.search-inflection > span > .forms {\n    position: relative;\n    left: 5px;\n    top: 5px;\n}\n\n/* Mobile only */\n@media only screen and (max-width: 600px) {\n    .search-annotation {\n        margin: -15px 0 0 5vw;\n    }\n    \n    .search-inflection {\n        padding: 0.5rem 0rem 0.5rem 0rem;\n        width: 80vw;\n    }\n}"
  },
  {
    "path": "html/assets/css/page/wordExtensions/sentenceReader.css",
    "content": "#sr {\n    justify-content: center;\n}\n\n.sentence-part {\n    color: var(--primaryTextColor) !important;\n    border-bottom: 2px solid var(--lineColor);\n    line-height: 1.1;\n    margin-right: 15px;\n    margin-top: 9px;\n    flex-grow: 0;\n    flex-shrink: 0;\n    overflow: hidden;\n    cursor: pointer;\n}\n\n.sentence-part.selected {\n    border-bottom: 2px solid var(--primaryColor);\n}\n\n.sentence-part:hover {\n    text-decoration: unset;\n    text-shadow: -1px -1px var(--primaryTextColor);\n    color: var(--primaryTextColor) !important; \n}\n\n.sentence-part.inline-kana-preview {\n    margin-top: 19px;\n}\n\n.sentence-part.symbol {\n    border-bottom: 0px;\n    margin-right: 0px;\n    margin-left: 0px;\n}\n\n.sentence-part:not(.symbol) + .symbol {\n    margin-left: -15px;\n    margin-right: 10px;\n}\n\n.sentence-part.particle {\n    color: var(--primaryColor) !important;\n}\n\n.sentence-part.particle:hover {\n    text-shadow: -1px -1px var(--primaryColor);\n}\n\n.sentence-part:empty {\n    padding-top: 19px;\n}\n\n.sentence-part > .furigana-kanji-container {\n    margin-top: 24px;\n}\n\n.sentence-part > .inline-kana-preview {\n    margin-top: 9px;\n}\n\n@media only screen and (max-width: 600px) { \n\n    #sr {\n        scrollbar-width: none;\n        flex-wrap: nowrap;\n    }\n\n    #sr::-webkit-scrollbar {\n        width: 0px;\n    }\n\n    .sentence-part {\n        min-width: -webkit-max-content;\n        min-width: -moz-max-content;\n        min-width: max-content;\n        margin-right: 20px;\n    }\n\n}"
  },
  {
    "path": "html/assets/css/page/wordPage.css",
    "content": ".title-div {\n    text-align: center;\n    text-align: -webkit-center;\n    width: 80%;\n}\n\n.title-div h1 {\n    font-weight: bold;\n    font-size: xx-large;\n}\n\n.title-div h4 {\n    font-size: medium;\n}\n\n.main-tab-select {\n    padding-top: 25px;\n    width: 100%;\n}\n\n.main-tab-select.l {\n    padding-right: 10px;\n    display: flex;\n    justify-content: flex-start;\n}\n\n.main-tab-select.r {\n    padding-left: 10px;\n    display: flex;\n    justify-content: flex-end;\n}\n\n.main-tab-select h2 {\n    font-size: medium;\n    text-decoration: underline;\n    color: var(--primaryColor);\n}\n\n.tab-btn {\n    cursor: pointer;\n    width: -webkit-max-content;\n    width: -moz-max-content;\n    width: max-content;\n}\n\n.entry-min-height-1 {\n    min-height: 90px;\n}\n\n.entry-min-height-2 {\n    min-height: 180px;\n}\n\n.main-info > .d-flex.flex-row {\n    padding-left: 1vw;\n}\n\n.definition-wrapper {\n    max-width: 93%;\n}\n\n/* Example Sentences */\n.example-sentence {\n    padding-top: 5px;\n    margin-bottom: 5px !important;\n    max-width: 90%;\n    overflow-y: hidden;\n}\n\n.example-sentence.collapsed {\n    max-height: 35px;\n}\n\n.example-sentence .wrap div {\n    margin-top: -5px;\n}\n\n.expander {\n    height: 0;\n    width: 0;\n    margin-top: 24px;\n    margin-left: 5px;\n    border-style: solid;\n    border-width: 5px;\n    border-color: var(--tagColor) transparent transparent transparent;\n}\n\n.expander.on {\n    transform: rotate(180deg);\n    transform-origin: top center;\n    margin-top: 29px;\n}\n\n.expander:hover {\n    cursor: pointer;\n}\n\n.tags .kanji-preview {\n    font-size: 15px;\n}\n\n.furigana-preview {\n    font-size: inherit;\n}\n\n.tags .furigana-preview {\n    font-size: 10px;\n    margin-bottom: 5px;\n    margin-top: 2px;\n}\n\n.tags .inline-kana-preview {\n    font-size: 15px;\n    padding-top: 18px;\n}\n\n/* Kanji Stuff Overwrite */\n\n.kanji-entry.left.fixed {\n    min-width: 155px;\n    max-width: 
155px;\n    justify-content: center;\n    align-items: center;\n}\n\n.kanji-preview.large {\n    font-size: 50px;\n    padding-top: 15px;\n}\n\n.kanji-preview.large:hover {\n    text-decoration: none;\n} \n\n.kanji-entry {\n    padding-bottom: 5px;\n}\n\n/* 3-dot Menu */\n\n.dot-menu {\n    width: 100%;\n}\n\n.mdl-menu__item[disabled] + .mdl-menu__item[disabled], .mdl-menu__item:last-child[disabled], .mdl-menu__item:first-child[disabled] {\n    display: none;\n}\n\n.mdl-menu__item[disabled] {\n    height: 10px;\n}\n\n.mdl-menu__item[disabled] > hr {\n    margin: 0px;\n    margin-top: 5px;\n}\n\n#info-dropdown {\n    background-color: var(--background);\n    width: -webkit-max-content;\n    width: -moz-max-content;\n    width: max-content;\n    cursor: pointer;\n}\n\n.info-entry {\n    padding-left: 5px;\n    padding-right: 5px;\n    font-size: 18px;\n    line-height: 1.4;\n    cursor: pointer;\n    text-align: center;\n    display: flex;\n    flex-direction: row;\n    align-items: center;\n    width: auto;\n}\n\n.info-entry:hover {\n    background-color: var(--lineColor);\n}\n\n.info-entry > * {\n    margin: 0 5px;\n}\n\n.info-entry > div {\n    margin-top: 1px;\n}\n\n.info-entry a {\n    vertical-align: middle;\n}\n\n.info-entry > .extra {\n    border-left: 1px solid var(--lineColor);\n    padding-left: 7px;\n    margin-left: 1px;\n}\n\n.info-entry .copySvg {\n    pointer-events: all;\n}\n\n.word-tooltip {\n    position: absolute;\n    cursor: pointer;\n    top: 0px;\n    right: 10px;\n}\n\n.word-tooltip > span {\n    margin-bottom: -20px;\n}\n\n.mdl-menu__container {\n    margin-right: 10px;\n}\n\n/* -------- Words Column -------- */\n\n.word-frequency {\n    margin-top: 10px;\n    width: 100px;\n    clear: right;\n    margin: 4px 0 8px 0;\n    padding: 2px 5px 3px 5px;\n    font-size: 10px;\n    -webkit-font-smoothing: antialiased;\n    background-color: var(--secondaryColor);\n    border-radius: 3px;\n    color: var(--secondaryTextColor);\n    font-weight: 
bold;\n    text-align: center;\n    text-align: -webkit-center;\n}\n\n.word-frequency.common {\n    background-color: var(--bgPrimaryColor);\n}\n\n.kanji-entry .list-entry + .list-entry {\n    padding-top: 5px;\n}\n\n\n/* Pitch Accent Borders */\n.pitch {\n    border-radius: 0px;\n    margin-right: -4px;\n    font-size: large;\n    color: var(--primaryColor);\n}\n\n.pitch.t {\n    border-top: 1px solid var(--tagColor);\n    padding-left: 5px;\n    padding-right: 5px;\n}\n\n.pitch.r {\n    border-right: 1px solid var(--tagColor);\n    margin-right: -6px;\n}\n\n.pitch.b {\n    border-bottom: 1px solid var(--tagColor);\n}\n\n.pitch.b:not(.r) {\n    padding-left: 3px;\n}\n\n/* Info Overlay */\n\n.table.conjugation, .table.collocation {\n    width: 80%;\n    margin-left: 10%;\n}\n\n.table.collocation tr:first-child > th {\n    border-top: 2px solid var(--lineColor) !important;\n}\n\n.table.collocation th, .table.collocation td {\n    width: 50%;\n}\n\n.table {\n    color: var(--primaryTextColor) !important;\n}\n\nthead > tr > th {\n    color: var(--secondaryTextColor) !important;\n    background-color: var(--bgPrimaryColor) !important;\n}\n\ntable th {\n    padding: 0.75rem;\n    border-color: var(--searchTextColor) !important;\n    border-bottom: 2px solid var(--lineColor) !important;\n    border-top: 1px solid var(--lineColor) !important;\n}\n\ntd {\n    border-top: 2px solid var(--lineColor) !important;\n}\n\ntable tr:last-child > td {\n    border-bottom: 2px solid var(--lineColor) !important;\n}\n\n\n/* -------- Kanji Column -------- */\n\n.translation.kanji {\n    padding-top: 10px;\n    padding-left: 10px;\n}"
  },
  {
    "path": "html/assets/css/search/choices.css",
    "content": "/*\n    Used by the search bar only. So much css for a damn text field..\n*/\n\n.choices {\n  position: relative;\n  margin-bottom: 24px;\n  font-size: 16px;\n}\n\n.choices:focus {\n  outline: none;\n}\n\n.choices:last-child {\n  margin-bottom: 0;\n}\n\n.choices__item--choice:hover {\n  color: var(--primaryColor) !important;\n}\n\n.choices[data-type*=\"select-one\"] {\n  cursor: pointer;\n}\n\n.choices[data-type*=\"select-one\"] .choices__inner {\n  padding-bottom: 7.5px;\n}\n\n.choices[data-type*=\"select-one\"]:after {\n  content: \"\";\n  height: 0;\n  width: 0;\n  border-style: solid;\n  border-width: 5px;\n  border-color: var(--tagColor) transparent transparent transparent;\n  position: absolute;\n  right: 30px;\n  top: 50%;\n  margin-top: -2.5px;\n  pointer-events: none;\n}\n\n.choices[data-type*=\"select-one\"].is-open:after {\n  border-color: transparent transparent var(--tagColor) transparent !important;\n  margin-top: -7.5px;\n}\n\n.choices__inner {\n  display: inline-block;\n  vertical-align: top;\n  width: 100%;\n  padding: 7.5px 7.5px 3.75px;\n  border-radius: 2.5px;\n  font-size: 14px;\n  min-height: 44px;\n  overflow: hidden;\n}\n\n.is-open .choices__inner {\n  border-radius: 2.5px 2.5px 0 0;\n}\n\n.choices__list {\n  margin: 0;\n  padding-left: 0;\n  list-style: none;\n}\n\n.choices__list--single {\n  display: inline-block;\n  padding: 4px 16px 4px 4px;\n  width: 100%;\n}\n\n.choices__list--single .choices__item {\n  width: 100%;\n}\n\n.choices__list--dropdown {\n  display: none;\n  z-index: 1;\n  position: absolute;\n  width: 100%;\n  background-color: var(--searchBackground);\n  border: 1px solid var(--searchBackground);\n  top: 100%;\n  margin-top: -1px;\n  border-bottom-left-radius: 2.5px;\n  border-bottom-right-radius: 2.5px;\n  overflow: hidden;\n  word-break: break-all;\n}\n\n.choices__list--dropdown.is-active {\n  display: block;\n}\n\n.choices__list--dropdown .choices__list {\n  position: relative;\n  max-height: 300px;\n  
overflow: auto;\n  overflow: -moz-hidden-unscrollable;\n  -webkit-overflow-scrolling: touch;\n  will-change: scroll-position;\n}\n\n.choices__list--dropdown .choices__item {\n  position: relative;\n  padding: 10px;\n  font-size: 14px;\n}\n\n@media (min-width: 640px) {\n  .choices__list--dropdown .choices__item--selectable {\n    padding-right: 100px;\n  }\n  .choices__list--dropdown .choices__item--selectable:after {\n    content: attr(data-select-text);\n    font-size: 12px;\n    opacity: 0;\n    position: absolute;\n    right: 10px;\n    top: 50%;\n    transform: translateY(-50%);\n  }\n}\n\n.choices__item {\n  cursor: default;\n}\n\n.choices__item--selectable {\n  cursor: pointer;\n}\n\n.choices__item--disabled {\n  cursor: not-allowed;\n  -webkit-user-select: none;\n      -ms-user-select: none;\n          -moz-user-select: none;\n       user-select: none;\n  opacity: 0.5;\n}\n\n.choices__input {\n  display: inline-block;\n  vertical-align: baseline;\n  font-size: 14px;\n  margin-bottom: 5px;\n  border: 0;\n  border-radius: 0;\n  max-width: 100%;\n  padding: 4px 0 4px 2px;\n}\n\n.choices__input:focus {\n  outline: 0;\n}\n\n.choices__button:focus {\n  outline: none;\n}\n\n/* ----------------- Settings specific changes -------------- */\n\n.modal-body .choices {\n  margin-bottom: 0px;\n}\n\n.modal-body .choices__inner {\n  width: -webkit-fit-content;\n  width: fit-content;\n  width: -moz-fit-content;\n}\n\n.modal-body .choices__list.choices__list--single {\n  box-shadow: 0px 1px 2px 0px var(--backgroundShadow);\n  border: 1px solid var(--backgroundShadow);\n  padding-right: 20px;\n}\n\n.modal-body .choices:after {\n  right: 15px;\n  margin-left: unset;\n}\n\n.modal-body .choices__list.choices__list--dropdown {\n  width: -webkit-max-content;\n  width: -moz-max-content;\n  width: max-content;\n  border: 0;\n  margin-top: 2px;\n  border-radius: 4px;\n  box-shadow: 0px 8px 20px 0px var(--backgroundShadow);\n}\n\n.modal-body .choices__list--dropdown .choices__item {\n  
padding: 6px 17px 10px 10px;\n}\n\n@media (min-width: 640px) {\n  .modal-body .choices__list--dropdown .choices__item--selectable {\n    width: -webkit-max-content;\n    width: -moz-max-content;\n    width: max-content;\n    padding-right: 10px;\n  }\n\n  .modal-body .choices ::-webkit-scrollbar {\n    width: 10px;\n  }\n    \n  .modal-body .choices ::-webkit-scrollbar-track {\n    background: var(--lineColor);\n  }\n    \n  .modal-body .choices ::-webkit-scrollbar-thumb {\n    background: var(--itemBG_075);\n  }\n\n  .search-lang-txt {\n    position: absolute;\n    margin-top: -30px;\n    right: 8rem;\n  }\n}\n\n/* ----------------- Search Bar specific changes -------------- */\n\n.searchDivInner form .inner-form {\n  background: var(--searchBackground);\n  display: flex;\n  width: 100%;\n  justify-content: space-between;\n  align-items: center;\n  box-shadow: 0px 8px 20px 0px var(--backgroundShadow);\n  border-radius: 20px;\n}\n\n.searchDivInner form .inner-form .input-field {\n  height: 68px;\n}\n\n.searchDivInner form .inner-form .input-field input {\n  height: 100%;\n  width: 100%;\n  background: transparent;\n  border: 0;\n  display: block;\n  padding: 10px 32px;\n  margin-right: 80px;\n  font-size: 16px;\n  color: var(--searchTextColor);\n}\n\n.searchDivInner form .inner-form .input-field input.placeholder {\n  color: var(--tagColor);\n  font-size: 16px;\n}\n\n.searchDivInner form .inner-form .input-field input:-moz-placeholder {\n  color: var(--tagColor);\n  font-size: 16px;\n}\n\n.searchDivInner form .inner-form .input-field input::-webkit-input-placeholder {\n  color: var(--tagColor);\n  font-size: 16px;\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap {\n  width: 200px;\n  border-right: 1px solid var(--lineColor);\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap .choices__inner {\n  background: transparent;\n  border-radius: 0;\n  border: 0;\n  height: 100%;\n  display: flex;\n  align-items: center;\n  padding: 10px 
30px;\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap .choices__inner .choices__list.choices__list--single {\n  display: flex;\n  padding: 0;\n  align-items: center;\n  height: 100%;\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap .choices__inner .choices__item.choices__item--selectable.choices__placeholder {\n  display: flex;\n  align-items: center;\n  height: 100%;\n  opacity: 1;\n  color: var(--tagColor);\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap .choices__inner .choices__list--single .choices__item {\n  display: flex;\n  align-items: center;\n  height: 100%;\n  color: var(--searchTextColor);\n}\n\n.searchDivInner form .inner-form .input-field input:hover, .searchDivInner form .inner-form .input-field input:focus {\n  box-shadow: none;\n  outline: 0;\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap .choices__list.choices__list--dropdown {\n  border: 0;\n  margin-top: 2px;\n  border-radius: 4px;\n  box-shadow: 0px 8px 20px 0px var(--backgroundShadow);\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap .choices__list.choices__list--dropdown .choices__item--selectable {\n  padding-right: 0;\n}\n\n.searchDivInner form .inner-form .input-field.first-wrap .choices__list--dropdown .choices__item {\n  color: var(--searchTextColor);\n  min-height: 24px;\n}\n\n.searchDivInner form .inner-form .input-field.second-wrap {\n  flex-grow: 1;\n}\n\n.searchDivInner form .inner-form .input-field.third-wrap {\n  /* width: 74px; */\n  width: 30px;\n}\n\n.btn-search {\n  height: 100%;\n  width: 100%;\n  white-space: nowrap;\n  border: 0;\n  cursor: pointer;\n  color: var(--searchBackground);\n  background: var(--bgPrimaryColor);\n  transition: all .2s ease-out, color .2s ease-out;\n}\n\n.btn-search svg {\n  width: 16px;\n}\n\n.btn-search:hover {\n  background: var(--primaryColor);\n}\n\n.btn-search:focus {\n  outline: 0;\n  box-shadow: none;\n}\n\n.searchDivInner form .inner-form .input-field .input-select 
{\n  height: 100%;\n}\n\n.searchDivInner form .inner-form .input-field .input-select .choices {\n  height: 100%;\n}\n\n.searchDivInner form .inner-form .input-field {\n  height: 50px;\n}\n\n.btn-search > svg > path {\n  fill: var(--secondaryTextColor);\n}\n\n.searchDivInner .choices__list--dropdown > .choices__list {\n  margin-left: 10px;\n}\n\n.choices__list.choices__list--dropdown.index.is-active {\n  -webkit-animation: dropdownAnim 0.2s linear forwards;\n          animation: dropdownAnim 0.2s linear forwards;\n}\n\n.choices__list.choices__list--dropdown.index.animate:not(.is-active) {\n  display: unset !important;\n  -webkit-animation: dropdownAnimClose 0.2s linear forwards;\n          animation: dropdownAnimClose 0.2s linear forwards;\n}\n\n@-webkit-keyframes dropdownAnim {\n  from {height: 0px;}\n  to { height: 330%;}\n}\n\n@keyframes dropdownAnim {\n  from {height: 0px;}\n  to { height: 330%;}\n}\n\n@-webkit-keyframes dropdownAnimClose {\n  from {height: 330%; }\n  to { height: 0%; display:hidden !important;}\n}\n\n@keyframes dropdownAnimClose {\n  from {height: 330%; }\n  to { height: 0%; display:hidden !important;}\n}\n\n.choices.main[data-type*=\"select-one\"]:after {\n  transition: linear 0.2s;\n}\n\n.choices.main[data-type*=\"select-one\"].is-open:after {\n  transition: linear 0.2s;\n}\n\n/* Mobile Only */\n@media only screen and (max-width: 600px) {\n\n  .choices[data-type*=\"select-one\"]:after {\n    right: 14px;\n  }\n\n}\n\n/* Desktop Only */\n@media only screen and (min-width: 600px) {\n\n  .searchDivInner form .inner-form .input-field.first-wrap .choices__list.choices__list--dropdown {\n      border-radius: 20px !important;\n      border-top-right-radius: 0px !important;\n  }\n\n}\n"
  },
  {
    "path": "html/assets/css/search/searchRow.css",
    "content": "#search-row {\n    padding-top: 10px;\n    padding-left: 10px;\n    padding-right: 10px;\n}\n\n#emptyInput {\n    position: absolute;\n    height: 30px;\n    margin-top: 7px; \n    right: 70px;\n}\n\n#emptyInput:focus {\n    outline: none;\n    box-shadow: none;\n}\n\n#emptyInput > svg {\n    width: 20px;\n    fill: var(--tagColor);\n}\n\n#search-vl {\n    position: absolute;\n    border-left: 1px solid var(--lineColor);\n    height: 70%;\n    margin-top: 7px;\n    right: 75px;\n}\n\n.search-embedded-btn:focus {\n    outline: none;\n    box-shadow: none;\n}\n\n.search-embedded-btn {\n    background: unset;\n    position: absolute;\n    height: 30px;\n    margin-top: 11px;\n    right: 10px;\n    width: unset;\n    border: none;\n}\n\n.search-embedded-btn.search {\n    width: 30px;\n    right: 14px;\n    display: none;\n}\n\n#voiceBtn.search-embedded-btn {\n    display: none;\n}\n\n.search-embedded-btn.search > svg > path, #voiceBtn.search-embedded-btn > svg > path {\n    fill: var(--primaryColor);\n}\n\n.search-embedded-btn.radical {\n    right: 38px;\n}\n\n.search-embedded-btn.radical > span {\n    font-size: 20px;\n    text-shadow: 0px 0px var(--tagColor);\n    color: var(--tagColor);\n}\n\n.input-group {\n    height: 100%;\n}\n\nsvg {\n    fill: var(--searchTextColor);\n}\n\n.kanjisvg > path {\n    stroke: var(--primaryTextColor);\n}\n\n.d-flex.center {\n    justify-content: center;\n}\n\n.btn-container {\n    position: absolute;\n    width: 100%;\n    top: 0;\n    text-align: center;\n    text-align: -webkit-center;\n}\n\n.btn-container div {\n    float: left;\n    width: 30px;\n}\n\n.btn-container.rad {\n    margin-top: -8px;\n    margin-right: -7px;\n}\n\n.settingsBtn, .infoBtn, .homeBtn {\n    position: absolute;\n    cursor: pointer;\n    z-index: 15;\n}\n\n.settingsBtn {\n    top: 22px;\n    left: 20px;\n    padding-top: 12px;\n    padding-bottom: 0px;\n}\n\n.infoBtn {\n    top: 21px;\n    right: 3em;\n}\n\n.homeBtn {\n    font-family: 
'Material Icons';\n    font-size: 27px;\n    top: 16px;\n    right: 0.5em;\n    color: var(--tagColor);\n}\n\n.homeBtn.mobile {\n    position: relative;\n    font-size: 38px;\n    top: 0;\n    left: 0;\n    right: 0;\n}\n\n.rad-picker-icon {\n    font-size: 33px;\n    color: var(--searchTextColor);\n}\n\n.rad-picker-txt {\n    margin-top: -9px;\n}\n\n#searchDiv {\n    z-index: 1;\n    margin-left: 15px;\n    width: 100%;\n    max-width: 1150px;\n    position: relative;\n}\n\n#searchDiv.index {\n    max-width: 1150px;\n    margin-left: 0px;\n}\n \n.d-flex.left {\n    flex-direction: row;\n    justify-content: left;\n}\n\n/* Adjustments for the upper buttons */\n@media only screen and (max-width: 1350px) {\n    #searchDiv {\n        width: 90%;\n        padding-right: 30px;\n    }\n}\n@media only screen and (max-width: 875px) {\n    #searchDiv {\n        padding-right: 45px;\n    }\n    .homeBtn {\n        right: 0.25em;\n    }\n    .infoBtn {\n        right: 2.5em;\n    }\n}\n@media only screen and (max-width: 600px) {\n    #searchDiv {\n        width: 100%;\n        padding-right: 0px;\n    }\n}\n"
  },
  {
    "path": "html/assets/css/tools/alerts.css",
    "content": "/* ----------------- Alerts Color Design ----------------- */\n.msg-message {\n    border: none !important;\n    border-radius: 15px !important;  \n    text-shadow: none !important;\n}\n\n.msg-warning {\n    background-color: rgba(195,195,195,0.95) !important; \n    border-color: rgba(195,195,195,0.95) !important;\n} \n\n.mdl-tooltip {\n    font-size: 12px;\n}"
  },
  {
    "path": "html/assets/css/tools/pagination.css",
    "content": ".pagination {\n    display: flex;\n    list-style: none;\n    justify-content: center;\n    padding: 0px 0 25px 0;\n    margin-top: -45px;\n}\n\n.pagination-item {\n    font-family: 'Roboto', sans-serif;\n    display: flex;\n    padding-left: 0;\n    list-style: none;\n    border-radius: .25rem;\n    background-color: none;\n}\n\n.pagination-item.disabled .pagination-circle {\n    cursor: unset;\n    color: var(--tagColor);\n}\n\n.pagination-item.disabled .pagination-circle:not(.active):hover {\n    animation: none !important;\n    -webkit-animation: none !important;\n}\n\n.pagination-circle {\n    color: var(--primaryTextColor);\n    background: none;\n    margin-right: 2px;\n    margin-left: 2px;\n    line-height: 1.25;\n    padding: .5rem .75rem;\n    font-size: .9rem;\n    border: 0;\n    border-radius: 50%;\n    outline: 0 !important;\n}\n\n.pagination-circle:not(.active):hover {\n    -webkit-animation: hoverEffect 0.5s forwards;\n            animation: hoverEffect 0.5s forwards;\n}\n\n.pagination-circle.active {\n    color: var(--secondaryTextColor);\n    background: var(--bgPrimaryColor);\n    border-radius: 50%;\n    box-shadow: 0 2px 5px 0 rgb(0 0 0 / 16%), 0 2px 10px 0 rgb(0 0 0 / 12%);\n}\n\n@-webkit-keyframes hoverEffect {\n    to {\n        background-color: var(--lineColor);\n    }\n}\n\n@keyframes hoverEffect {\n    to {\n        background-color: var(--lineColor);\n    }\n}\n"
  },
  {
    "path": "html/assets/css/tools/ripple.css",
    "content": ".has-ripple{position:relative;overflow:hidden;-webkit-transform:translate3d(0,0,0);-o-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}.ripple-a{display:block;position:absolute;pointer-events:none;border-radius:50%;-webkit-transform:scale(0);-o-transform:scale(0);transform:scale(0);background:#fff;opacity:1}.ripple-animate{-webkit-animation:ripple;-o-animation:ripple;animation:ripple}@-webkit-keyframes ripple{100%{opacity:0;-webkit-transform:scale(2);transform:scale(2)}}@-o-keyframes ripple{100%{opacity:0;-o-transform:scale(2);transform:scale(2)}}@keyframes ripple{100%{opacity:0;transform:scale(2)}}\n"
  },
  {
    "path": "html/assets/docs.html",
    "content": "<html>\n  <head>\n    <meta charset=\"utf-8\">\n    <script type=\"module\" src=\"https://unpkg.com/rapidoc/dist/rapidoc-min.js\"></script>\n    <style>\n    rapi-doc::part(section-servers) { /* <<< targets the server div */\n      color: #888888;\n      margin:0 24px 0 24px;\n      border-radius: 5px;\n    }\n    rapi-doc::part(label-selected-server) { /* <<< targets selected server label */\n      color: #888888;\n    }\n    rapi-doc{ /* <<< targets selected server label */\n      color: #888888;\n    }\n  </style>\n  </head>\n  <body>\n    <rapi-doc id=\"thedoc\" render-style = \"read\" allow-try=\"true\" theme='dark' allow-authentication = \"false\" show-header = \"false\"\n    primary-color =\"#34A83C\"\n    bg-color = \"#202324\"\n\n    > </rapi-doc>\n    \n    <script>\n      document.addEventListener('DOMContentLoaded', (event) => {\n        let docEl = document.getElementById(\"thedoc\");\n        let strSpec = `\n            {\n  \"swagger\": \"2.0\",\n  \"info\": {\n    \"description\": \"Jotoba search API<br><br>(Note: Jotoba doesn't own any of the provided resources! Please refer to https://jotoba.de/about for a list of used resources and their licenses. 
If you're using data provided by jotoba, you have to acknowledge the authors and creators of the used resource and follow the terms and conditions of the original license)\",\n    \"version\": \"0.1\",\n    \"title\": \"Jotoba\"\n  },\n  \"host\": \"jotoba.de\",\n  \"tags\": [\n    {\n      \"name\": \"Search\",\n      \"description\": \"Search endpoints to address jotoba from own applications\"\n    },\n    {\n      \"name\": \"Image\",\n      \"description\": \"Image text detection\"\n    },\n    {\n      \"name\": \"Radicals\",\n      \"description\": \"Radical API\"\n    },\n    {\n      \"name\": \"Completion\",\n      \"description\": \"Search completion related endpoints\"\n    }\n  ],\n  \"paths\": {\n    \"/api/img_scan\": {\n      \"post\": {\n        \"tags\": [\n          \"Image\"\n        ],\n        \"summary\": \"Get japanese text from an image\",\n        \"consumes\": [\n          \"multipart/form-data\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"name\": \"upfile\",\n            \"in\": \"formData\",\n            \"description\": \"The image to scan. Can be either jpg or png. Max 2MB.\",\n            \"required\": false,\n            \"type\": \"file\"\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/ImgScanResponse\"\n            }\n          },\n          \"400\": {\n            \"description\": \"Bad request. 
Might occur if the query is empty or the provided image is malformed\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"500\": {\n            \"description\": \"Internal server error\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/search/words\": {\n      \"post\": {\n        \"tags\": [\n          \"Search\"\n        ],\n        \"summary\": \"Search for words\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/RequestPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/WordResponse\"\n            }\n          },\n          \"400\": {\n            \"description\": \"Bad request. Might occur if the query is empty\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"408\": {\n            \"description\": \"Timeout. 
Occurs if search takes too long\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"500\": {\n            \"description\": \"Internal server error\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/search/names\": {\n      \"post\": {\n        \"tags\": [\n          \"Search\"\n        ],\n        \"summary\": \"Search for japanese names\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/RequestPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/NameResponse\"\n            }\n          },\n          \"400\": {\n            \"description\": \"Bad request. Might occur if the query is empty\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"408\": {\n            \"description\": \"Timeout. 
Occurs if search takes too long\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"500\": {\n            \"description\": \"Internal server error\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/search/kanji\": {\n      \"post\": {\n        \"tags\": [\n          \"Search\"\n        ],\n        \"summary\": \"Search for kanji\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/RequestPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/KanjiResponse\"\n            }\n          },\n          \"400\": {\n            \"description\": \"Bad request. Might occur if the query is empty\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"408\": {\n            \"description\": \"Timeout. 
Occurs if search takes too long\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"500\": {\n            \"description\": \"Internal server error\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/search/sentences\": {\n      \"post\": {\n        \"tags\": [\n          \"Search\"\n        ],\n        \"summary\": \"Search for sentences\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/RequestPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/SentenceResponse\"\n            }\n          },\n          \"400\": {\n            \"description\": \"Bad request. Might occur if the query is empty\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"408\": {\n            \"description\": \"Timeout. 
Occurs if search takes too long\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"500\": {\n            \"description\": \"Internal server error\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/kanji/by_radical\": {\n      \"post\": {\n        \"tags\": [\n          \"Radicals\"\n        ],\n        \"summary\": \"Search kanji by its radicals\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/RadicalsPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/RadicalsResponse\"\n            }\n          },\n          \"400\": {\n            \"description\": \"Bad request. 
Might occur if no radicals were passed\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"500\": {\n            \"description\": \"Internal server error\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/radical/search\": {\n      \"post\": {\n        \"tags\": [\n          \"Radicals\"\n        ],\n        \"summary\": \"Search radicals\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/RadicalSearchPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/RadicalSearchResponse\"\n            }\n          },\n          \"400\": {\n            \"description\": \"Bad request. 
Might occur if no value was passed\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"500\": {\n            \"description\": \"Internal server error\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/suggestion\": {\n      \"post\": {\n        \"tags\": [\n          \"Completion\"\n        ],\n        \"summary\": \"Retrieve word completions for search queries\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/CompletionPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/CompletionResponse\"\n            }\n          },\n          \"400\": {\n            \"description\": \"Bad request. Might occur if no radicals were passed\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"408\": {\n            \"description\": \"Timeout. 
Occurs if suggestion takes too long\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          },\n          \"500\": {\n            \"description\": \"Internal server error\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/news/short\": {\n      \"post\": {\n        \"tags\": [\n          \"News\"\n        ],\n        \"summary\": \"Retrieve latest Jotoba news in short form\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/ShortNewsPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/ShortNewsResponse\"\n            }\n          }\n        }\n      }\n    },\n    \"/api/news/detailed\": {\n      \"post\": {\n        \"tags\": [\n          \"News\"\n        ],\n        \"summary\": \"Retrieve single news entry detailed\",\n        \"consumes\": [\n          \"application/json\"\n        ],\n        \"produces\": [\n          \"application/json\"\n        ],\n        \"parameters\": [\n          {\n            \"in\": \"body\",\n            \"name\": \"body\",\n            \"required\": true,\n            \"schema\": {\n              \"$ref\": \"#/definitions/DetailedNewsPayload\"\n            }\n          }\n        ],\n        \"responses\": {\n          \"200\": {\n            \"description\": \"Success response\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/DetailedNewsResponse\"\n            }\n          },\n          \"404\": {\n            
\"description\": \"ID was not found\",\n            \"schema\": {\n              \"$ref\": \"#/definitions/Error\"\n            }\n          }\n        }\n      }\n    }\n  },\n  \"definitions\": {\n    \"ShortNewsPayload\": {\n      \"type\": \"object\",\n      \"required\": [\n        \"after\"\n      ],\n      \"properties\": {\n        \"after\": {\n          \"type\": \"integer\",\n          \"example\": 1637499806,\n          \"description\": \"Show news after\"\n        }\n      }\n    },\n    \"DetailedNewsPayload\": {\n      \"type\": \"object\",\n      \"required\": [\n        \"id\"\n      ],\n      \"properties\": {\n        \"id\": {\n          \"type\": \"integer\",\n          \"example\": 3,\n          \"description\": \"ID of the news entry\"\n        }\n      }\n    },\n    \"ShortNewsResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"entries\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"$ref\": \"#/definitions/NewsEntry\"\n          }\n        }\n      }\n    },\n    \"DetailedNewsResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"entry\": {\n          \"$ref\": \"#/definitions/NewsEntry\"\n        }\n      }\n    },\n    \"NewsEntry\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"id\": {\n          \"type\": \"integer\",\n          \"example\": 3,\n          \"description\": \"The ID of the news entry\"\n        },\n        \"title\": {\n          \"type\": \"string\",\n          \"example\": \"V1.1\",\n          \"description\": \"The news entry title\"\n        },\n        \"html\": {\n          \"type\": \"string\",\n          \"example\": \"<h1 id='featuring'>Featuring</h1><br><br><ul><br><li>New radical-picker</li><br><br><li>Image to text sear</li><br></ul><br>\",\n          \"description\": \"The HTML formatted news content\"\n        },\n        \"creation_time\": {\n          \"type\": \"integer\",\n          \"example\": 
1637499806,\n          \"description\": \"The unix timestamp of the time the given news entry was released\"\n        },\n        \"trimmed\": {\n          \"type\": \"boolean\",\n          \"description\": \"Whether the html content was trimmed or not\"\n        }\n      }\n    },\n    \"RequestPayload\": {\n      \"type\": \"object\",\n      \"required\": [\n        \"query\"\n      ],\n      \"properties\": {\n        \"query\": {\n          \"type\": \"string\",\n          \"example\": \"東京\",\n          \"description\": \"The search query\"\n        },\n        \"language\": {\n          \"$ref\": \"#/definitions/Language\"\n        },\n        \"no_english\": {\n          \"type\": \"boolean\",\n          \"example\": false,\n          \"description\": \"Does not return english results if the provided language differs from english\",\n          \"default\": false\n        }\n      }\n    },\n    \"Error\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"code\": {\n          \"type\": \"integer\",\n          \"example\": 400,\n          \"description\": \"Error code\"\n        },\n        \"error\": {\n          \"type\": \"string\",\n          \"example\": \"BadRequest\",\n          \"description\": \"Error description\"\n        },\n        \"message\": {\n          \"type\": \"string\",\n          \"example\": \"Bad request\",\n          \"description\": \"Error information\"\n        }\n      }\n    },\n    \"WordResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"kanji\": {\n          \"type\": \"array\",\n          \"description\": \"Kanji used to write words found in search\",\n          \"items\": {\n            \"$ref\": \"#/definitions/Kanji\"\n          }\n        },\n        \"words\": {\n          \"type\": \"array\",\n          \"description\": \"Words found in search\",\n          \"items\": {\n            \"$ref\": \"#/definitions/Word\"\n          }\n        }\n      }\n    },\n    \"Kanji\": {\n 
     \"type\": \"object\",\n      \"properties\": {\n        \"literal\": {\n          \"type\": \"string\",\n          \"example\": \"今\"\n        },\n        \"meanings\": {\n          \"type\": \"array\",\n          \"example\": [\n            \"now\"\n          ],\n          \"items\": {\n            \"type\": \"string\"\n          }\n        },\n        \"grade\": {\n          \"type\": \"integer\",\n          \"example\": 2\n        },\n        \"stroke_count\": {\n          \"type\": \"integer\",\n          \"example\": 4\n        },\n        \"frequency\": {\n          \"type\": \"integer\",\n          \"example\": 49\n        },\n        \"jlpt\": {\n          \"type\": \"integer\",\n          \"example\": 5\n        },\n        \"onyomi\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"type\": \"string\",\n            \"example\": \"\"\n          }\n        },\n        \"kunyomi\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"type\": \"string\",\n            \"example\": \"\"\n          }\n        },\n        \"chinese\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"type\": \"string\",\n            \"example\": \"\"\n          }\n        },\n        \"korean_r\": {\n          \"type\": \"array\",\n          \"description\": \"Korean reading romanized\",\n          \"items\": {\n            \"type\": \"string\",\n            \"example\": \"\"\n          }\n        },\n        \"korean_h\": {\n          \"type\": \"array\",\n          \"description\": \"Korean reading(s) in hangul\",\n          \"items\": {\n            \"type\": \"string\",\n            \"example\": \"\"\n          }\n        },\n        \"parts\": {\n          \"type\": \"array\",\n          \"description\": \"Parts used to construct the kanji. 
(only available in kanji search)\",\n          \"items\": {\n            \"type\": \"string\"\n          }\n        },\n        \"radical\": {\n          \"type\": \"string\",\n          \"description\": \"(only available in kanji search)\"\n        },\n        \"stroke_frames\": {\n          \"type\": \"string\",\n          \"description\": \"Path to the stroke order svg image. (only available in kanji search)\"\n        }\n      }\n    },\n    \"Word\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"reading\": {\n          \"$ref\": \"#/definitions/Reading\"\n        },\n        \"common\": {\n          \"type\": \"boolean\",\n          \"description\": \"Whether the word is a common word or not\"\n        },\n        \"senses\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"$ref\": \"#/definitions/Sense\"\n          }\n        },\n        \"audio\": {\n          \"type\": \"string\",\n          \"example\": \"/assets/audio/走る【はしる】.ogg\",\n          \"description\": \"Path of the audio file for the given word. 
Only provided if audio file exists\"\n        },\n        \"pitch\": {\n          \"type\": \"array\",\n          \"description\": \"Pitch accent of the word\",\n          \"items\": {\n            \"$ref\": \"#/definitions/PitchItem\"\n          }\n        }\n      }\n    },\n    \"Reading\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"kana\": {\n          \"type\": \"string\",\n          \"example\": \"はしる\",\n          \"description\": \"The kana reading\"\n        },\n        \"kanji\": {\n          \"type\": \"string\",\n          \"example\": \"走る\",\n          \"description\": \"The kanji reading (if available)\"\n        },\n        \"furigana\": {\n          \"type\": \"string\",\n          \"example\": \"[走|はし]る\",\n          \"description\": \"An encoded string representing the furigana parts of the kanji reading\"\n        }\n      }\n    },\n    \"Sense\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"glosses\": {\n          \"type\": \"array\",\n          \"example\": [\n            \"to run\"\n          ],\n          \"description\": \"Equal meanings of the japanese word in the specified other language\",\n          \"items\": {\n            \"type\": \"string\"\n          }\n        },\n        \"pos\": {\n          \"type\": \"array\",\n          \"description\": \"Part of speech of the provided glosses\",\n          \"items\": {\n            \"type\": \"string\"\n          }\n        },\n        \"language\": {\n          \"$ref\": \"#/definitions/Language\"\n        }\n      }\n    },\n    \"PitchItem\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"part\": {\n          \"type\": \"string\",\n          \"example\": \"かお\",\n          \"description\": \"A part of the kana reading with the same pitch\"\n        },\n        \"high\": {\n          \"type\": \"boolean\",\n          \"example\": false,\n          \"description\": \"Whether its a high or low pitch\"\n        }\n      },\n  
    \"description\": \"A part of a Japanese word with the same pitch\"\n    },\n    \"KanjiResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"kanji\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"$ref\": \"#/definitions/Kanji\"\n          }\n        }\n      }\n    },\n    \"NameResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"names\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"$ref\": \"#/definitions/Name\"\n          }\n        }\n      }\n    },\n    \"Name\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"kana\": {\n          \"type\": \"string\",\n          \"example\": \"らん\"\n        },\n        \"kanji\": {\n          \"type\": \"string\",\n          \"example\": \"走\"\n        },\n        \"transcription\": {\n          \"type\": \"string\",\n          \"example\": \"Ran\"\n        },\n        \"name_type\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"type\": \"string\",\n            \"enum\": [\n              \"Company\",\n              \"Female\",\n              \"Male\",\n              \"Place\",\n              \"Given\",\n              \"Organization\",\n              \"Person\",\n              \"Product\",\n              \"RailwayStation\",\n              \"Surname\",\n              \"Unclassified\",\n              \"Work\"\n            ]\n          }\n        }\n      }\n    },\n    \"SentenceResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"sentences\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"$ref\": \"#/definitions/Sentence\"\n          }\n        }\n      }\n    },\n    \"Language\": {\n      \"type\": \"string\",\n      \"enum\": [\n        \"English\",\n        \"German\",\n        \"Spanish\",\n        \"Russain\",\n        \"Swedish\",\n        \"French\",\n        \"Dutch\",\n        \"Hungarian\",\n        
\"Slovenian\"\n      ]\n    },\n    \"Sentence\": {\n      \"properties\": {\n        \"content\": {\n          \"type\": \"string\",\n          \"example\": \"いい天気です\"\n        },\n        \"furigana\": {\n          \"type\": \"string\",\n          \"example\": \"いい[天気|てん|き]です\"\n        },\n        \"translation\": {\n          \"type\": \"string\",\n          \"example\": \"It is a nice day\"\n        },\n        \"language\": {\n          \"type\": \"string\",\n          \"example\": \"English\"\n        }\n      }\n    },\n    \"RadicalsPayload\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"radicals\": {\n          \"type\": \"array\",\n          \"example\": [\n            \"山\",\n            \"一\",\n            \"冂\",\n            \"干\"\n          ],\n          \"items\": {\n            \"type\": \"string\"\n          }\n        }\n      },\n      \"description\": \"Payload for kanji-by-radicals search\"\n    },\n    \"RadicalSearchPayload\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"query\": {\n          \"type\": \"string\",\n          \"example\": \"heart\"\n        }\n      },\n      \"description\": \"Payload for radical search\"\n    },\n    \"RadicalsResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"kanji\": {\n          \"type\": \"array\",\n          \"description\": \"All kanji which can be built using the provided radicals. 
The keys of the objects are the stroke counts\",\n          \"items\": {\n            \"type\": \"string\"\n          }\n        },\n        \"possible_radicals\": {\n          \"type\": \"array\",\n          \"example\": [\n            \"小\",\n            \"岡\",\n            \"幺\",\n            \"糸\"\n          ],\n          \"description\": \"Left over radicals which will have a non empty kanji result\",\n          \"items\": {\n            \"type\": \"string\"\n          }\n        }\n      },\n      \"description\": \"Kanji-by-radicals response\"\n    },\n    \"RadicalSearchResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"4\": {\n          \"type\": \"array\",\n          \"description\": \"Radical search result\",\n          \"items\": {\n            \"$ref\": \"#/definitions/RadicalSearchItem\"\n          }\n        }\n      },\n      \"description\": \"Radical search response\"\n    },\n    \"RadicalSearchItem\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"l\": {\n          \"type\": \"string\",\n          \"example\": \"心\",\n          \"description\": \"A kanji literal\"\n        }\n      },\n      \"description\": \"Radical search response item\"\n    },\n    \"CompletionPayload\": {\n      \"type\": \"object\",\n      \"required\": [\n        \"input\",\n        \"lang\",\n        \"search_type\"\n      ],\n      \"properties\": {\n        \"input\": {\n          \"type\": \"string\",\n          \"example\": \"東\",\n          \"description\": \"Current search query to find completions for\"\n        },\n        \"lang\": {\n          \"type\": \"string\",\n          \"enum\": [\n            \"en-US\",\n            \"de-DE\",\n            \"es-ES\",\n            \"fr-FR\",\n            \"nl-NL\",\n            \"sv-SE\",\n            \"ru\",\n            \"hu\",\n            \"sl-SI\"\n          ]\n        },\n        \"search_type\": {\n          \"$ref\": \"#/definitions/SearchType\"\n        },\n       
 \"radicals\": {\n          \"type\": \"array\",\n          \"example\": [\n            \"⺡\"\n          ],\n          \"description\": \"Restrict results to certain radicals. This field is optional\",\n          \"items\": {\n            \"type\": \"string\"\n          }\n        }\n      },\n      \"description\": \"Payload for completion request\"\n    },\n    \"CompletionResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"suggestions\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"$ref\": \"#/definitions/Suggestion\"\n          }\n        },\n        \"suggestion_type\": {\n          \"type\": \"string\",\n          \"description\": \"Type of suggestion\",\n          \"default\": \"Default\",\n          \"enum\": [\n            \"Default\",\n            \"KanjiReading\"\n          ]\n        }\n      },\n      \"description\": \"Completion response\"\n    },\n    \"Suggestion\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"primary\": {\n          \"type\": \"string\",\n          \"example\": \"ねこ\"\n        },\n        \"secondary\": {\n          \"type\": \"string\",\n          \"example\": \"猫\"\n        }\n      },\n      \"description\": \"A single suggestion item\"\n    },\n    \"ImgScanResponse\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"text\": {\n          \"type\": \"string\",\n          \"example\": \"音楽が大好き\"\n        }\n      },\n      \"description\": \"Sucess response for retrieving text from an image\"\n    },\n    \"SearchType\": {\n      \"type\": \"string\",\n      \"description\": \"Type of search (Words, Kanji, Sentences, Names)\",\n      \"enum\": [\n        \"0\",\n        \"1\",\n        \"2\",\n        \"3\"\n      ]\n    }\n  }\n}\n              `;\n        let objSpec = JSON.parse(strSpec);\n        docEl.loadSpec(objSpec);\n      })\n    </script>\n  </body>\n</html>\n\n"
  },
  {
    "path": "html/assets/fonts/fonts.css",
    "content": "@font-face {\n  font-family: 'Roboto';\n  font-style: normal;\n  font-weight: 300;\n  font-display: swap;\n  src: url(\"roboto.woff2\") format('woff2');\n  unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD;\n}\n\n@font-face {\n  font-family: 'Material Icons';\n  font-style: normal;\n  font-weight: 400;\n  src: url(\"materialFont.woff2\") format('woff2');\n}\n\n.material-icons {\n  font-family: 'Material Icons';\n  font-weight: normal;\n  font-style: normal;\n  font-size: 24px;\n  line-height: 1;\n  letter-spacing: normal;\n  text-transform: none;\n  display: inline-block;\n  white-space: nowrap;\n  word-wrap: normal;\n  direction: ltr;\n  -webkit-font-feature-settings: 'liga';\n  -webkit-font-smoothing: antialiased;\n}"
  },
  {
    "path": "html/assets/js/lib/d3.js",
    "content": "!function(){function n(n){return n&&(n.ownerDocument||n.document||n).documentElement}function t(n){return n&&(n.ownerDocument&&n.ownerDocument.defaultView||n.document&&n||n.defaultView)}function e(n,t){return t>n?-1:n>t?1:n>=t?0:NaN}function r(n){return null===n?NaN:+n}function i(n){return!isNaN(n)}function u(n){return{left:function(t,e,r,i){for(arguments.length<3&&(r=0),arguments.length<4&&(i=t.length);i>r;){var u=r+i>>>1;n(t[u],e)<0?r=u+1:i=u}return r},right:function(t,e,r,i){for(arguments.length<3&&(r=0),arguments.length<4&&(i=t.length);i>r;){var u=r+i>>>1;n(t[u],e)>0?i=u:r=u+1}return r}}}function o(n){return n.length}function a(n){for(var t=1;n*t%1;)t*=10;return t}function l(n,t){for(var e in t)Object.defineProperty(n.prototype,e,{value:t[e],enumerable:!1})}function c(){this._=Object.create(null)}function f(n){return(n+=\"\")===bo||n[0]===_o?_o+n:n}function s(n){return(n+=\"\")[0]===_o?n.slice(1):n}function h(n){return f(n)in this._}function p(n){return(n=f(n))in this._&&delete this._[n]}function g(){var n=[];for(var t in this._)n.push(s(t));return n}function v(){var n=0;for(var t in this._)++n;return n}function d(){for(var n in this._)return!1;return!0}function y(){this._=Object.create(null)}function m(n){return n}function M(n,t,e){return function(){var r=e.apply(t,arguments);return r===t?n:r}}function x(n,t){if(t in n)return t;t=t.charAt(0).toUpperCase()+t.slice(1);for(var e=0,r=wo.length;r>e;++e){var i=wo[e]+t;if(i in n)return i}}function b(){}function _(){}function w(n){function t(){for(var t,r=e,i=-1,u=r.length;++i<u;)(t=r[i].on)&&t.apply(this,arguments);return n}var e=[],r=new c;return t.on=function(t,i){var u,o=r.get(t);return arguments.length<2?o&&o.on:(o&&(o.on=null,e=e.slice(0,u=e.indexOf(o)).concat(e.slice(u+1)),r.remove(t)),i&&e.push(r.set(t,{on:i})),n)},t}function S(){ao.event.preventDefault()}function k(){for(var n,t=ao.event;n=t.sourceEvent;)t=n;return t}function N(n){for(var t=new 
_,e=0,r=arguments.length;++e<r;)t[arguments[e]]=w(t);return t.of=function(e,r){return function(i){try{var u=i.sourceEvent=ao.event;i.target=n,ao.event=i,t[i.type].apply(e,r)}finally{ao.event=u}}},t}function E(n){return ko(n,Co),n}function A(n){return\"function\"==typeof n?n:function(){return No(n,this)}}function C(n){return\"function\"==typeof n?n:function(){return Eo(n,this)}}function z(n,t){function e(){this.removeAttribute(n)}function r(){this.removeAttributeNS(n.space,n.local)}function i(){this.setAttribute(n,t)}function u(){this.setAttributeNS(n.space,n.local,t)}function o(){var e=t.apply(this,arguments);null==e?this.removeAttribute(n):this.setAttribute(n,e)}function a(){var e=t.apply(this,arguments);null==e?this.removeAttributeNS(n.space,n.local):this.setAttributeNS(n.space,n.local,e)}return n=ao.ns.qualify(n),null==t?n.local?r:e:\"function\"==typeof t?n.local?a:o:n.local?u:i}function L(n){return n.trim().replace(/\\s+/g,\" \")}function q(n){return new RegExp(\"(?:^|\\\\s+)\"+ao.requote(n)+\"(?:\\\\s+|$)\",\"g\")}function T(n){return(n+\"\").trim().split(/^|\\s+/)}function R(n,t){function e(){for(var e=-1;++e<i;)n[e](this,t)}function r(){for(var e=-1,r=t.apply(this,arguments);++e<i;)n[e](this,r)}n=T(n).map(D);var i=n.length;return\"function\"==typeof t?r:e}function D(n){var t=q(n);return function(e,r){if(i=e.classList)return r?i.add(n):i.remove(n);var i=e.getAttribute(\"class\")||\"\";r?(t.lastIndex=0,t.test(i)||e.setAttribute(\"class\",L(i+\" \"+n))):e.setAttribute(\"class\",L(i.replace(t,\" \")))}}function P(n,t,e){function r(){this.style.removeProperty(n)}function i(){this.style.setProperty(n,t,e)}function u(){var r=t.apply(this,arguments);null==r?this.style.removeProperty(n):this.style.setProperty(n,r,e)}return null==t?r:\"function\"==typeof t?u:i}function U(n,t){function e(){delete this[n]}function r(){this[n]=t}function i(){var e=t.apply(this,arguments);null==e?delete this[n]:this[n]=e}return null==t?e:\"function\"==typeof t?i:r}function j(n){function 
t(){var t=this.ownerDocument,e=this.namespaceURI;return e===zo&&t.documentElement.namespaceURI===zo?t.createElement(n):t.createElementNS(e,n)}function e(){return this.ownerDocument.createElementNS(n.space,n.local)}return\"function\"==typeof n?n:(n=ao.ns.qualify(n)).local?e:t}function F(){var n=this.parentNode;n&&n.removeChild(this)}function H(n){return{__data__:n}}function O(n){return function(){return Ao(this,n)}}function I(n){return arguments.length||(n=e),function(t,e){return t&&e?n(t.__data__,e.__data__):!t-!e}}function Y(n,t){for(var e=0,r=n.length;r>e;e++)for(var i,u=n[e],o=0,a=u.length;a>o;o++)(i=u[o])&&t(i,o,e);return n}function Z(n){return ko(n,qo),n}function V(n){var t,e;return function(r,i,u){var o,a=n[u].update,l=a.length;for(u!=e&&(e=u,t=0),i>=t&&(t=i+1);!(o=a[t])&&++t<l;);return o}}function X(n,t,e){function r(){var t=this[o];t&&(this.removeEventListener(n,t,t.$),delete this[o])}function i(){var i=l(t,co(arguments));r.call(this),this.addEventListener(n,this[o]=i,i.$=e),i._=t}function u(){var t,e=new RegExp(\"^__on([^.]+)\"+ao.requote(n)+\"$\");for(var r in this)if(t=r.match(e)){var i=this[r];this.removeEventListener(t[1],i,i.$),delete this[r]}}var o=\"__on\"+n,a=n.indexOf(\".\"),l=$;a>0&&(n=n.slice(0,a));var c=To.get(n);return c&&(n=c,l=B),a?t?i:r:t?b:u}function $(n,t){return function(e){var r=ao.event;ao.event=e,t[0]=this.__data__;try{n.apply(this,t)}finally{ao.event=r}}}function B(n,t){var e=$(n,t);return function(n){var t=this,r=n.relatedTarget;r&&(r===t||8&r.compareDocumentPosition(t))||e.call(t,n)}}function W(e){var r=\".dragsuppress-\"+ ++Do,i=\"click\"+r,u=ao.select(t(e)).on(\"touchmove\"+r,S).on(\"dragstart\"+r,S).on(\"selectstart\"+r,S);if(null==Ro&&(Ro=\"onselectstart\"in e?!1:x(e.style,\"userSelect\")),Ro){var o=n(e).style,a=o[Ro];o[Ro]=\"none\"}return function(n){if(u.on(r,null),Ro&&(o[Ro]=a),n){var t=function(){u.on(i,null)};u.on(i,function(){S(),t()},!0),setTimeout(t,0)}}}function J(n,e){e.changedTouches&&(e=e.changedTouches[0]);var 
r=n.ownerSVGElement||n;if(r.createSVGPoint){var i=r.createSVGPoint();if(0>Po){var u=t(n);if(u.scrollX||u.scrollY){r=ao.select(\"body\").append(\"svg\").style({position:\"absolute\",top:0,left:0,margin:0,padding:0,border:\"none\"},\"important\");var o=r[0][0].getScreenCTM();Po=!(o.f||o.e),r.remove()}}return Po?(i.x=e.pageX,i.y=e.pageY):(i.x=e.clientX,i.y=e.clientY),i=i.matrixTransform(n.getScreenCTM().inverse()),[i.x,i.y]}var a=n.getBoundingClientRect();return[e.clientX-a.left-n.clientLeft,e.clientY-a.top-n.clientTop]}function G(){return ao.event.changedTouches[0].identifier}function K(n){return n>0?1:0>n?-1:0}function Q(n,t,e){return(t[0]-n[0])*(e[1]-n[1])-(t[1]-n[1])*(e[0]-n[0])}function nn(n){return n>1?0:-1>n?Fo:Math.acos(n)}function tn(n){return n>1?Io:-1>n?-Io:Math.asin(n)}function en(n){return((n=Math.exp(n))-1/n)/2}function rn(n){return((n=Math.exp(n))+1/n)/2}function un(n){return((n=Math.exp(2*n))-1)/(n+1)}function on(n){return(n=Math.sin(n/2))*n}function an(){}function ln(n,t,e){return this instanceof ln?(this.h=+n,this.s=+t,void(this.l=+e)):arguments.length<2?n instanceof ln?new ln(n.h,n.s,n.l):_n(\"\"+n,wn,ln):new ln(n,t,e)}function cn(n,t,e){function r(n){return n>360?n-=360:0>n&&(n+=360),60>n?u+(o-u)*n/60:180>n?o:240>n?u+(o-u)*(240-n)/60:u}function i(n){return Math.round(255*r(n))}var u,o;return n=isNaN(n)?0:(n%=360)<0?n+360:n,t=isNaN(t)?0:0>t?0:t>1?1:t,e=0>e?0:e>1?1:e,o=.5>=e?e*(1+t):e+t-e*t,u=2*e-o,new mn(i(n+120),i(n),i(n-120))}function fn(n,t,e){return this instanceof fn?(this.h=+n,this.c=+t,void(this.l=+e)):arguments.length<2?n instanceof fn?new fn(n.h,n.c,n.l):n instanceof hn?gn(n.l,n.a,n.b):gn((n=Sn((n=ao.rgb(n)).r,n.g,n.b)).l,n.a,n.b):new fn(n,t,e)}function sn(n,t,e){return isNaN(n)&&(n=0),isNaN(t)&&(t=0),new hn(e,Math.cos(n*=Yo)*t,Math.sin(n)*t)}function hn(n,t,e){return this instanceof hn?(this.l=+n,this.a=+t,void(this.b=+e)):arguments.length<2?n instanceof hn?new hn(n.l,n.a,n.b):n instanceof fn?sn(n.h,n.c,n.l):Sn((n=mn(n)).r,n.g,n.b):new 
hn(n,t,e)}function pn(n,t,e){var r=(n+16)/116,i=r+t/500,u=r-e/200;return i=vn(i)*na,r=vn(r)*ta,u=vn(u)*ea,new mn(yn(3.2404542*i-1.5371385*r-.4985314*u),yn(-.969266*i+1.8760108*r+.041556*u),yn(.0556434*i-.2040259*r+1.0572252*u))}function gn(n,t,e){return n>0?new fn(Math.atan2(e,t)*Zo,Math.sqrt(t*t+e*e),n):new fn(NaN,NaN,n)}function vn(n){return n>.206893034?n*n*n:(n-4/29)/7.787037}function dn(n){return n>.008856?Math.pow(n,1/3):7.787037*n+4/29}function yn(n){return Math.round(255*(.00304>=n?12.92*n:1.055*Math.pow(n,1/2.4)-.055))}function mn(n,t,e){return this instanceof mn?(this.r=~~n,this.g=~~t,void(this.b=~~e)):arguments.length<2?n instanceof mn?new mn(n.r,n.g,n.b):_n(\"\"+n,mn,cn):new mn(n,t,e)}function Mn(n){return new mn(n>>16,n>>8&255,255&n)}function xn(n){return Mn(n)+\"\"}function bn(n){return 16>n?\"0\"+Math.max(0,n).toString(16):Math.min(255,n).toString(16)}function _n(n,t,e){var r,i,u,o=0,a=0,l=0;if(r=/([a-z]+)\\((.*)\\)/.exec(n=n.toLowerCase()))switch(i=r[2].split(\",\"),r[1]){case\"hsl\":return e(parseFloat(i[0]),parseFloat(i[1])/100,parseFloat(i[2])/100);case\"rgb\":return t(Nn(i[0]),Nn(i[1]),Nn(i[2]))}return(u=ua.get(n))?t(u.r,u.g,u.b):(null==n||\"#\"!==n.charAt(0)||isNaN(u=parseInt(n.slice(1),16))||(4===n.length?(o=(3840&u)>>4,o=o>>4|o,a=240&u,a=a>>4|a,l=15&u,l=l<<4|l):7===n.length&&(o=(16711680&u)>>16,a=(65280&u)>>8,l=255&u)),t(o,a,l))}function wn(n,t,e){var r,i,u=Math.min(n/=255,t/=255,e/=255),o=Math.max(n,t,e),a=o-u,l=(o+u)/2;return a?(i=.5>l?a/(o+u):a/(2-o-u),r=n==o?(t-e)/a+(e>t?6:0):t==o?(e-n)/a+2:(n-t)/a+4,r*=60):(r=NaN,i=l>0&&1>l?0:r),new ln(r,i,l)}function Sn(n,t,e){n=kn(n),t=kn(t),e=kn(e);var r=dn((.4124564*n+.3575761*t+.1804375*e)/na),i=dn((.2126729*n+.7151522*t+.072175*e)/ta),u=dn((.0193339*n+.119192*t+.9503041*e)/ea);return hn(116*i-16,500*(r-i),200*(i-u))}function kn(n){return(n/=255)<=.04045?n/12.92:Math.pow((n+.055)/1.055,2.4)}function Nn(n){var t=parseFloat(n);return\"%\"===n.charAt(n.length-1)?Math.round(2.55*t):t}function 
En(n){return\"function\"==typeof n?n:function(){return n}}function An(n){return function(t,e,r){return 2===arguments.length&&\"function\"==typeof e&&(r=e,e=null),Cn(t,e,n,r)}}function Cn(n,t,e,r){function i(){var n,t=l.status;if(!t&&Ln(l)||t>=200&&300>t||304===t){try{n=e.call(u,l)}catch(r){return void o.error.call(u,r)}o.load.call(u,n)}else o.error.call(u,l)}var u={},o=ao.dispatch(\"beforesend\",\"progress\",\"load\",\"error\"),a={},l=new XMLHttpRequest,c=null;return!this.XDomainRequest||\"withCredentials\"in l||!/^(http(s)?:)?\\/\\//.test(n)||(l=new XDomainRequest),\"onload\"in l?l.onload=l.onerror=i:l.onreadystatechange=function(){l.readyState>3&&i()},l.onprogress=function(n){var t=ao.event;ao.event=n;try{o.progress.call(u,l)}finally{ao.event=t}},u.header=function(n,t){return n=(n+\"\").toLowerCase(),arguments.length<2?a[n]:(null==t?delete a[n]:a[n]=t+\"\",u)},u.mimeType=function(n){return arguments.length?(t=null==n?null:n+\"\",u):t},u.responseType=function(n){return arguments.length?(c=n,u):c},u.response=function(n){return e=n,u},[\"get\",\"post\"].forEach(function(n){u[n]=function(){return u.send.apply(u,[n].concat(co(arguments)))}}),u.send=function(e,r,i){if(2===arguments.length&&\"function\"==typeof r&&(i=r,r=null),l.open(e,n,!0),null==t||\"accept\"in a||(a.accept=t+\",*/*\"),l.setRequestHeader)for(var f in a)l.setRequestHeader(f,a[f]);return null!=t&&l.overrideMimeType&&l.overrideMimeType(t),null!=c&&(l.responseType=c),null!=i&&u.on(\"error\",i).on(\"load\",function(n){i(null,n)}),o.beforesend.call(u,l),l.send(null==r?null:r),u},u.abort=function(){return l.abort(),u},ao.rebind(u,o,\"on\"),null==r?u:u.get(zn(r))}function zn(n){return 1===n.length?function(t,e){n(null==t?e:null)}:n}function Ln(n){var t=n.responseType;return t&&\"text\"!==t?n.response:n.responseText}function qn(n,t,e){var r=arguments.length;2>r&&(t=0),3>r&&(e=Date.now());var i=e+t,u={c:n,t:i,n:null};return aa?aa.n=u:oa=u,aa=u,la||(ca=clearTimeout(ca),la=1,fa(Tn)),u}function Tn(){var 
n=Rn(),t=Dn()-n;t>24?(isFinite(t)&&(clearTimeout(ca),ca=setTimeout(Tn,t)),la=0):(la=1,fa(Tn))}function Rn(){for(var n=Date.now(),t=oa;t;)n>=t.t&&t.c(n-t.t)&&(t.c=null),t=t.n;return n}function Dn(){for(var n,t=oa,e=1/0;t;)t.c?(t.t<e&&(e=t.t),t=(n=t).n):t=n?n.n=t.n:oa=t.n;return aa=n,e}function Pn(n,t){return t-(n?Math.ceil(Math.log(n)/Math.LN10):1)}function Un(n,t){var e=Math.pow(10,3*xo(8-t));return{scale:t>8?function(n){return n/e}:function(n){return n*e},symbol:n}}function jn(n){var t=n.decimal,e=n.thousands,r=n.grouping,i=n.currency,u=r&&e?function(n,t){for(var i=n.length,u=[],o=0,a=r[0],l=0;i>0&&a>0&&(l+a+1>t&&(a=Math.max(1,t-l)),u.push(n.substring(i-=a,i+a)),!((l+=a+1)>t));)a=r[o=(o+1)%r.length];return u.reverse().join(e)}:m;return function(n){var e=ha.exec(n),r=e[1]||\" \",o=e[2]||\">\",a=e[3]||\"-\",l=e[4]||\"\",c=e[5],f=+e[6],s=e[7],h=e[8],p=e[9],g=1,v=\"\",d=\"\",y=!1,m=!0;switch(h&&(h=+h.substring(1)),(c||\"0\"===r&&\"=\"===o)&&(c=r=\"0\",o=\"=\"),p){case\"n\":s=!0,p=\"g\";break;case\"%\":g=100,d=\"%\",p=\"f\";break;case\"p\":g=100,d=\"%\",p=\"r\";break;case\"b\":case\"o\":case\"x\":case\"X\":\"#\"===l&&(v=\"0\"+p.toLowerCase());case\"c\":m=!1;case\"d\":y=!0,h=0;break;case\"s\":g=-1,p=\"r\"}\"$\"===l&&(v=i[0],d=i[1]),\"r\"!=p||h||(p=\"g\"),null!=h&&(\"g\"==p?h=Math.max(1,Math.min(21,h)):\"e\"!=p&&\"f\"!=p||(h=Math.max(0,Math.min(20,h)))),p=pa.get(p)||Fn;var M=c&&s;return function(n){var e=d;if(y&&n%1)return\"\";var i=0>n||0===n&&0>1/n?(n=-n,\"-\"):\"-\"===a?\"\":a;if(0>g){var l=ao.formatPrefix(n,h);n=l.scale(n),e=l.symbol+d}else n*=g;n=p(n,h);var x,b,_=n.lastIndexOf(\".\");if(0>_){var w=m?n.lastIndexOf(\"e\"):-1;0>w?(x=n,b=\"\"):(x=n.substring(0,w),b=n.substring(w))}else x=n.substring(0,_),b=t+n.substring(_+1);!c&&s&&(x=u(x,1/0));var S=v.length+x.length+b.length+(M?0:i.length),k=f>S?new Array(S=f-S+1).join(r):\"\";return 
M&&(x=u(k+x,k.length?f-b.length:1/0)),i+=v,n=x+b,(\"<\"===o?i+n+k:\">\"===o?k+i+n:\"^\"===o?k.substring(0,S>>=1)+i+n+k.substring(S):i+(M?n:k+n))+e}}}function Fn(n){return n+\"\"}function Hn(){this._=new Date(arguments.length>1?Date.UTC.apply(this,arguments):arguments[0])}function On(n,t,e){function r(t){var e=n(t),r=u(e,1);return r-t>t-e?e:r}function i(e){return t(e=n(new va(e-1)),1),e}function u(n,e){return t(n=new va(+n),e),n}function o(n,r,u){var o=i(n),a=[];if(u>1)for(;r>o;)e(o)%u||a.push(new Date(+o)),t(o,1);else for(;r>o;)a.push(new Date(+o)),t(o,1);return a}function a(n,t,e){try{va=Hn;var r=new Hn;return r._=n,o(r,t,e)}finally{va=Date}}n.floor=n,n.round=r,n.ceil=i,n.offset=u,n.range=o;var l=n.utc=In(n);return l.floor=l,l.round=In(r),l.ceil=In(i),l.offset=In(u),l.range=a,n}function In(n){return function(t,e){try{va=Hn;var r=new Hn;return r._=t,n(r,e)._}finally{va=Date}}}function Yn(n){function t(n){function t(t){for(var e,i,u,o=[],a=-1,l=0;++a<r;)37===n.charCodeAt(a)&&(o.push(n.slice(l,a)),null!=(i=ya[e=n.charAt(++a)])&&(e=n.charAt(++a)),(u=A[e])&&(e=u(t,null==i?\"e\"===e?\" \":\"0\":i)),o.push(e),l=a+1);return o.push(n.slice(l,a)),o.join(\"\")}var r=n.length;return t.parse=function(t){var r={y:1900,m:0,d:1,H:0,M:0,S:0,L:0,Z:null},i=e(r,n,t,0);if(i!=t.length)return null;\"p\"in r&&(r.H=r.H%12+12*r.p);var u=null!=r.Z&&va!==Hn,o=new(u?Hn:va);return\"j\"in r?o.setFullYear(r.y,0,r.j):\"W\"in r||\"U\"in r?(\"w\"in r||(r.w=\"W\"in r?1:0),o.setFullYear(r.y,0,1),o.setFullYear(r.y,0,\"W\"in r?(r.w+6)%7+7*r.W-(o.getDay()+5)%7:r.w+7*r.U-(o.getDay()+6)%7)):o.setFullYear(r.y,r.m,r.d),o.setHours(r.H+(r.Z/100|0),r.M+r.Z%100,r.S,r.L),u?o._:o},t.toString=function(){return n},t}function e(n,t,e,r){for(var i,u,o,a=0,l=t.length,c=e.length;l>a;){if(r>=c)return-1;if(i=t.charCodeAt(a++),37===i){if(o=t.charAt(a++),u=C[o in ya?t.charAt(a++):o],!u||(r=u(n,e,r))<0)return-1}else if(i!=e.charCodeAt(r++))return-1}return r}function r(n,t,e){_.lastIndex=0;var r=_.exec(t.slice(e));return 
r?(n.w=w.get(r[0].toLowerCase()),e+r[0].length):-1}function i(n,t,e){x.lastIndex=0;var r=x.exec(t.slice(e));return r?(n.w=b.get(r[0].toLowerCase()),e+r[0].length):-1}function u(n,t,e){N.lastIndex=0;var r=N.exec(t.slice(e));return r?(n.m=E.get(r[0].toLowerCase()),e+r[0].length):-1}function o(n,t,e){S.lastIndex=0;var r=S.exec(t.slice(e));return r?(n.m=k.get(r[0].toLowerCase()),e+r[0].length):-1}function a(n,t,r){return e(n,A.c.toString(),t,r)}function l(n,t,r){return e(n,A.x.toString(),t,r)}function c(n,t,r){return e(n,A.X.toString(),t,r)}function f(n,t,e){var r=M.get(t.slice(e,e+=2).toLowerCase());return null==r?-1:(n.p=r,e)}var s=n.dateTime,h=n.date,p=n.time,g=n.periods,v=n.days,d=n.shortDays,y=n.months,m=n.shortMonths;t.utc=function(n){function e(n){try{va=Hn;var t=new va;return t._=n,r(t)}finally{va=Date}}var r=t(n);return e.parse=function(n){try{va=Hn;var t=r.parse(n);return t&&t._}finally{va=Date}},e.toString=r.toString,e},t.multi=t.utc.multi=ct;var M=ao.map(),x=Vn(v),b=Xn(v),_=Vn(d),w=Xn(d),S=Vn(y),k=Xn(y),N=Vn(m),E=Xn(m);g.forEach(function(n,t){M.set(n.toLowerCase(),t)});var A={a:function(n){return d[n.getDay()]},A:function(n){return v[n.getDay()]},b:function(n){return m[n.getMonth()]},B:function(n){return y[n.getMonth()]},c:t(s),d:function(n,t){return Zn(n.getDate(),t,2)},e:function(n,t){return Zn(n.getDate(),t,2)},H:function(n,t){return Zn(n.getHours(),t,2)},I:function(n,t){return Zn(n.getHours()%12||12,t,2)},j:function(n,t){return Zn(1+ga.dayOfYear(n),t,3)},L:function(n,t){return Zn(n.getMilliseconds(),t,3)},m:function(n,t){return Zn(n.getMonth()+1,t,2)},M:function(n,t){return Zn(n.getMinutes(),t,2)},p:function(n){return g[+(n.getHours()>=12)]},S:function(n,t){return Zn(n.getSeconds(),t,2)},U:function(n,t){return Zn(ga.sundayOfYear(n),t,2)},w:function(n){return n.getDay()},W:function(n,t){return Zn(ga.mondayOfYear(n),t,2)},x:t(h),X:t(p),y:function(n,t){return Zn(n.getFullYear()%100,t,2)},Y:function(n,t){return 
Zn(n.getFullYear()%1e4,t,4)},Z:at,\"%\":function(){return\"%\"}},C={a:r,A:i,b:u,B:o,c:a,d:tt,e:tt,H:rt,I:rt,j:et,L:ot,m:nt,M:it,p:f,S:ut,U:Bn,w:$n,W:Wn,x:l,X:c,y:Gn,Y:Jn,Z:Kn,\"%\":lt};return t}function Zn(n,t,e){var r=0>n?\"-\":\"\",i=(r?-n:n)+\"\",u=i.length;return r+(e>u?new Array(e-u+1).join(t)+i:i)}function Vn(n){return new RegExp(\"^(?:\"+n.map(ao.requote).join(\"|\")+\")\",\"i\")}function Xn(n){for(var t=new c,e=-1,r=n.length;++e<r;)t.set(n[e].toLowerCase(),e);return t}function $n(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+1));return r?(n.w=+r[0],e+r[0].length):-1}function Bn(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e));return r?(n.U=+r[0],e+r[0].length):-1}function Wn(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e));return r?(n.W=+r[0],e+r[0].length):-1}function Jn(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+4));return r?(n.y=+r[0],e+r[0].length):-1}function Gn(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.y=Qn(+r[0]),e+r[0].length):-1}function Kn(n,t,e){return/^[+-]\\d{4}$/.test(t=t.slice(e,e+5))?(n.Z=-t,e+5):-1}function Qn(n){return n+(n>68?1900:2e3)}function nt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.m=r[0]-1,e+r[0].length):-1}function tt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.d=+r[0],e+r[0].length):-1}function et(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+3));return r?(n.j=+r[0],e+r[0].length):-1}function rt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.H=+r[0],e+r[0].length):-1}function it(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.M=+r[0],e+r[0].length):-1}function ut(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.S=+r[0],e+r[0].length):-1}function ot(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+3));return r?(n.L=+r[0],e+r[0].length):-1}function at(n){var t=n.getTimezoneOffset(),e=t>0?\"-\":\"+\",r=xo(t)/60|0,i=xo(t)%60;return e+Zn(r,\"0\",2)+Zn(i,\"0\",2)}function lt(n,t,e){Ma.lastIndex=0;var 
r=Ma.exec(t.slice(e,e+1));return r?e+r[0].length:-1}function ct(n){for(var t=n.length,e=-1;++e<t;)n[e][0]=this(n[e][0]);return function(t){for(var e=0,r=n[e];!r[1](t);)r=n[++e];return r[0](t)}}function ft(){}function st(n,t,e){var r=e.s=n+t,i=r-n,u=r-i;e.t=n-u+(t-i)}function ht(n,t){n&&wa.hasOwnProperty(n.type)&&wa[n.type](n,t)}function pt(n,t,e){var r,i=-1,u=n.length-e;for(t.lineStart();++i<u;)r=n[i],t.point(r[0],r[1],r[2]);t.lineEnd()}function gt(n,t){var e=-1,r=n.length;for(t.polygonStart();++e<r;)pt(n[e],t,1);t.polygonEnd()}function vt(){function n(n,t){n*=Yo,t=t*Yo/2+Fo/4;var e=n-r,o=e>=0?1:-1,a=o*e,l=Math.cos(t),c=Math.sin(t),f=u*c,s=i*l+f*Math.cos(a),h=f*o*Math.sin(a);ka.add(Math.atan2(h,s)),r=n,i=l,u=c}var t,e,r,i,u;Na.point=function(o,a){Na.point=n,r=(t=o)*Yo,i=Math.cos(a=(e=a)*Yo/2+Fo/4),u=Math.sin(a)},Na.lineEnd=function(){n(t,e)}}function dt(n){var t=n[0],e=n[1],r=Math.cos(e);return[r*Math.cos(t),r*Math.sin(t),Math.sin(e)]}function yt(n,t){return n[0]*t[0]+n[1]*t[1]+n[2]*t[2]}function mt(n,t){return[n[1]*t[2]-n[2]*t[1],n[2]*t[0]-n[0]*t[2],n[0]*t[1]-n[1]*t[0]]}function Mt(n,t){n[0]+=t[0],n[1]+=t[1],n[2]+=t[2]}function xt(n,t){return[n[0]*t,n[1]*t,n[2]*t]}function bt(n){var t=Math.sqrt(n[0]*n[0]+n[1]*n[1]+n[2]*n[2]);n[0]/=t,n[1]/=t,n[2]/=t}function _t(n){return[Math.atan2(n[1],n[0]),tn(n[2])]}function wt(n,t){return xo(n[0]-t[0])<Uo&&xo(n[1]-t[1])<Uo}function St(n,t){n*=Yo;var e=Math.cos(t*=Yo);kt(e*Math.cos(n),e*Math.sin(n),Math.sin(t))}function kt(n,t,e){++Ea,Ca+=(n-Ca)/Ea,za+=(t-za)/Ea,La+=(e-La)/Ea}function Nt(){function n(n,i){n*=Yo;var u=Math.cos(i*=Yo),o=u*Math.cos(n),a=u*Math.sin(n),l=Math.sin(i),c=Math.atan2(Math.sqrt((c=e*l-r*a)*c+(c=r*o-t*l)*c+(c=t*a-e*o)*c),t*o+e*a+r*l);Aa+=c,qa+=c*(t+(t=o)),Ta+=c*(e+(e=a)),Ra+=c*(r+(r=l)),kt(t,e,r)}var t,e,r;ja.point=function(i,u){i*=Yo;var o=Math.cos(u*=Yo);t=o*Math.cos(i),e=o*Math.sin(i),r=Math.sin(u),ja.point=n,kt(t,e,r)}}function Et(){ja.point=St}function At(){function n(n,t){n*=Yo;var 
e=Math.cos(t*=Yo),o=e*Math.cos(n),a=e*Math.sin(n),l=Math.sin(t),c=i*l-u*a,f=u*o-r*l,s=r*a-i*o,h=Math.sqrt(c*c+f*f+s*s),p=r*o+i*a+u*l,g=h&&-nn(p)/h,v=Math.atan2(h,p);Da+=g*c,Pa+=g*f,Ua+=g*s,Aa+=v,qa+=v*(r+(r=o)),Ta+=v*(i+(i=a)),Ra+=v*(u+(u=l)),kt(r,i,u)}var t,e,r,i,u;ja.point=function(o,a){t=o,e=a,ja.point=n,o*=Yo;var l=Math.cos(a*=Yo);r=l*Math.cos(o),i=l*Math.sin(o),u=Math.sin(a),kt(r,i,u)},ja.lineEnd=function(){n(t,e),ja.lineEnd=Et,ja.point=St}}function Ct(n,t){function e(e,r){return e=n(e,r),t(e[0],e[1])}return n.invert&&t.invert&&(e.invert=function(e,r){return e=t.invert(e,r),e&&n.invert(e[0],e[1])}),e}function zt(){return!0}function Lt(n,t,e,r,i){var u=[],o=[];if(n.forEach(function(n){if(!((t=n.length-1)<=0)){var t,e=n[0],r=n[t];if(wt(e,r)){i.lineStart();for(var a=0;t>a;++a)i.point((e=n[a])[0],e[1]);return void i.lineEnd()}var l=new Tt(e,n,null,!0),c=new Tt(e,null,l,!1);l.o=c,u.push(l),o.push(c),l=new Tt(r,n,null,!1),c=new Tt(r,null,l,!0),l.o=c,u.push(l),o.push(c)}}),o.sort(t),qt(u),qt(o),u.length){for(var a=0,l=e,c=o.length;c>a;++a)o[a].e=l=!l;for(var f,s,h=u[0];;){for(var p=h,g=!0;p.v;)if((p=p.n)===h)return;f=p.z,i.lineStart();do{if(p.v=p.o.v=!0,p.e){if(g)for(var a=0,c=f.length;c>a;++a)i.point((s=f[a])[0],s[1]);else r(p.x,p.n.x,1,i);p=p.n}else{if(g){f=p.p.z;for(var a=f.length-1;a>=0;--a)i.point((s=f[a])[0],s[1])}else r(p.x,p.p.x,-1,i);p=p.p}p=p.o,f=p.z,g=!g}while(!p.v);i.lineEnd()}}}function qt(n){if(t=n.length){for(var t,e,r=0,i=n[0];++r<t;)i.n=e=n[r],e.p=i,i=e;i.n=e=n[0],e.p=i}}function Tt(n,t,e,r){this.x=n,this.z=t,this.o=e,this.e=r,this.v=!1,this.n=this.p=null}function Rt(n,t,e,r){return function(i,u){function o(t,e){var r=i(t,e);n(t=r[0],e=r[1])&&u.point(t,e)}function a(n,t){var e=i(n,t);d.point(e[0],e[1])}function l(){m.point=a,d.lineStart()}function c(){m.point=o,d.lineEnd()}function f(n,t){v.push([n,t]);var e=i(n,t);x.point(e[0],e[1])}function s(){x.lineStart(),v=[]}function h(){f(v[0][0],v[0][1]),x.lineEnd();var 
n,t=x.clean(),e=M.buffer(),r=e.length;if(v.pop(),g.push(v),v=null,r)if(1&t){n=e[0];var i,r=n.length-1,o=-1;if(r>0){for(b||(u.polygonStart(),b=!0),u.lineStart();++o<r;)u.point((i=n[o])[0],i[1]);u.lineEnd()}}else r>1&&2&t&&e.push(e.pop().concat(e.shift())),p.push(e.filter(Dt))}var p,g,v,d=t(u),y=i.invert(r[0],r[1]),m={point:o,lineStart:l,lineEnd:c,polygonStart:function(){m.point=f,m.lineStart=s,m.lineEnd=h,p=[],g=[]},polygonEnd:function(){m.point=o,m.lineStart=l,m.lineEnd=c,p=ao.merge(p);var n=Ot(y,g);p.length?(b||(u.polygonStart(),b=!0),Lt(p,Ut,n,e,u)):n&&(b||(u.polygonStart(),b=!0),u.lineStart(),e(null,null,1,u),u.lineEnd()),b&&(u.polygonEnd(),b=!1),p=g=null},sphere:function(){u.polygonStart(),u.lineStart(),e(null,null,1,u),u.lineEnd(),u.polygonEnd()}},M=Pt(),x=t(M),b=!1;return m}}function Dt(n){return n.length>1}function Pt(){var n,t=[];return{lineStart:function(){t.push(n=[])},point:function(t,e){n.push([t,e])},lineEnd:b,buffer:function(){var e=t;return t=[],n=null,e},rejoin:function(){t.length>1&&t.push(t.pop().concat(t.shift()))}}}function Ut(n,t){return((n=n.x)[0]<0?n[1]-Io-Uo:Io-n[1])-((t=t.x)[0]<0?t[1]-Io-Uo:Io-t[1])}function jt(n){var t,e=NaN,r=NaN,i=NaN;return{lineStart:function(){n.lineStart(),t=1},point:function(u,o){var a=u>0?Fo:-Fo,l=xo(u-e);xo(l-Fo)<Uo?(n.point(e,r=(r+o)/2>0?Io:-Io),n.point(i,r),n.lineEnd(),n.lineStart(),n.point(a,r),n.point(u,r),t=0):i!==a&&l>=Fo&&(xo(e-i)<Uo&&(e-=i*Uo),xo(u-a)<Uo&&(u-=a*Uo),r=Ft(e,r,u,o),n.point(i,r),n.lineEnd(),n.lineStart(),n.point(a,r),t=0),n.point(e=u,r=o),i=a},lineEnd:function(){n.lineEnd(),e=r=NaN},clean:function(){return 2-t}}}function Ft(n,t,e,r){var i,u,o=Math.sin(n-e);return xo(o)>Uo?Math.atan((Math.sin(t)*(u=Math.cos(r))*Math.sin(e)-Math.sin(r)*(i=Math.cos(t))*Math.sin(n))/(i*u*o)):(t+r)/2}function Ht(n,t,e,r){var i;if(null==n)i=e*Io,r.point(-Fo,i),r.point(0,i),r.point(Fo,i),r.point(Fo,0),r.point(Fo,-i),r.point(0,-i),r.point(-Fo,-i),r.point(-Fo,0),r.point(-Fo,i);else if(xo(n[0]-t[0])>Uo){var 
u=n[0]<t[0]?Fo:-Fo;i=e*u/2,r.point(-u,i),r.point(0,i),r.point(u,i)}else r.point(t[0],t[1])}function Ot(n,t){var e=n[0],r=n[1],i=[Math.sin(e),-Math.cos(e),0],u=0,o=0;ka.reset();for(var a=0,l=t.length;l>a;++a){var c=t[a],f=c.length;if(f)for(var s=c[0],h=s[0],p=s[1]/2+Fo/4,g=Math.sin(p),v=Math.cos(p),d=1;;){d===f&&(d=0),n=c[d];var y=n[0],m=n[1]/2+Fo/4,M=Math.sin(m),x=Math.cos(m),b=y-h,_=b>=0?1:-1,w=_*b,S=w>Fo,k=g*M;if(ka.add(Math.atan2(k*_*Math.sin(w),v*x+k*Math.cos(w))),u+=S?b+_*Ho:b,S^h>=e^y>=e){var N=mt(dt(s),dt(n));bt(N);var E=mt(i,N);bt(E);var A=(S^b>=0?-1:1)*tn(E[2]);(r>A||r===A&&(N[0]||N[1]))&&(o+=S^b>=0?1:-1)}if(!d++)break;h=y,g=M,v=x,s=n}}return(-Uo>u||Uo>u&&-Uo>ka)^1&o}function It(n){function t(n,t){return Math.cos(n)*Math.cos(t)>u}function e(n){var e,u,l,c,f;return{lineStart:function(){c=l=!1,f=1},point:function(s,h){var p,g=[s,h],v=t(s,h),d=o?v?0:i(s,h):v?i(s+(0>s?Fo:-Fo),h):0;if(!e&&(c=l=v)&&n.lineStart(),v!==l&&(p=r(e,g),(wt(e,p)||wt(g,p))&&(g[0]+=Uo,g[1]+=Uo,v=t(g[0],g[1]))),v!==l)f=0,v?(n.lineStart(),p=r(g,e),n.point(p[0],p[1])):(p=r(e,g),n.point(p[0],p[1]),n.lineEnd()),e=p;else if(a&&e&&o^v){var y;d&u||!(y=r(g,e,!0))||(f=0,o?(n.lineStart(),n.point(y[0][0],y[0][1]),n.point(y[1][0],y[1][1]),n.lineEnd()):(n.point(y[1][0],y[1][1]),n.lineEnd(),n.lineStart(),n.point(y[0][0],y[0][1])))}!v||e&&wt(e,g)||n.point(g[0],g[1]),e=g,l=v,u=d},lineEnd:function(){l&&n.lineEnd(),e=null},clean:function(){return f|(c&&l)<<1}}}function r(n,t,e){var r=dt(n),i=dt(t),o=[1,0,0],a=mt(r,i),l=yt(a,a),c=a[0],f=l-c*c;if(!f)return!e&&n;var s=u*l/f,h=-u*c/f,p=mt(o,a),g=xt(o,s),v=xt(a,h);Mt(g,v);var d=p,y=yt(g,d),m=yt(d,d),M=y*y-m*(yt(g,g)-1);if(!(0>M)){var x=Math.sqrt(M),b=xt(d,(-y-x)/m);if(Mt(b,g),b=_t(b),!e)return b;var _,w=n[0],S=t[0],k=n[1],N=t[1];w>S&&(_=w,w=S,S=_);var E=S-w,A=xo(E-Fo)<Uo,C=A||Uo>E;if(!A&&k>N&&(_=k,k=N,N=_),C?A?k+N>0^b[1]<(xo(b[0]-w)<Uo?k:N):k<=b[1]&&b[1]<=N:E>Fo^(w<=b[0]&&b[0]<=S)){var z=xt(d,(-y+x)/m);return Mt(z,g),[b,_t(z)]}}}function i(t,e){var 
r=o?n:Fo-n,i=0;return-r>t?i|=1:t>r&&(i|=2),-r>e?i|=4:e>r&&(i|=8),i}var u=Math.cos(n),o=u>0,a=xo(u)>Uo,l=ve(n,6*Yo);return Rt(t,e,l,o?[0,-n]:[-Fo,n-Fo])}function Yt(n,t,e,r){return function(i){var u,o=i.a,a=i.b,l=o.x,c=o.y,f=a.x,s=a.y,h=0,p=1,g=f-l,v=s-c;if(u=n-l,g||!(u>0)){if(u/=g,0>g){if(h>u)return;p>u&&(p=u)}else if(g>0){if(u>p)return;u>h&&(h=u)}if(u=e-l,g||!(0>u)){if(u/=g,0>g){if(u>p)return;u>h&&(h=u)}else if(g>0){if(h>u)return;p>u&&(p=u)}if(u=t-c,v||!(u>0)){if(u/=v,0>v){if(h>u)return;p>u&&(p=u)}else if(v>0){if(u>p)return;u>h&&(h=u)}if(u=r-c,v||!(0>u)){if(u/=v,0>v){if(u>p)return;u>h&&(h=u)}else if(v>0){if(h>u)return;p>u&&(p=u)}return h>0&&(i.a={x:l+h*g,y:c+h*v}),1>p&&(i.b={x:l+p*g,y:c+p*v}),i}}}}}}function Zt(n,t,e,r){function i(r,i){return xo(r[0]-n)<Uo?i>0?0:3:xo(r[0]-e)<Uo?i>0?2:1:xo(r[1]-t)<Uo?i>0?1:0:i>0?3:2}function u(n,t){return o(n.x,t.x)}function o(n,t){var e=i(n,1),r=i(t,1);return e!==r?e-r:0===e?t[1]-n[1]:1===e?n[0]-t[0]:2===e?n[1]-t[1]:t[0]-n[0]}return function(a){function l(n){for(var t=0,e=d.length,r=n[1],i=0;e>i;++i)for(var u,o=1,a=d[i],l=a.length,c=a[0];l>o;++o)u=a[o],c[1]<=r?u[1]>r&&Q(c,u,n)>0&&++t:u[1]<=r&&Q(c,u,n)<0&&--t,c=u;return 0!==t}function c(u,a,l,c){var f=0,s=0;if(null==u||(f=i(u,l))!==(s=i(a,l))||o(u,a)<0^l>0){do c.point(0===f||3===f?n:e,f>1?r:t);while((f=(f+l+4)%4)!==s)}else c.point(a[0],a[1])}function f(i,u){return i>=n&&e>=i&&u>=t&&r>=u}function s(n,t){f(n,t)&&a.point(n,t)}function h(){C.point=g,d&&d.push(y=[]),S=!0,w=!1,b=_=NaN}function p(){v&&(g(m,M),x&&w&&E.rejoin(),v.push(E.buffer())),C.point=s,w&&a.lineEnd()}function g(n,t){n=Math.max(-Ha,Math.min(Ha,n)),t=Math.max(-Ha,Math.min(Ha,t));var e=f(n,t);if(d&&y.push([n,t]),S)m=n,M=t,x=e,S=!1,e&&(a.lineStart(),a.point(n,t));else if(e&&w)a.point(n,t);else{var r={a:{x:b,y:_},b:{x:n,y:t}};A(r)?(w||(a.lineStart(),a.point(r.a.x,r.a.y)),a.point(r.b.x,r.b.y),e||a.lineEnd(),k=!1):e&&(a.lineStart(),a.point(n,t),k=!1)}b=n,_=t,w=e}var 
v,d,y,m,M,x,b,_,w,S,k,N=a,E=Pt(),A=Yt(n,t,e,r),C={point:s,lineStart:h,lineEnd:p,polygonStart:function(){a=E,v=[],d=[],k=!0},polygonEnd:function(){a=N,v=ao.merge(v);var t=l([n,r]),e=k&&t,i=v.length;(e||i)&&(a.polygonStart(),e&&(a.lineStart(),c(null,null,1,a),a.lineEnd()),i&&Lt(v,u,t,c,a),a.polygonEnd()),v=d=y=null}};return C}}function Vt(n){var t=0,e=Fo/3,r=ae(n),i=r(t,e);return i.parallels=function(n){return arguments.length?r(t=n[0]*Fo/180,e=n[1]*Fo/180):[t/Fo*180,e/Fo*180]},i}function Xt(n,t){function e(n,t){var e=Math.sqrt(u-2*i*Math.sin(t))/i;return[e*Math.sin(n*=i),o-e*Math.cos(n)]}var r=Math.sin(n),i=(r+Math.sin(t))/2,u=1+r*(2*i-r),o=Math.sqrt(u)/i;return e.invert=function(n,t){var e=o-t;return[Math.atan2(n,e)/i,tn((u-(n*n+e*e)*i*i)/(2*i))]},e}function $t(){function n(n,t){Ia+=i*n-r*t,r=n,i=t}var t,e,r,i;$a.point=function(u,o){$a.point=n,t=r=u,e=i=o},$a.lineEnd=function(){n(t,e)}}function Bt(n,t){Ya>n&&(Ya=n),n>Va&&(Va=n),Za>t&&(Za=t),t>Xa&&(Xa=t)}function Wt(){function n(n,t){o.push(\"M\",n,\",\",t,u)}function t(n,t){o.push(\"M\",n,\",\",t),a.point=e}function e(n,t){o.push(\"L\",n,\",\",t)}function r(){a.point=n}function i(){o.push(\"Z\")}var u=Jt(4.5),o=[],a={point:n,lineStart:function(){a.point=t},lineEnd:r,polygonStart:function(){a.lineEnd=i},polygonEnd:function(){a.lineEnd=r,a.point=n},pointRadius:function(n){return u=Jt(n),a},result:function(){if(o.length){var n=o.join(\"\");return o=[],n}}};return a}function Jt(n){return\"m0,\"+n+\"a\"+n+\",\"+n+\" 0 1,1 0,\"+-2*n+\"a\"+n+\",\"+n+\" 0 1,1 0,\"+2*n+\"z\"}function Gt(n,t){Ca+=n,za+=t,++La}function Kt(){function n(n,r){var i=n-t,u=r-e,o=Math.sqrt(i*i+u*u);qa+=o*(t+n)/2,Ta+=o*(e+r)/2,Ra+=o,Gt(t=n,e=r)}var t,e;Wa.point=function(r,i){Wa.point=n,Gt(t=r,e=i)}}function Qt(){Wa.point=Gt}function ne(){function n(n,t){var e=n-r,u=t-i,o=Math.sqrt(e*e+u*u);qa+=o*(r+n)/2,Ta+=o*(i+t)/2,Ra+=o,o=i*n-r*t,Da+=o*(r+n),Pa+=o*(i+t),Ua+=3*o,Gt(r=n,i=t)}var 
t,e,r,i;Wa.point=function(u,o){Wa.point=n,Gt(t=r=u,e=i=o)},Wa.lineEnd=function(){n(t,e)}}function te(n){function t(t,e){n.moveTo(t+o,e),n.arc(t,e,o,0,Ho)}function e(t,e){n.moveTo(t,e),a.point=r}function r(t,e){n.lineTo(t,e)}function i(){a.point=t}function u(){n.closePath()}var o=4.5,a={point:t,lineStart:function(){a.point=e},lineEnd:i,polygonStart:function(){a.lineEnd=u},polygonEnd:function(){a.lineEnd=i,a.point=t},pointRadius:function(n){return o=n,a},result:b};return a}function ee(n){function t(n){return(a?r:e)(n)}function e(t){return ue(t,function(e,r){e=n(e,r),t.point(e[0],e[1])})}function r(t){function e(e,r){e=n(e,r),t.point(e[0],e[1])}function r(){M=NaN,S.point=u,t.lineStart()}function u(e,r){var u=dt([e,r]),o=n(e,r);i(M,x,m,b,_,w,M=o[0],x=o[1],m=e,b=u[0],_=u[1],w=u[2],a,t),t.point(M,x)}function o(){S.point=e,t.lineEnd()}function l(){\nr(),S.point=c,S.lineEnd=f}function c(n,t){u(s=n,h=t),p=M,g=x,v=b,d=_,y=w,S.point=u}function f(){i(M,x,m,b,_,w,p,g,s,v,d,y,a,t),S.lineEnd=o,o()}var s,h,p,g,v,d,y,m,M,x,b,_,w,S={point:e,lineStart:r,lineEnd:o,polygonStart:function(){t.polygonStart(),S.lineStart=l},polygonEnd:function(){t.polygonEnd(),S.lineStart=r}};return S}function i(t,e,r,a,l,c,f,s,h,p,g,v,d,y){var m=f-t,M=s-e,x=m*m+M*M;if(x>4*u&&d--){var b=a+p,_=l+g,w=c+v,S=Math.sqrt(b*b+_*_+w*w),k=Math.asin(w/=S),N=xo(xo(w)-1)<Uo||xo(r-h)<Uo?(r+h)/2:Math.atan2(_,b),E=n(N,k),A=E[0],C=E[1],z=A-t,L=C-e,q=M*z-m*L;(q*q/x>u||xo((m*z+M*L)/x-.5)>.3||o>a*p+l*g+c*v)&&(i(t,e,r,a,l,c,A,C,N,b/=S,_/=S,w,d,y),y.point(A,C),i(A,C,N,b,_,w,f,s,h,p,g,v,d,y))}}var u=.5,o=Math.cos(30*Yo),a=16;return t.precision=function(n){return arguments.length?(a=(u=n*n)>0&&16,t):Math.sqrt(u)},t}function re(n){var t=ee(function(t,e){return n([t*Zo,e*Zo])});return function(n){return le(t(n))}}function ie(n){this.stream=n}function 
ue(n,t){return{point:t,sphere:function(){n.sphere()},lineStart:function(){n.lineStart()},lineEnd:function(){n.lineEnd()},polygonStart:function(){n.polygonStart()},polygonEnd:function(){n.polygonEnd()}}}function oe(n){return ae(function(){return n})()}function ae(n){function t(n){return n=a(n[0]*Yo,n[1]*Yo),[n[0]*h+l,c-n[1]*h]}function e(n){return n=a.invert((n[0]-l)/h,(c-n[1])/h),n&&[n[0]*Zo,n[1]*Zo]}function r(){a=Ct(o=se(y,M,x),u);var n=u(v,d);return l=p-n[0]*h,c=g+n[1]*h,i()}function i(){return f&&(f.valid=!1,f=null),t}var u,o,a,l,c,f,s=ee(function(n,t){return n=u(n,t),[n[0]*h+l,c-n[1]*h]}),h=150,p=480,g=250,v=0,d=0,y=0,M=0,x=0,b=Fa,_=m,w=null,S=null;return t.stream=function(n){return f&&(f.valid=!1),f=le(b(o,s(_(n)))),f.valid=!0,f},t.clipAngle=function(n){return arguments.length?(b=null==n?(w=n,Fa):It((w=+n)*Yo),i()):w},t.clipExtent=function(n){return arguments.length?(S=n,_=n?Zt(n[0][0],n[0][1],n[1][0],n[1][1]):m,i()):S},t.scale=function(n){return arguments.length?(h=+n,r()):h},t.translate=function(n){return arguments.length?(p=+n[0],g=+n[1],r()):[p,g]},t.center=function(n){return arguments.length?(v=n[0]%360*Yo,d=n[1]%360*Yo,r()):[v*Zo,d*Zo]},t.rotate=function(n){return arguments.length?(y=n[0]%360*Yo,M=n[1]%360*Yo,x=n.length>2?n[2]%360*Yo:0,r()):[y*Zo,M*Zo,x*Zo]},ao.rebind(t,s,\"precision\"),function(){return u=n.apply(this,arguments),t.invert=u.invert&&e,r()}}function le(n){return ue(n,function(t,e){n.point(t*Yo,e*Yo)})}function ce(n,t){return[n,t]}function fe(n,t){return[n>Fo?n-Ho:-Fo>n?n+Ho:n,t]}function se(n,t,e){return n?t||e?Ct(pe(n),ge(t,e)):pe(n):t||e?ge(t,e):fe}function he(n){return function(t,e){return t+=n,[t>Fo?t-Ho:-Fo>t?t+Ho:t,e]}}function pe(n){var t=he(n);return t.invert=he(-n),t}function ge(n,t){function e(n,t){var e=Math.cos(t),a=Math.cos(n)*e,l=Math.sin(n)*e,c=Math.sin(t),f=c*r+a*i;return[Math.atan2(l*u-f*o,a*r-c*i),tn(f*u+l*o)]}var r=Math.cos(n),i=Math.sin(n),u=Math.cos(t),o=Math.sin(t);return e.invert=function(n,t){var 
e=Math.cos(t),a=Math.cos(n)*e,l=Math.sin(n)*e,c=Math.sin(t),f=c*u-l*o;return[Math.atan2(l*u+c*o,a*r+f*i),tn(f*r-a*i)]},e}function ve(n,t){var e=Math.cos(n),r=Math.sin(n);return function(i,u,o,a){var l=o*t;null!=i?(i=de(e,i),u=de(e,u),(o>0?u>i:i>u)&&(i+=o*Ho)):(i=n+o*Ho,u=n-.5*l);for(var c,f=i;o>0?f>u:u>f;f-=l)a.point((c=_t([e,-r*Math.cos(f),-r*Math.sin(f)]))[0],c[1])}}function de(n,t){var e=dt(t);e[0]-=n,bt(e);var r=nn(-e[1]);return((-e[2]<0?-r:r)+2*Math.PI-Uo)%(2*Math.PI)}function ye(n,t,e){var r=ao.range(n,t-Uo,e).concat(t);return function(n){return r.map(function(t){return[n,t]})}}function me(n,t,e){var r=ao.range(n,t-Uo,e).concat(t);return function(n){return r.map(function(t){return[t,n]})}}function Me(n){return n.source}function xe(n){return n.target}function be(n,t,e,r){var i=Math.cos(t),u=Math.sin(t),o=Math.cos(r),a=Math.sin(r),l=i*Math.cos(n),c=i*Math.sin(n),f=o*Math.cos(e),s=o*Math.sin(e),h=2*Math.asin(Math.sqrt(on(r-t)+i*o*on(e-n))),p=1/Math.sin(h),g=h?function(n){var t=Math.sin(n*=h)*p,e=Math.sin(h-n)*p,r=e*l+t*f,i=e*c+t*s,o=e*u+t*a;return[Math.atan2(i,r)*Zo,Math.atan2(o,Math.sqrt(r*r+i*i))*Zo]}:function(){return[n*Zo,t*Zo]};return g.distance=h,g}function _e(){function n(n,i){var u=Math.sin(i*=Yo),o=Math.cos(i),a=xo((n*=Yo)-t),l=Math.cos(a);Ja+=Math.atan2(Math.sqrt((a=o*Math.sin(a))*a+(a=r*u-e*o*l)*a),e*u+r*o*l),t=n,e=u,r=o}var t,e,r;Ga.point=function(i,u){t=i*Yo,e=Math.sin(u*=Yo),r=Math.cos(u),Ga.point=n},Ga.lineEnd=function(){Ga.point=Ga.lineEnd=b}}function we(n,t){function e(t,e){var r=Math.cos(t),i=Math.cos(e),u=n(r*i);return[u*i*Math.sin(t),u*Math.sin(e)]}return e.invert=function(n,e){var r=Math.sqrt(n*n+e*e),i=t(r),u=Math.sin(i),o=Math.cos(i);return[Math.atan2(n*u,r*o),Math.asin(r&&e*u/r)]},e}function Se(n,t){function e(n,t){o>0?-Io+Uo>t&&(t=-Io+Uo):t>Io-Uo&&(t=Io-Uo);var e=o/Math.pow(i(t),u);return[e*Math.sin(u*n),o-e*Math.cos(u*n)]}var r=Math.cos(n),i=function(n){return 
Math.tan(Fo/4+n/2)},u=n===t?Math.sin(n):Math.log(r/Math.cos(t))/Math.log(i(t)/i(n)),o=r*Math.pow(i(n),u)/u;return u?(e.invert=function(n,t){var e=o-t,r=K(u)*Math.sqrt(n*n+e*e);return[Math.atan2(n,e)/u,2*Math.atan(Math.pow(o/r,1/u))-Io]},e):Ne}function ke(n,t){function e(n,t){var e=u-t;return[e*Math.sin(i*n),u-e*Math.cos(i*n)]}var r=Math.cos(n),i=n===t?Math.sin(n):(r-Math.cos(t))/(t-n),u=r/i+n;return xo(i)<Uo?ce:(e.invert=function(n,t){var e=u-t;return[Math.atan2(n,e)/i,u-K(i)*Math.sqrt(n*n+e*e)]},e)}function Ne(n,t){return[n,Math.log(Math.tan(Fo/4+t/2))]}function Ee(n){var t,e=oe(n),r=e.scale,i=e.translate,u=e.clipExtent;return e.scale=function(){var n=r.apply(e,arguments);return n===e?t?e.clipExtent(null):e:n},e.translate=function(){var n=i.apply(e,arguments);return n===e?t?e.clipExtent(null):e:n},e.clipExtent=function(n){var o=u.apply(e,arguments);if(o===e){if(t=null==n){var a=Fo*r(),l=i();u([[l[0]-a,l[1]-a],[l[0]+a,l[1]+a]])}}else t&&(o=null);return o},e.clipExtent(null)}function Ae(n,t){return[Math.log(Math.tan(Fo/4+t/2)),-n]}function Ce(n){return n[0]}function ze(n){return n[1]}function Le(n){for(var t=n.length,e=[0,1],r=2,i=2;t>i;i++){for(;r>1&&Q(n[e[r-2]],n[e[r-1]],n[i])<=0;)--r;e[r++]=i}return e.slice(0,r)}function qe(n,t){return n[0]-t[0]||n[1]-t[1]}function Te(n,t,e){return(e[0]-t[0])*(n[1]-t[1])<(e[1]-t[1])*(n[0]-t[0])}function Re(n,t,e,r){var i=n[0],u=e[0],o=t[0]-i,a=r[0]-u,l=n[1],c=e[1],f=t[1]-l,s=r[1]-c,h=(a*(l-c)-s*(i-u))/(s*o-a*f);return[i+h*o,l+h*f]}function De(n){var t=n[0],e=n[n.length-1];return!(t[0]-e[0]||t[1]-e[1])}function Pe(){rr(this),this.edge=this.site=this.circle=null}function Ue(n){var t=cl.pop()||new Pe;return t.site=n,t}function je(n){Be(n),ol.remove(n),cl.push(n),rr(n)}function Fe(n){var t=n.circle,e=t.x,r=t.cy,i={x:e,y:r},u=n.P,o=n.N,a=[n];je(n);for(var l=u;l.circle&&xo(e-l.circle.x)<Uo&&xo(r-l.circle.cy)<Uo;)u=l.P,a.unshift(l),je(l),l=u;a.unshift(l),Be(l);for(var 
c=o;c.circle&&xo(e-c.circle.x)<Uo&&xo(r-c.circle.cy)<Uo;)o=c.N,a.push(c),je(c),c=o;a.push(c),Be(c);var f,s=a.length;for(f=1;s>f;++f)c=a[f],l=a[f-1],nr(c.edge,l.site,c.site,i);l=a[0],c=a[s-1],c.edge=Ke(l.site,c.site,null,i),$e(l),$e(c)}function He(n){for(var t,e,r,i,u=n.x,o=n.y,a=ol._;a;)if(r=Oe(a,o)-u,r>Uo)a=a.L;else{if(i=u-Ie(a,o),!(i>Uo)){r>-Uo?(t=a.P,e=a):i>-Uo?(t=a,e=a.N):t=e=a;break}if(!a.R){t=a;break}a=a.R}var l=Ue(n);if(ol.insert(t,l),t||e){if(t===e)return Be(t),e=Ue(t.site),ol.insert(l,e),l.edge=e.edge=Ke(t.site,l.site),$e(t),void $e(e);if(!e)return void(l.edge=Ke(t.site,l.site));Be(t),Be(e);var c=t.site,f=c.x,s=c.y,h=n.x-f,p=n.y-s,g=e.site,v=g.x-f,d=g.y-s,y=2*(h*d-p*v),m=h*h+p*p,M=v*v+d*d,x={x:(d*m-p*M)/y+f,y:(h*M-v*m)/y+s};nr(e.edge,c,g,x),l.edge=Ke(c,n,null,x),e.edge=Ke(n,g,null,x),$e(t),$e(e)}}function Oe(n,t){var e=n.site,r=e.x,i=e.y,u=i-t;if(!u)return r;var o=n.P;if(!o)return-(1/0);e=o.site;var a=e.x,l=e.y,c=l-t;if(!c)return a;var f=a-r,s=1/u-1/c,h=f/c;return s?(-h+Math.sqrt(h*h-2*s*(f*f/(-2*c)-l+c/2+i-u/2)))/s+r:(r+a)/2}function Ie(n,t){var e=n.N;if(e)return Oe(e,t);var r=n.site;return r.y===t?r.x:1/0}function Ye(n){this.site=n,this.edges=[]}function Ze(n){for(var t,e,r,i,u,o,a,l,c,f,s=n[0][0],h=n[1][0],p=n[0][1],g=n[1][1],v=ul,d=v.length;d--;)if(u=v[d],u&&u.prepare())for(a=u.edges,l=a.length,o=0;l>o;)f=a[o].end(),r=f.x,i=f.y,c=a[++o%l].start(),t=c.x,e=c.y,(xo(r-t)>Uo||xo(i-e)>Uo)&&(a.splice(o,0,new tr(Qe(u.site,f,xo(r-s)<Uo&&g-i>Uo?{x:s,y:xo(t-s)<Uo?e:g}:xo(i-g)<Uo&&h-r>Uo?{x:xo(e-g)<Uo?t:h,y:g}:xo(r-h)<Uo&&i-p>Uo?{x:h,y:xo(t-h)<Uo?e:p}:xo(i-p)<Uo&&r-s>Uo?{x:xo(e-p)<Uo?t:s,y:p}:null),u.site,null)),++l)}function Ve(n,t){return t.angle-n.angle}function Xe(){rr(this),this.x=this.y=this.arc=this.site=this.cy=null}function $e(n){var t=n.P,e=n.N;if(t&&e){var r=t.site,i=n.site,u=e.site;if(r!==u){var o=i.x,a=i.y,l=r.x-o,c=r.y-a,f=u.x-o,s=u.y-a,h=2*(l*s-c*f);if(!(h>=-jo)){var p=l*l+c*c,g=f*f+s*s,v=(s*p-c*g)/h,d=(l*g-f*p)/h,s=d+a,y=fl.pop()||new 
Xe;y.arc=n,y.site=i,y.x=v+o,y.y=s+Math.sqrt(v*v+d*d),y.cy=s,n.circle=y;for(var m=null,M=ll._;M;)if(y.y<M.y||y.y===M.y&&y.x<=M.x){if(!M.L){m=M.P;break}M=M.L}else{if(!M.R){m=M;break}M=M.R}ll.insert(m,y),m||(al=y)}}}}function Be(n){var t=n.circle;t&&(t.P||(al=t.N),ll.remove(t),fl.push(t),rr(t),n.circle=null)}function We(n){for(var t,e=il,r=Yt(n[0][0],n[0][1],n[1][0],n[1][1]),i=e.length;i--;)t=e[i],(!Je(t,n)||!r(t)||xo(t.a.x-t.b.x)<Uo&&xo(t.a.y-t.b.y)<Uo)&&(t.a=t.b=null,e.splice(i,1))}function Je(n,t){var e=n.b;if(e)return!0;var r,i,u=n.a,o=t[0][0],a=t[1][0],l=t[0][1],c=t[1][1],f=n.l,s=n.r,h=f.x,p=f.y,g=s.x,v=s.y,d=(h+g)/2,y=(p+v)/2;if(v===p){if(o>d||d>=a)return;if(h>g){if(u){if(u.y>=c)return}else u={x:d,y:l};e={x:d,y:c}}else{if(u){if(u.y<l)return}else u={x:d,y:c};e={x:d,y:l}}}else if(r=(h-g)/(v-p),i=y-r*d,-1>r||r>1)if(h>g){if(u){if(u.y>=c)return}else u={x:(l-i)/r,y:l};e={x:(c-i)/r,y:c}}else{if(u){if(u.y<l)return}else u={x:(c-i)/r,y:c};e={x:(l-i)/r,y:l}}else if(v>p){if(u){if(u.x>=a)return}else u={x:o,y:r*o+i};e={x:a,y:r*a+i}}else{if(u){if(u.x<o)return}else u={x:a,y:r*a+i};e={x:o,y:r*o+i}}return n.a=u,n.b=e,!0}function Ge(n,t){this.l=n,this.r=t,this.a=this.b=null}function Ke(n,t,e,r){var i=new Ge(n,t);return il.push(i),e&&nr(i,n,t,e),r&&nr(i,t,n,r),ul[n.i].edges.push(new tr(i,n,t)),ul[t.i].edges.push(new tr(i,t,n)),i}function Qe(n,t,e){var r=new Ge(n,null);return r.a=t,r.b=e,il.push(r),r}function nr(n,t,e,r){n.a||n.b?n.l===e?n.b=r:n.a=r:(n.a=r,n.l=t,n.r=e)}function tr(n,t,e){var r=n.a,i=n.b;this.edge=n,this.site=t,this.angle=e?Math.atan2(e.y-t.y,e.x-t.x):n.l===t?Math.atan2(i.x-r.x,r.y-i.y):Math.atan2(r.x-i.x,i.y-r.y)}function er(){this._=null}function rr(n){n.U=n.C=n.L=n.R=n.P=n.N=null}function ir(n,t){var e=t,r=t.R,i=e.U;i?i.L===e?i.L=r:i.R=r:n._=r,r.U=i,e.U=r,e.R=r.L,e.R&&(e.R.U=e),r.L=e}function ur(n,t){var e=t,r=t.L,i=e.U;i?i.L===e?i.L=r:i.R=r:n._=r,r.U=i,e.U=r,e.L=r.R,e.L&&(e.L.U=e),r.R=e}function or(n){for(;n.L;)n=n.L;return n}function ar(n,t){var 
e,r,i,u=n.sort(lr).pop();for(il=[],ul=new Array(n.length),ol=new er,ll=new er;;)if(i=al,u&&(!i||u.y<i.y||u.y===i.y&&u.x<i.x))u.x===e&&u.y===r||(ul[u.i]=new Ye(u),He(u),e=u.x,r=u.y),u=n.pop();else{if(!i)break;Fe(i.arc)}t&&(We(t),Ze(t));var o={cells:ul,edges:il};return ol=ll=il=ul=null,o}function lr(n,t){return t.y-n.y||t.x-n.x}function cr(n,t,e){return(n.x-e.x)*(t.y-n.y)-(n.x-t.x)*(e.y-n.y)}function fr(n){return n.x}function sr(n){return n.y}function hr(){return{leaf:!0,nodes:[],point:null,x:null,y:null}}function pr(n,t,e,r,i,u){if(!n(t,e,r,i,u)){var o=.5*(e+i),a=.5*(r+u),l=t.nodes;l[0]&&pr(n,l[0],e,r,o,a),l[1]&&pr(n,l[1],o,r,i,a),l[2]&&pr(n,l[2],e,a,o,u),l[3]&&pr(n,l[3],o,a,i,u)}}function gr(n,t,e,r,i,u,o){var a,l=1/0;return function c(n,f,s,h,p){if(!(f>u||s>o||r>h||i>p)){if(g=n.point){var g,v=t-n.x,d=e-n.y,y=v*v+d*d;if(l>y){var m=Math.sqrt(l=y);r=t-m,i=e-m,u=t+m,o=e+m,a=g}}for(var M=n.nodes,x=.5*(f+h),b=.5*(s+p),_=t>=x,w=e>=b,S=w<<1|_,k=S+4;k>S;++S)if(n=M[3&S])switch(3&S){case 0:c(n,f,s,x,b);break;case 1:c(n,x,s,h,b);break;case 2:c(n,f,b,x,p);break;case 3:c(n,x,b,h,p)}}}(n,r,i,u,o),a}function vr(n,t){n=ao.rgb(n),t=ao.rgb(t);var e=n.r,r=n.g,i=n.b,u=t.r-e,o=t.g-r,a=t.b-i;return function(n){return\"#\"+bn(Math.round(e+u*n))+bn(Math.round(r+o*n))+bn(Math.round(i+a*n))}}function dr(n,t){var e,r={},i={};for(e in n)e in t?r[e]=Mr(n[e],t[e]):i[e]=n[e];for(e in t)e in n||(i[e]=t[e]);return function(n){for(e in r)i[e]=r[e](n);return i}}function yr(n,t){return n=+n,t=+t,function(e){return n*(1-e)+t*e}}function mr(n,t){var e,r,i,u=hl.lastIndex=pl.lastIndex=0,o=-1,a=[],l=[];for(n+=\"\",t+=\"\";(e=hl.exec(n))&&(r=pl.exec(t));)(i=r.index)>u&&(i=t.slice(u,i),a[o]?a[o]+=i:a[++o]=i),(e=e[0])===(r=r[0])?a[o]?a[o]+=r:a[++o]=r:(a[++o]=null,l.push({i:o,x:yr(e,r)})),u=pl.lastIndex;return u<t.length&&(i=t.slice(u),a[o]?a[o]+=i:a[++o]=i),a.length<2?l[0]?(t=l[0].x,function(n){return t(n)+\"\"}):function(){return t}:(t=l.length,function(n){for(var e,r=0;t>r;++r)a[(e=l[r]).i]=e.x(n);return 
a.join(\"\")})}function Mr(n,t){for(var e,r=ao.interpolators.length;--r>=0&&!(e=ao.interpolators[r](n,t)););return e}function xr(n,t){var e,r=[],i=[],u=n.length,o=t.length,a=Math.min(n.length,t.length);for(e=0;a>e;++e)r.push(Mr(n[e],t[e]));for(;u>e;++e)i[e]=n[e];for(;o>e;++e)i[e]=t[e];return function(n){for(e=0;a>e;++e)i[e]=r[e](n);return i}}function br(n){return function(t){return 0>=t?0:t>=1?1:n(t)}}function _r(n){return function(t){return 1-n(1-t)}}function wr(n){return function(t){return.5*(.5>t?n(2*t):2-n(2-2*t))}}function Sr(n){return n*n}function kr(n){return n*n*n}function Nr(n){if(0>=n)return 0;if(n>=1)return 1;var t=n*n,e=t*n;return 4*(.5>n?e:3*(n-t)+e-.75)}function Er(n){return function(t){return Math.pow(t,n)}}function Ar(n){return 1-Math.cos(n*Io)}function Cr(n){return Math.pow(2,10*(n-1))}function zr(n){return 1-Math.sqrt(1-n*n)}function Lr(n,t){var e;return arguments.length<2&&(t=.45),arguments.length?e=t/Ho*Math.asin(1/n):(n=1,e=t/4),function(r){return 1+n*Math.pow(2,-10*r)*Math.sin((r-e)*Ho/t)}}function qr(n){return n||(n=1.70158),function(t){return t*t*((n+1)*t-n)}}function Tr(n){return 1/2.75>n?7.5625*n*n:2/2.75>n?7.5625*(n-=1.5/2.75)*n+.75:2.5/2.75>n?7.5625*(n-=2.25/2.75)*n+.9375:7.5625*(n-=2.625/2.75)*n+.984375}function Rr(n,t){n=ao.hcl(n),t=ao.hcl(t);var e=n.h,r=n.c,i=n.l,u=t.h-e,o=t.c-r,a=t.l-i;return isNaN(o)&&(o=0,r=isNaN(r)?t.c:r),isNaN(u)?(u=0,e=isNaN(e)?t.h:e):u>180?u-=360:-180>u&&(u+=360),function(n){return sn(e+u*n,r+o*n,i+a*n)+\"\"}}function Dr(n,t){n=ao.hsl(n),t=ao.hsl(t);var e=n.h,r=n.s,i=n.l,u=t.h-e,o=t.s-r,a=t.l-i;return isNaN(o)&&(o=0,r=isNaN(r)?t.s:r),isNaN(u)?(u=0,e=isNaN(e)?t.h:e):u>180?u-=360:-180>u&&(u+=360),function(n){return cn(e+u*n,r+o*n,i+a*n)+\"\"}}function Pr(n,t){n=ao.lab(n),t=ao.lab(t);var e=n.l,r=n.a,i=n.b,u=t.l-e,o=t.a-r,a=t.b-i;return function(n){return pn(e+u*n,r+o*n,i+a*n)+\"\"}}function Ur(n,t){return t-=n,function(e){return Math.round(n+t*e)}}function jr(n){var 
t=[n.a,n.b],e=[n.c,n.d],r=Hr(t),i=Fr(t,e),u=Hr(Or(e,t,-i))||0;t[0]*e[1]<e[0]*t[1]&&(t[0]*=-1,t[1]*=-1,r*=-1,i*=-1),this.rotate=(r?Math.atan2(t[1],t[0]):Math.atan2(-e[0],e[1]))*Zo,this.translate=[n.e,n.f],this.scale=[r,u],this.skew=u?Math.atan2(i,u)*Zo:0}function Fr(n,t){return n[0]*t[0]+n[1]*t[1]}function Hr(n){var t=Math.sqrt(Fr(n,n));return t&&(n[0]/=t,n[1]/=t),t}function Or(n,t,e){return n[0]+=e*t[0],n[1]+=e*t[1],n}function Ir(n){return n.length?n.pop()+\",\":\"\"}function Yr(n,t,e,r){if(n[0]!==t[0]||n[1]!==t[1]){var i=e.push(\"translate(\",null,\",\",null,\")\");r.push({i:i-4,x:yr(n[0],t[0])},{i:i-2,x:yr(n[1],t[1])})}else(t[0]||t[1])&&e.push(\"translate(\"+t+\")\")}function Zr(n,t,e,r){n!==t?(n-t>180?t+=360:t-n>180&&(n+=360),r.push({i:e.push(Ir(e)+\"rotate(\",null,\")\")-2,x:yr(n,t)})):t&&e.push(Ir(e)+\"rotate(\"+t+\")\")}function Vr(n,t,e,r){n!==t?r.push({i:e.push(Ir(e)+\"skewX(\",null,\")\")-2,x:yr(n,t)}):t&&e.push(Ir(e)+\"skewX(\"+t+\")\")}function Xr(n,t,e,r){if(n[0]!==t[0]||n[1]!==t[1]){var i=e.push(Ir(e)+\"scale(\",null,\",\",null,\")\");r.push({i:i-4,x:yr(n[0],t[0])},{i:i-2,x:yr(n[1],t[1])})}else 1===t[0]&&1===t[1]||e.push(Ir(e)+\"scale(\"+t+\")\")}function $r(n,t){var e=[],r=[];return n=ao.transform(n),t=ao.transform(t),Yr(n.translate,t.translate,e,r),Zr(n.rotate,t.rotate,e,r),Vr(n.skew,t.skew,e,r),Xr(n.scale,t.scale,e,r),n=t=null,function(n){for(var t,i=-1,u=r.length;++i<u;)e[(t=r[i]).i]=t.x(n);return e.join(\"\")}}function Br(n,t){return t=(t-=n=+n)||1/t,function(e){return(e-n)/t}}function Wr(n,t){return t=(t-=n=+n)||1/t,function(e){return Math.max(0,Math.min(1,(e-n)/t))}}function Jr(n){for(var t=n.source,e=n.target,r=Kr(t,e),i=[t];t!==r;)t=t.parent,i.push(t);for(var u=i.length;e!==r;)i.splice(u,0,e),e=e.parent;return i}function Gr(n){for(var t=[],e=n.parent;null!=e;)t.push(n),n=e,e=e.parent;return t.push(n),t}function Kr(n,t){if(n===t)return n;for(var e=Gr(n),r=Gr(t),i=e.pop(),u=r.pop(),o=null;i===u;)o=i,i=e.pop(),u=r.pop();return o}function 
Qr(n){n.fixed|=2}function ni(n){n.fixed&=-7}function ti(n){n.fixed|=4,n.px=n.x,n.py=n.y}function ei(n){n.fixed&=-5}function ri(n,t,e){var r=0,i=0;if(n.charge=0,!n.leaf)for(var u,o=n.nodes,a=o.length,l=-1;++l<a;)u=o[l],null!=u&&(ri(u,t,e),n.charge+=u.charge,r+=u.charge*u.cx,i+=u.charge*u.cy);if(n.point){n.leaf||(n.point.x+=Math.random()-.5,n.point.y+=Math.random()-.5);var c=t*e[n.point.index];n.charge+=n.pointCharge=c,r+=c*n.point.x,i+=c*n.point.y}n.cx=r/n.charge,n.cy=i/n.charge}function ii(n,t){return ao.rebind(n,t,\"sort\",\"children\",\"value\"),n.nodes=n,n.links=fi,n}function ui(n,t){for(var e=[n];null!=(n=e.pop());)if(t(n),(i=n.children)&&(r=i.length))for(var r,i;--r>=0;)e.push(i[r])}function oi(n,t){for(var e=[n],r=[];null!=(n=e.pop());)if(r.push(n),(u=n.children)&&(i=u.length))for(var i,u,o=-1;++o<i;)e.push(u[o]);for(;null!=(n=r.pop());)t(n)}function ai(n){return n.children}function li(n){return n.value}function ci(n,t){return t.value-n.value}function fi(n){return ao.merge(n.map(function(n){return(n.children||[]).map(function(t){return{source:n,target:t}})}))}function si(n){return n.x}function hi(n){return n.y}function pi(n,t,e){n.y0=t,n.y=e}function gi(n){return ao.range(n.length)}function vi(n){for(var t=-1,e=n[0].length,r=[];++t<e;)r[t]=0;return r}function di(n){for(var t,e=1,r=0,i=n[0][1],u=n.length;u>e;++e)(t=n[e][1])>i&&(r=e,i=t);return r}function yi(n){return n.reduce(mi,0)}function mi(n,t){return n+t[1]}function Mi(n,t){return xi(n,Math.ceil(Math.log(t.length)/Math.LN2+1))}function xi(n,t){for(var e=-1,r=+n[0],i=(n[1]-r)/t,u=[];++e<=t;)u[e]=i*e+r;return u}function bi(n){return[ao.min(n),ao.max(n)]}function _i(n,t){return n.value-t.value}function wi(n,t){var e=n._pack_next;n._pack_next=t,t._pack_prev=n,t._pack_next=e,e._pack_prev=t}function Si(n,t){n._pack_next=t,t._pack_prev=n}function ki(n,t){var e=t.x-n.x,r=t.y-n.y,i=n.r+t.r;return.999*i*i>e*e+r*r}function Ni(n){function 
t(n){f=Math.min(n.x-n.r,f),s=Math.max(n.x+n.r,s),h=Math.min(n.y-n.r,h),p=Math.max(n.y+n.r,p)}if((e=n.children)&&(c=e.length)){var e,r,i,u,o,a,l,c,f=1/0,s=-(1/0),h=1/0,p=-(1/0);if(e.forEach(Ei),r=e[0],r.x=-r.r,r.y=0,t(r),c>1&&(i=e[1],i.x=i.r,i.y=0,t(i),c>2))for(u=e[2],zi(r,i,u),t(u),wi(r,u),r._pack_prev=u,wi(u,i),i=r._pack_next,o=3;c>o;o++){zi(r,i,u=e[o]);var g=0,v=1,d=1;for(a=i._pack_next;a!==i;a=a._pack_next,v++)if(ki(a,u)){g=1;break}if(1==g)for(l=r._pack_prev;l!==a._pack_prev&&!ki(l,u);l=l._pack_prev,d++);g?(d>v||v==d&&i.r<r.r?Si(r,i=a):Si(r=l,i),o--):(wi(r,u),i=u,t(u))}var y=(f+s)/2,m=(h+p)/2,M=0;for(o=0;c>o;o++)u=e[o],u.x-=y,u.y-=m,M=Math.max(M,u.r+Math.sqrt(u.x*u.x+u.y*u.y));n.r=M,e.forEach(Ai)}}function Ei(n){n._pack_next=n._pack_prev=n}function Ai(n){delete n._pack_next,delete n._pack_prev}function Ci(n,t,e,r){var i=n.children;if(n.x=t+=r*n.x,n.y=e+=r*n.y,n.r*=r,i)for(var u=-1,o=i.length;++u<o;)Ci(i[u],t,e,r)}function zi(n,t,e){var r=n.r+e.r,i=t.x-n.x,u=t.y-n.y;if(r&&(i||u)){var o=t.r+e.r,a=i*i+u*u;o*=o,r*=r;var l=.5+(r-o)/(2*a),c=Math.sqrt(Math.max(0,2*o*(r+a)-(r-=a)*r-o*o))/(2*a);e.x=n.x+l*i+c*u,e.y=n.y+l*u-c*i}else e.x=n.x+r,e.y=n.y}function Li(n,t){return n.parent==t.parent?1:2}function qi(n){var t=n.children;return t.length?t[0]:n.t}function Ti(n){var t,e=n.children;return(t=e.length)?e[t-1]:n.t}function Ri(n,t,e){var r=e/(t.i-n.i);t.c-=r,t.s+=e,n.c+=r,t.z+=e,t.m+=e}function Di(n){for(var t,e=0,r=0,i=n.children,u=i.length;--u>=0;)t=i[u],t.z+=e,t.m+=e,e+=t.s+(r+=t.c)}function Pi(n,t,e){return n.a.parent===t.parent?n.a:e}function Ui(n){return 1+ao.max(n,function(n){return n.y})}function ji(n){return n.reduce(function(n,t){return n+t.x},0)/n.length}function Fi(n){var t=n.children;return t&&t.length?Fi(t[0]):n}function Hi(n){var t,e=n.children;return e&&(t=e.length)?Hi(e[t-1]):n}function Oi(n){return{x:n.x,y:n.y,dx:n.dx,dy:n.dy}}function Ii(n,t){var e=n.x+t[3],r=n.y+t[0],i=n.dx-t[1]-t[3],u=n.dy-t[0]-t[2];return 
0>i&&(e+=i/2,i=0),0>u&&(r+=u/2,u=0),{x:e,y:r,dx:i,dy:u}}function Yi(n){var t=n[0],e=n[n.length-1];return e>t?[t,e]:[e,t]}function Zi(n){return n.rangeExtent?n.rangeExtent():Yi(n.range())}function Vi(n,t,e,r){var i=e(n[0],n[1]),u=r(t[0],t[1]);return function(n){return u(i(n))}}function Xi(n,t){var e,r=0,i=n.length-1,u=n[r],o=n[i];return u>o&&(e=r,r=i,i=e,e=u,u=o,o=e),n[r]=t.floor(u),n[i]=t.ceil(o),n}function $i(n){return n?{floor:function(t){return Math.floor(t/n)*n},ceil:function(t){return Math.ceil(t/n)*n}}:Sl}function Bi(n,t,e,r){var i=[],u=[],o=0,a=Math.min(n.length,t.length)-1;for(n[a]<n[0]&&(n=n.slice().reverse(),t=t.slice().reverse());++o<=a;)i.push(e(n[o-1],n[o])),u.push(r(t[o-1],t[o]));return function(t){var e=ao.bisect(n,t,1,a)-1;return u[e](i[e](t))}}function Wi(n,t,e,r){function i(){var i=Math.min(n.length,t.length)>2?Bi:Vi,l=r?Wr:Br;return o=i(n,t,l,e),a=i(t,n,l,Mr),u}function u(n){return o(n)}var o,a;return u.invert=function(n){return a(n)},u.domain=function(t){return arguments.length?(n=t.map(Number),i()):n},u.range=function(n){return arguments.length?(t=n,i()):t},u.rangeRound=function(n){return u.range(n).interpolate(Ur)},u.clamp=function(n){return arguments.length?(r=n,i()):r},u.interpolate=function(n){return arguments.length?(e=n,i()):e},u.ticks=function(t){return Qi(n,t)},u.tickFormat=function(t,e){return nu(n,t,e)},u.nice=function(t){return Gi(n,t),i()},u.copy=function(){return Wi(n,t,e,r)},i()}function Ji(n,t){return ao.rebind(n,t,\"range\",\"rangeRound\",\"interpolate\",\"clamp\")}function Gi(n,t){return Xi(n,$i(Ki(n,t)[2])),Xi(n,$i(Ki(n,t)[2])),n}function Ki(n,t){null==t&&(t=10);var e=Yi(n),r=e[1]-e[0],i=Math.pow(10,Math.floor(Math.log(r/t)/Math.LN10)),u=t/r*i;return.15>=u?i*=10:.35>=u?i*=5:.75>=u&&(i*=2),e[0]=Math.ceil(e[0]/i)*i,e[1]=Math.floor(e[1]/i)*i+.5*i,e[2]=i,e}function Qi(n,t){return ao.range.apply(ao,Ki(n,t))}function nu(n,t,e){var r=Ki(n,t);if(e){var i=ha.exec(e);if(i.shift(),\"s\"===i[8]){var 
u=ao.formatPrefix(Math.max(xo(r[0]),xo(r[1])));return i[7]||(i[7]=\".\"+tu(u.scale(r[2]))),i[8]=\"f\",e=ao.format(i.join(\"\")),function(n){return e(u.scale(n))+u.symbol}}i[7]||(i[7]=\".\"+eu(i[8],r)),e=i.join(\"\")}else e=\",.\"+tu(r[2])+\"f\";return ao.format(e)}function tu(n){return-Math.floor(Math.log(n)/Math.LN10+.01)}function eu(n,t){var e=tu(t[2]);return n in kl?Math.abs(e-tu(Math.max(xo(t[0]),xo(t[1]))))+ +(\"e\"!==n):e-2*(\"%\"===n)}function ru(n,t,e,r){function i(n){return(e?Math.log(0>n?0:n):-Math.log(n>0?0:-n))/Math.log(t)}function u(n){return e?Math.pow(t,n):-Math.pow(t,-n)}function o(t){return n(i(t))}return o.invert=function(t){return u(n.invert(t))},o.domain=function(t){return arguments.length?(e=t[0]>=0,n.domain((r=t.map(Number)).map(i)),o):r},o.base=function(e){return arguments.length?(t=+e,n.domain(r.map(i)),o):t},o.nice=function(){var t=Xi(r.map(i),e?Math:El);return n.domain(t),r=t.map(u),o},o.ticks=function(){var n=Yi(r),o=[],a=n[0],l=n[1],c=Math.floor(i(a)),f=Math.ceil(i(l)),s=t%1?2:t;if(isFinite(f-c)){if(e){for(;f>c;c++)for(var h=1;s>h;h++)o.push(u(c)*h);o.push(u(c))}else for(o.push(u(c));c++<f;)for(var h=s-1;h>0;h--)o.push(u(c)*h);for(c=0;o[c]<a;c++);for(f=o.length;o[f-1]>l;f--);o=o.slice(c,f)}return o},o.tickFormat=function(n,e){if(!arguments.length)return Nl;arguments.length<2?e=Nl:\"function\"!=typeof e&&(e=ao.format(e));var r=Math.max(1,t*n/o.ticks().length);return function(n){var o=n/u(Math.round(i(n)));return t-.5>o*t&&(o*=t),r>=o?e(n):\"\"}},o.copy=function(){return ru(n.copy(),t,e,r)},Ji(o,n)}function iu(n,t,e){function r(t){return n(i(t))}var i=uu(t),u=uu(1/t);return r.invert=function(t){return u(n.invert(t))},r.domain=function(t){return arguments.length?(n.domain((e=t.map(Number)).map(i)),r):e},r.ticks=function(n){return Qi(e,n)},r.tickFormat=function(n,t){return nu(e,n,t)},r.nice=function(n){return r.domain(Gi(e,n))},r.exponent=function(o){return 
arguments.length?(i=uu(t=o),u=uu(1/t),n.domain(e.map(i)),r):t},r.copy=function(){return iu(n.copy(),t,e)},Ji(r,n)}function uu(n){return function(t){return 0>t?-Math.pow(-t,n):Math.pow(t,n)}}function ou(n,t){function e(e){return u[((i.get(e)||(\"range\"===t.t?i.set(e,n.push(e)):NaN))-1)%u.length]}function r(t,e){return ao.range(n.length).map(function(n){return t+e*n})}var i,u,o;return e.domain=function(r){if(!arguments.length)return n;n=[],i=new c;for(var u,o=-1,a=r.length;++o<a;)i.has(u=r[o])||i.set(u,n.push(u));return e[t.t].apply(e,t.a)},e.range=function(n){return arguments.length?(u=n,o=0,t={t:\"range\",a:arguments},e):u},e.rangePoints=function(i,a){arguments.length<2&&(a=0);var l=i[0],c=i[1],f=n.length<2?(l=(l+c)/2,0):(c-l)/(n.length-1+a);return u=r(l+f*a/2,f),o=0,t={t:\"rangePoints\",a:arguments},e},e.rangeRoundPoints=function(i,a){arguments.length<2&&(a=0);var l=i[0],c=i[1],f=n.length<2?(l=c=Math.round((l+c)/2),0):(c-l)/(n.length-1+a)|0;return u=r(l+Math.round(f*a/2+(c-l-(n.length-1+a)*f)/2),f),o=0,t={t:\"rangeRoundPoints\",a:arguments},e},e.rangeBands=function(i,a,l){arguments.length<2&&(a=0),arguments.length<3&&(l=a);var c=i[1]<i[0],f=i[c-0],s=i[1-c],h=(s-f)/(n.length-a+2*l);return u=r(f+h*l,h),c&&u.reverse(),o=h*(1-a),t={t:\"rangeBands\",a:arguments},e},e.rangeRoundBands=function(i,a,l){arguments.length<2&&(a=0),arguments.length<3&&(l=a);var c=i[1]<i[0],f=i[c-0],s=i[1-c],h=Math.floor((s-f)/(n.length-a+2*l));return u=r(f+Math.round((s-f-(n.length-a)*h)/2),h),c&&u.reverse(),o=Math.round(h*(1-a)),t={t:\"rangeRoundBands\",a:arguments},e},e.rangeBand=function(){return o},e.rangeExtent=function(){return Yi(t.a[0])},e.copy=function(){return ou(n,t)},e.domain(n)}function au(n,t){function u(){var e=0,r=t.length;for(a=[];++e<r;)a[e-1]=ao.quantile(n,e/r);return o}function o(n){return isNaN(n=+n)?void 0:t[ao.bisect(a,n)]}var a;return o.domain=function(t){return arguments.length?(n=t.map(r).filter(i).sort(e),u()):n},o.range=function(n){return 
arguments.length?(t=n,u()):t},o.quantiles=function(){return a},o.invertExtent=function(e){return e=t.indexOf(e),0>e?[NaN,NaN]:[e>0?a[e-1]:n[0],e<a.length?a[e]:n[n.length-1]]},o.copy=function(){return au(n,t)},u()}function lu(n,t,e){function r(t){return e[Math.max(0,Math.min(o,Math.floor(u*(t-n))))]}function i(){return u=e.length/(t-n),o=e.length-1,r}var u,o;return r.domain=function(e){return arguments.length?(n=+e[0],t=+e[e.length-1],i()):[n,t]},r.range=function(n){return arguments.length?(e=n,i()):e},r.invertExtent=function(t){return t=e.indexOf(t),t=0>t?NaN:t/u+n,[t,t+1/u]},r.copy=function(){return lu(n,t,e)},i()}function cu(n,t){function e(e){return e>=e?t[ao.bisect(n,e)]:void 0}return e.domain=function(t){return arguments.length?(n=t,e):n},e.range=function(n){return arguments.length?(t=n,e):t},e.invertExtent=function(e){return e=t.indexOf(e),[n[e-1],n[e]]},e.copy=function(){return cu(n,t)},e}function fu(n){function t(n){return+n}return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=e.map(t),t):n},t.ticks=function(t){return Qi(n,t)},t.tickFormat=function(t,e){return nu(n,t,e)},t.copy=function(){return fu(n)},t}function su(){return 0}function hu(n){return n.innerRadius}function pu(n){return n.outerRadius}function gu(n){return n.startAngle}function vu(n){return n.endAngle}function du(n){return n&&n.padAngle}function yu(n,t,e,r){return(n-e)*t-(t-r)*n>0?0:1}function mu(n,t,e,r,i){var u=n[0]-t[0],o=n[1]-t[1],a=(i?r:-r)/Math.sqrt(u*u+o*o),l=a*o,c=-a*u,f=n[0]+l,s=n[1]+c,h=t[0]+l,p=t[1]+c,g=(f+h)/2,v=(s+p)/2,d=h-f,y=p-s,m=d*d+y*y,M=e-r,x=f*p-h*s,b=(0>y?-1:1)*Math.sqrt(Math.max(0,M*M*m-x*x)),_=(x*y-d*b)/m,w=(-x*d-y*b)/m,S=(x*y+d*b)/m,k=(-x*d+y*b)/m,N=_-g,E=w-v,A=S-g,C=k-v;return N*N+E*E>A*A+C*C&&(_=S,w=k),[[_-l,w-c],[_*e/M,w*e/M]]}function Mu(n){function t(t){function o(){c.push(\"M\",u(n(f),a))}for(var l,c=[],f=[],s=-1,h=t.length,p=En(e),g=En(r);++s<h;)i.call(this,l=t[s],s)?f.push([+p.call(this,l,s),+g.call(this,l,s)]):f.length&&(o(),f=[]);return 
f.length&&o(),c.length?c.join(\"\"):null}var e=Ce,r=ze,i=zt,u=xu,o=u.key,a=.7;return t.x=function(n){return arguments.length?(e=n,t):e},t.y=function(n){return arguments.length?(r=n,t):r},t.defined=function(n){return arguments.length?(i=n,t):i},t.interpolate=function(n){return arguments.length?(o=\"function\"==typeof n?u=n:(u=Tl.get(n)||xu).key,t):o},t.tension=function(n){return arguments.length?(a=n,t):a},t}function xu(n){return n.length>1?n.join(\"L\"):n+\"Z\"}function bu(n){return n.join(\"L\")+\"Z\"}function _u(n){for(var t=0,e=n.length,r=n[0],i=[r[0],\",\",r[1]];++t<e;)i.push(\"H\",(r[0]+(r=n[t])[0])/2,\"V\",r[1]);return e>1&&i.push(\"H\",r[0]),i.join(\"\")}function wu(n){for(var t=0,e=n.length,r=n[0],i=[r[0],\",\",r[1]];++t<e;)i.push(\"V\",(r=n[t])[1],\"H\",r[0]);return i.join(\"\")}function Su(n){for(var t=0,e=n.length,r=n[0],i=[r[0],\",\",r[1]];++t<e;)i.push(\"H\",(r=n[t])[0],\"V\",r[1]);return i.join(\"\")}function ku(n,t){return n.length<4?xu(n):n[1]+Au(n.slice(1,-1),Cu(n,t))}function Nu(n,t){return n.length<3?bu(n):n[0]+Au((n.push(n[0]),n),Cu([n[n.length-2]].concat(n,[n[1]]),t))}function Eu(n,t){return n.length<3?xu(n):n[0]+Au(n,Cu(n,t))}function Au(n,t){if(t.length<1||n.length!=t.length&&n.length!=t.length+2)return xu(n);var e=n.length!=t.length,r=\"\",i=n[0],u=n[1],o=t[0],a=o,l=1;if(e&&(r+=\"Q\"+(u[0]-2*o[0]/3)+\",\"+(u[1]-2*o[1]/3)+\",\"+u[0]+\",\"+u[1],i=n[1],l=2),t.length>1){a=t[1],u=n[l],l++,r+=\"C\"+(i[0]+o[0])+\",\"+(i[1]+o[1])+\",\"+(u[0]-a[0])+\",\"+(u[1]-a[1])+\",\"+u[0]+\",\"+u[1];for(var c=2;c<t.length;c++,l++)u=n[l],a=t[c],r+=\"S\"+(u[0]-a[0])+\",\"+(u[1]-a[1])+\",\"+u[0]+\",\"+u[1]}if(e){var f=n[l];r+=\"Q\"+(u[0]+2*a[0]/3)+\",\"+(u[1]+2*a[1]/3)+\",\"+f[0]+\",\"+f[1]}return r}function Cu(n,t){for(var e,r=[],i=(1-t)/2,u=n[0],o=n[1],a=1,l=n.length;++a<l;)e=u,u=o,o=n[a],r.push([i*(o[0]-e[0]),i*(o[1]-e[1])]);return r}function zu(n){if(n.length<3)return xu(n);var 
t=1,e=n.length,r=n[0],i=r[0],u=r[1],o=[i,i,i,(r=n[1])[0]],a=[u,u,u,r[1]],l=[i,\",\",u,\"L\",Ru(Pl,o),\",\",Ru(Pl,a)];for(n.push(n[e-1]);++t<=e;)r=n[t],o.shift(),o.push(r[0]),a.shift(),a.push(r[1]),Du(l,o,a);return n.pop(),l.push(\"L\",r),l.join(\"\")}function Lu(n){if(n.length<4)return xu(n);for(var t,e=[],r=-1,i=n.length,u=[0],o=[0];++r<3;)t=n[r],u.push(t[0]),o.push(t[1]);for(e.push(Ru(Pl,u)+\",\"+Ru(Pl,o)),--r;++r<i;)t=n[r],u.shift(),u.push(t[0]),o.shift(),o.push(t[1]),Du(e,u,o);return e.join(\"\")}function qu(n){for(var t,e,r=-1,i=n.length,u=i+4,o=[],a=[];++r<4;)e=n[r%i],o.push(e[0]),a.push(e[1]);for(t=[Ru(Pl,o),\",\",Ru(Pl,a)],--r;++r<u;)e=n[r%i],o.shift(),o.push(e[0]),a.shift(),a.push(e[1]),Du(t,o,a);return t.join(\"\")}function Tu(n,t){var e=n.length-1;if(e)for(var r,i,u=n[0][0],o=n[0][1],a=n[e][0]-u,l=n[e][1]-o,c=-1;++c<=e;)r=n[c],i=c/e,r[0]=t*r[0]+(1-t)*(u+i*a),r[1]=t*r[1]+(1-t)*(o+i*l);return zu(n)}function Ru(n,t){return n[0]*t[0]+n[1]*t[1]+n[2]*t[2]+n[3]*t[3]}function Du(n,t,e){n.push(\"C\",Ru(Rl,t),\",\",Ru(Rl,e),\",\",Ru(Dl,t),\",\",Ru(Dl,e),\",\",Ru(Pl,t),\",\",Ru(Pl,e))}function Pu(n,t){return(t[1]-n[1])/(t[0]-n[0])}function Uu(n){for(var t=0,e=n.length-1,r=[],i=n[0],u=n[1],o=r[0]=Pu(i,u);++t<e;)r[t]=(o+(o=Pu(i=u,u=n[t+1])))/2;return r[t]=o,r}function ju(n){for(var t,e,r,i,u=[],o=Uu(n),a=-1,l=n.length-1;++a<l;)t=Pu(n[a],n[a+1]),xo(t)<Uo?o[a]=o[a+1]=0:(e=o[a]/t,r=o[a+1]/t,i=e*e+r*r,i>9&&(i=3*t/Math.sqrt(i),o[a]=i*e,o[a+1]=i*r));for(a=-1;++a<=l;)i=(n[Math.min(l,a+1)][0]-n[Math.max(0,a-1)][0])/(6*(1+o[a]*o[a])),u.push([i||0,o[a]*i||0]);return u}function Fu(n){return n.length<3?xu(n):n[0]+Au(n,ju(n))}function Hu(n){for(var t,e,r,i=-1,u=n.length;++i<u;)t=n[i],e=t[0],r=t[1]-Io,t[0]=e*Math.cos(r),t[1]=e*Math.sin(r);return n}function Ou(n){function t(t){function l(){v.push(\"M\",a(n(y),s),f,c(n(d.reverse()),s),\"Z\")}for(var h,p,g,v=[],d=[],y=[],m=-1,M=t.length,x=En(e),b=En(i),_=e===r?function(){\nreturn p}:En(r),w=i===u?function(){return 
g}:En(u);++m<M;)o.call(this,h=t[m],m)?(d.push([p=+x.call(this,h,m),g=+b.call(this,h,m)]),y.push([+_.call(this,h,m),+w.call(this,h,m)])):d.length&&(l(),d=[],y=[]);return d.length&&l(),v.length?v.join(\"\"):null}var e=Ce,r=Ce,i=0,u=ze,o=zt,a=xu,l=a.key,c=a,f=\"L\",s=.7;return t.x=function(n){return arguments.length?(e=r=n,t):r},t.x0=function(n){return arguments.length?(e=n,t):e},t.x1=function(n){return arguments.length?(r=n,t):r},t.y=function(n){return arguments.length?(i=u=n,t):u},t.y0=function(n){return arguments.length?(i=n,t):i},t.y1=function(n){return arguments.length?(u=n,t):u},t.defined=function(n){return arguments.length?(o=n,t):o},t.interpolate=function(n){return arguments.length?(l=\"function\"==typeof n?a=n:(a=Tl.get(n)||xu).key,c=a.reverse||a,f=a.closed?\"M\":\"L\",t):l},t.tension=function(n){return arguments.length?(s=n,t):s},t}function Iu(n){return n.radius}function Yu(n){return[n.x,n.y]}function Zu(n){return function(){var t=n.apply(this,arguments),e=t[0],r=t[1]-Io;return[e*Math.cos(r),e*Math.sin(r)]}}function Vu(){return 64}function Xu(){return\"circle\"}function $u(n){var t=Math.sqrt(n/Fo);return\"M0,\"+t+\"A\"+t+\",\"+t+\" 0 1,1 0,\"+-t+\"A\"+t+\",\"+t+\" 0 1,1 0,\"+t+\"Z\"}function Bu(n){return function(){var t,e,r;(t=this[n])&&(r=t[e=t.active])&&(r.timer.c=null,r.timer.t=NaN,--t.count?delete t[e]:delete this[n],t.active+=.5,r.event&&r.event.interrupt.call(this,this.__data__,r.index))}}function Wu(n,t,e){return ko(n,Yl),n.namespace=t,n.id=e,n}function Ju(n,t,e,r){var i=n.id,u=n.namespace;return Y(n,\"function\"==typeof e?function(n,o,a){n[u][i].tween.set(t,r(e.call(n,n.__data__,o,a)))}:(e=r(e),function(n){n[u][i].tween.set(t,e)}))}function Gu(n){return null==n&&(n=\"\"),function(){this.textContent=n}}function Ku(n){return null==n?\"__transition__\":\"__transition_\"+n+\"__\"}function Qu(n,t,e,r,i){function u(n){var t=v.delay;return f.t=t+l,n>=t?o(n-t):void(f.c=o)}function o(e){var i=g.active,u=g[i];u&&(u.timer.c=null,u.timer.t=NaN,--g.count,delete 
g[i],u.event&&u.event.interrupt.call(n,n.__data__,u.index));for(var o in g)if(r>+o){var c=g[o];c.timer.c=null,c.timer.t=NaN,--g.count,delete g[o]}f.c=a,qn(function(){return f.c&&a(e||1)&&(f.c=null,f.t=NaN),1},0,l),g.active=r,v.event&&v.event.start.call(n,n.__data__,t),p=[],v.tween.forEach(function(e,r){(r=r.call(n,n.__data__,t))&&p.push(r)}),h=v.ease,s=v.duration}function a(i){for(var u=i/s,o=h(u),a=p.length;a>0;)p[--a].call(n,o);return u>=1?(v.event&&v.event.end.call(n,n.__data__,t),--g.count?delete g[r]:delete n[e],1):void 0}var l,f,s,h,p,g=n[e]||(n[e]={active:0,count:0}),v=g[r];v||(l=i.time,f=qn(u,0,l),v=g[r]={tween:new c,time:l,timer:f,delay:i.delay,duration:i.duration,ease:i.ease,index:t},i=null,++g.count)}function no(n,t,e){n.attr(\"transform\",function(n){var r=t(n);return\"translate(\"+(isFinite(r)?r:e(n))+\",0)\"})}function to(n,t,e){n.attr(\"transform\",function(n){var r=t(n);return\"translate(0,\"+(isFinite(r)?r:e(n))+\")\"})}function eo(n){return n.toISOString()}function ro(n,t,e){function r(t){return n(t)}function i(n,e){var r=n[1]-n[0],i=r/e,u=ao.bisect(Kl,i);return u==Kl.length?[t.year,Ki(n.map(function(n){return n/31536e6}),e)[2]]:u?t[i/Kl[u-1]<Kl[u]/i?u-1:u]:[tc,Ki(n,e)[2]]}return r.invert=function(t){return io(n.invert(t))},r.domain=function(t){return arguments.length?(n.domain(t),r):n.domain().map(io)},r.nice=function(n,t){function e(e){return!isNaN(e)&&!n.range(e,io(+e+1),t).length}var u=r.domain(),o=Yi(u),a=null==n?i(o,10):\"number\"==typeof n&&i(o,n);return a&&(n=a[0],t=a[1]),r.domain(Xi(u,t>1?{floor:function(t){for(;e(t=n.floor(t));)t=io(t-1);return t},ceil:function(t){for(;e(t=n.ceil(t));)t=io(+t+1);return t}}:n))},r.ticks=function(n,t){var e=Yi(r.domain()),u=null==n?i(e,10):\"number\"==typeof n?i(e,n):!n.range&&[{range:n},t];return u&&(n=u[0],t=u[1]),n.range(e[0],io(+e[1]+1),1>t?1:t)},r.tickFormat=function(){return e},r.copy=function(){return ro(n.copy(),t,e)},Ji(r,n)}function io(n){return new Date(n)}function uo(n){return 
JSON.parse(n.responseText)}function oo(n){var t=fo.createRange();return t.selectNode(fo.body),t.createContextualFragment(n.responseText)}var ao={version:\"3.5.17\"},lo=[].slice,co=function(n){return lo.call(n)},fo=this.document;if(fo)try{co(fo.documentElement.childNodes)[0].nodeType}catch(so){co=function(n){for(var t=n.length,e=new Array(t);t--;)e[t]=n[t];return e}}if(Date.now||(Date.now=function(){return+new Date}),fo)try{fo.createElement(\"DIV\").style.setProperty(\"opacity\",0,\"\")}catch(ho){var po=this.Element.prototype,go=po.setAttribute,vo=po.setAttributeNS,yo=this.CSSStyleDeclaration.prototype,mo=yo.setProperty;po.setAttribute=function(n,t){go.call(this,n,t+\"\")},po.setAttributeNS=function(n,t,e){vo.call(this,n,t,e+\"\")},yo.setProperty=function(n,t,e){mo.call(this,n,t+\"\",e)}}ao.ascending=e,ao.descending=function(n,t){return n>t?-1:t>n?1:t>=n?0:NaN},ao.min=function(n,t){var e,r,i=-1,u=n.length;if(1===arguments.length){for(;++i<u;)if(null!=(r=n[i])&&r>=r){e=r;break}for(;++i<u;)null!=(r=n[i])&&e>r&&(e=r)}else{for(;++i<u;)if(null!=(r=t.call(n,n[i],i))&&r>=r){e=r;break}for(;++i<u;)null!=(r=t.call(n,n[i],i))&&e>r&&(e=r)}return e},ao.max=function(n,t){var e,r,i=-1,u=n.length;if(1===arguments.length){for(;++i<u;)if(null!=(r=n[i])&&r>=r){e=r;break}for(;++i<u;)null!=(r=n[i])&&r>e&&(e=r)}else{for(;++i<u;)if(null!=(r=t.call(n,n[i],i))&&r>=r){e=r;break}for(;++i<u;)null!=(r=t.call(n,n[i],i))&&r>e&&(e=r)}return e},ao.extent=function(n,t){var e,r,i,u=-1,o=n.length;if(1===arguments.length){for(;++u<o;)if(null!=(r=n[u])&&r>=r){e=i=r;break}for(;++u<o;)null!=(r=n[u])&&(e>r&&(e=r),r>i&&(i=r))}else{for(;++u<o;)if(null!=(r=t.call(n,n[u],u))&&r>=r){e=i=r;break}for(;++u<o;)null!=(r=t.call(n,n[u],u))&&(e>r&&(e=r),r>i&&(i=r))}return[e,i]},ao.sum=function(n,t){var e,r=0,u=n.length,o=-1;if(1===arguments.length)for(;++o<u;)i(e=+n[o])&&(r+=e);else for(;++o<u;)i(e=+t.call(n,n[o],o))&&(r+=e);return r},ao.mean=function(n,t){var 
e,u=0,o=n.length,a=-1,l=o;if(1===arguments.length)for(;++a<o;)i(e=r(n[a]))?u+=e:--l;else for(;++a<o;)i(e=r(t.call(n,n[a],a)))?u+=e:--l;return l?u/l:void 0},ao.quantile=function(n,t){var e=(n.length-1)*t+1,r=Math.floor(e),i=+n[r-1],u=e-r;return u?i+u*(n[r]-i):i},ao.median=function(n,t){var u,o=[],a=n.length,l=-1;if(1===arguments.length)for(;++l<a;)i(u=r(n[l]))&&o.push(u);else for(;++l<a;)i(u=r(t.call(n,n[l],l)))&&o.push(u);return o.length?ao.quantile(o.sort(e),.5):void 0},ao.variance=function(n,t){var e,u,o=n.length,a=0,l=0,c=-1,f=0;if(1===arguments.length)for(;++c<o;)i(e=r(n[c]))&&(u=e-a,a+=u/++f,l+=u*(e-a));else for(;++c<o;)i(e=r(t.call(n,n[c],c)))&&(u=e-a,a+=u/++f,l+=u*(e-a));return f>1?l/(f-1):void 0},ao.deviation=function(){var n=ao.variance.apply(this,arguments);return n?Math.sqrt(n):n};var Mo=u(e);ao.bisectLeft=Mo.left,ao.bisect=ao.bisectRight=Mo.right,ao.bisector=function(n){return u(1===n.length?function(t,r){return e(n(t),r)}:n)},ao.shuffle=function(n,t,e){(u=arguments.length)<3&&(e=n.length,2>u&&(t=0));for(var r,i,u=e-t;u;)i=Math.random()*u--|0,r=n[u+t],n[u+t]=n[i+t],n[i+t]=r;return n},ao.permute=function(n,t){for(var e=t.length,r=new Array(e);e--;)r[e]=n[t[e]];return r},ao.pairs=function(n){for(var t,e=0,r=n.length-1,i=n[0],u=new Array(0>r?0:r);r>e;)u[e]=[t=i,i=n[++e]];return u},ao.transpose=function(n){if(!(i=n.length))return[];for(var t=-1,e=ao.min(n,o),r=new Array(e);++t<e;)for(var i,u=-1,a=r[t]=new Array(i);++u<i;)a[u]=n[u][t];return r},ao.zip=function(){return ao.transpose(arguments)},ao.keys=function(n){var t=[];for(var e in n)t.push(e);return t},ao.values=function(n){var t=[];for(var e in n)t.push(n[e]);return t},ao.entries=function(n){var t=[];for(var e in n)t.push({key:e,value:n[e]});return t},ao.merge=function(n){for(var t,e,r,i=n.length,u=-1,o=0;++u<i;)o+=n[u].length;for(e=new Array(o);--i>=0;)for(r=n[i],t=r.length;--t>=0;)e[--o]=r[t];return e};var 
xo=Math.abs;ao.range=function(n,t,e){if(arguments.length<3&&(e=1,arguments.length<2&&(t=n,n=0)),(t-n)/e===1/0)throw new Error(\"infinite range\");var r,i=[],u=a(xo(e)),o=-1;if(n*=u,t*=u,e*=u,0>e)for(;(r=n+e*++o)>t;)i.push(r/u);else for(;(r=n+e*++o)<t;)i.push(r/u);return i},ao.map=function(n,t){var e=new c;if(n instanceof c)n.forEach(function(n,t){e.set(n,t)});else if(Array.isArray(n)){var r,i=-1,u=n.length;if(1===arguments.length)for(;++i<u;)e.set(i,n[i]);else for(;++i<u;)e.set(t.call(n,r=n[i],i),r)}else for(var o in n)e.set(o,n[o]);return e};var bo=\"__proto__\",_o=\"\\x00\";l(c,{has:h,get:function(n){return this._[f(n)]},set:function(n,t){return this._[f(n)]=t},remove:p,keys:g,values:function(){var n=[];for(var t in this._)n.push(this._[t]);return n},entries:function(){var n=[];for(var t in this._)n.push({key:s(t),value:this._[t]});return n},size:v,empty:d,forEach:function(n){for(var t in this._)n.call(this,s(t),this._[t])}}),ao.nest=function(){function n(t,o,a){if(a>=u.length)return r?r.call(i,o):e?o.sort(e):o;for(var l,f,s,h,p=-1,g=o.length,v=u[a++],d=new c;++p<g;)(h=d.get(l=v(f=o[p])))?h.push(f):d.set(l,[f]);return t?(f=t(),s=function(e,r){f.set(e,n(t,r,a))}):(f={},s=function(e,r){f[e]=n(t,r,a)}),d.forEach(s),f}function t(n,e){if(e>=u.length)return n;var r=[],i=o[e++];return n.forEach(function(n,i){r.push({key:n,values:t(i,e)})}),i?r.sort(function(n,t){return i(n.key,t.key)}):r}var e,r,i={},u=[],o=[];return i.map=function(t,e){return n(e,t,0)},i.entries=function(e){return t(n(ao.map,e,0),0)},i.key=function(n){return u.push(n),i},i.sortKeys=function(n){return o[u.length-1]=n,i},i.sortValues=function(n){return e=n,i},i.rollup=function(n){return r=n,i},i},ao.set=function(n){var t=new y;if(n)for(var e=0,r=n.length;r>e;++e)t.add(n[e]);return t},l(y,{has:h,add:function(n){return this._[f(n+=\"\")]=!0,n},remove:p,values:g,size:v,empty:d,forEach:function(n){for(var t in this._)n.call(this,s(t))}}),ao.behavior={},ao.rebind=function(n,t){for(var 
e,r=1,i=arguments.length;++r<i;)n[e=arguments[r]]=M(n,t,t[e]);return n};var wo=[\"webkit\",\"ms\",\"moz\",\"Moz\",\"o\",\"O\"];ao.dispatch=function(){for(var n=new _,t=-1,e=arguments.length;++t<e;)n[arguments[t]]=w(n);return n},_.prototype.on=function(n,t){var e=n.indexOf(\".\"),r=\"\";if(e>=0&&(r=n.slice(e+1),n=n.slice(0,e)),n)return arguments.length<2?this[n].on(r):this[n].on(r,t);if(2===arguments.length){if(null==t)for(n in this)this.hasOwnProperty(n)&&this[n].on(r,null);return this}},ao.event=null,ao.requote=function(n){return n.replace(So,\"\\\\$&\")};var So=/[\\\\\\^\\$\\*\\+\\?\\|\\[\\]\\(\\)\\.\\{\\}]/g,ko={}.__proto__?function(n,t){n.__proto__=t}:function(n,t){for(var e in t)n[e]=t[e]},No=function(n,t){return t.querySelector(n)},Eo=function(n,t){return t.querySelectorAll(n)},Ao=function(n,t){var e=n.matches||n[x(n,\"matchesSelector\")];return(Ao=function(n,t){return e.call(n,t)})(n,t)};\"function\"==typeof Sizzle&&(No=function(n,t){return Sizzle(n,t)[0]||null},Eo=Sizzle,Ao=Sizzle.matchesSelector),ao.selection=function(){return ao.select(fo.documentElement)};var Co=ao.selection.prototype=[];Co.select=function(n){var t,e,r,i,u=[];n=A(n);for(var o=-1,a=this.length;++o<a;){u.push(t=[]),t.parentNode=(r=this[o]).parentNode;for(var l=-1,c=r.length;++l<c;)(i=r[l])?(t.push(e=n.call(i,i.__data__,l,o)),e&&\"__data__\"in i&&(e.__data__=i.__data__)):t.push(null)}return E(u)},Co.selectAll=function(n){var t,e,r=[];n=C(n);for(var i=-1,u=this.length;++i<u;)for(var o=this[i],a=-1,l=o.length;++a<l;)(e=o[a])&&(r.push(t=co(n.call(e,e.__data__,a,i))),t.parentNode=e);return E(r)};var zo=\"http://www.w3.org/1999/xhtml\",Lo={svg:\"http://www.w3.org/2000/svg\",xhtml:zo,xlink:\"http://www.w3.org/1999/xlink\",xml:\"http://www.w3.org/XML/1998/namespace\",xmlns:\"http://www.w3.org/2000/xmlns/\"};ao.ns={prefix:Lo,qualify:function(n){var t=n.indexOf(\":\"),e=n;return 
t>=0&&\"xmlns\"!==(e=n.slice(0,t))&&(n=n.slice(t+1)),Lo.hasOwnProperty(e)?{space:Lo[e],local:n}:n}},Co.attr=function(n,t){if(arguments.length<2){if(\"string\"==typeof n){var e=this.node();return n=ao.ns.qualify(n),n.local?e.getAttributeNS(n.space,n.local):e.getAttribute(n)}for(t in n)this.each(z(t,n[t]));return this}return this.each(z(n,t))},Co.classed=function(n,t){if(arguments.length<2){if(\"string\"==typeof n){var e=this.node(),r=(n=T(n)).length,i=-1;if(t=e.classList){for(;++i<r;)if(!t.contains(n[i]))return!1}else for(t=e.getAttribute(\"class\");++i<r;)if(!q(n[i]).test(t))return!1;return!0}for(t in n)this.each(R(t,n[t]));return this}return this.each(R(n,t))},Co.style=function(n,e,r){var i=arguments.length;if(3>i){if(\"string\"!=typeof n){2>i&&(e=\"\");for(r in n)this.each(P(r,n[r],e));return this}if(2>i){var u=this.node();return t(u).getComputedStyle(u,null).getPropertyValue(n)}r=\"\"}return this.each(P(n,e,r))},Co.property=function(n,t){if(arguments.length<2){if(\"string\"==typeof n)return this.node()[n];for(t in n)this.each(U(t,n[t]));return this}return this.each(U(n,t))},Co.text=function(n){return arguments.length?this.each(\"function\"==typeof n?function(){var t=n.apply(this,arguments);this.textContent=null==t?\"\":t}:null==n?function(){this.textContent=\"\"}:function(){this.textContent=n}):this.node().textContent},Co.html=function(n){return arguments.length?this.each(\"function\"==typeof n?function(){var t=n.apply(this,arguments);this.innerHTML=null==t?\"\":t}:null==n?function(){this.innerHTML=\"\"}:function(){this.innerHTML=n}):this.node().innerHTML},Co.append=function(n){return n=j(n),this.select(function(){return this.appendChild(n.apply(this,arguments))})},Co.insert=function(n,t){return n=j(n),t=A(t),this.select(function(){return this.insertBefore(n.apply(this,arguments),t.apply(this,arguments)||null)})},Co.remove=function(){return this.each(F)},Co.data=function(n,t){function e(n,e){var r,i,u,o=n.length,s=e.length,h=Math.min(o,s),p=new Array(s),g=new 
Array(s),v=new Array(o);if(t){var d,y=new c,m=new Array(o);for(r=-1;++r<o;)(i=n[r])&&(y.has(d=t.call(i,i.__data__,r))?v[r]=i:y.set(d,i),m[r]=d);for(r=-1;++r<s;)(i=y.get(d=t.call(e,u=e[r],r)))?i!==!0&&(p[r]=i,i.__data__=u):g[r]=H(u),y.set(d,!0);for(r=-1;++r<o;)r in m&&y.get(m[r])!==!0&&(v[r]=n[r])}else{for(r=-1;++r<h;)i=n[r],u=e[r],i?(i.__data__=u,p[r]=i):g[r]=H(u);for(;s>r;++r)g[r]=H(e[r]);for(;o>r;++r)v[r]=n[r]}g.update=p,g.parentNode=p.parentNode=v.parentNode=n.parentNode,a.push(g),l.push(p),f.push(v)}var r,i,u=-1,o=this.length;if(!arguments.length){for(n=new Array(o=(r=this[0]).length);++u<o;)(i=r[u])&&(n[u]=i.__data__);return n}var a=Z([]),l=E([]),f=E([]);if(\"function\"==typeof n)for(;++u<o;)e(r=this[u],n.call(r,r.parentNode.__data__,u));else for(;++u<o;)e(r=this[u],n);return l.enter=function(){return a},l.exit=function(){return f},l},Co.datum=function(n){return arguments.length?this.property(\"__data__\",n):this.property(\"__data__\")},Co.filter=function(n){var t,e,r,i=[];\"function\"!=typeof n&&(n=O(n));for(var u=0,o=this.length;o>u;u++){i.push(t=[]),t.parentNode=(e=this[u]).parentNode;for(var a=0,l=e.length;l>a;a++)(r=e[a])&&n.call(r,r.__data__,a,u)&&t.push(r)}return E(i)},Co.order=function(){for(var n=-1,t=this.length;++n<t;)for(var e,r=this[n],i=r.length-1,u=r[i];--i>=0;)(e=r[i])&&(u&&u!==e.nextSibling&&u.parentNode.insertBefore(e,u),u=e);return this},Co.sort=function(n){n=I.apply(this,arguments);for(var t=-1,e=this.length;++t<e;)this[t].sort(n);return this.order()},Co.each=function(n){return Y(this,function(t,e,r){n.call(t,t.__data__,e,r)})},Co.call=function(n){var t=co(arguments);return n.apply(t[0]=this,t),this},Co.empty=function(){return!this.node()},Co.node=function(){for(var n=0,t=this.length;t>n;n++)for(var e=this[n],r=0,i=e.length;i>r;r++){var u=e[r];if(u)return u}return null},Co.size=function(){var n=0;return Y(this,function(){++n}),n};var 
qo=[];ao.selection.enter=Z,ao.selection.enter.prototype=qo,qo.append=Co.append,qo.empty=Co.empty,qo.node=Co.node,qo.call=Co.call,qo.size=Co.size,qo.select=function(n){for(var t,e,r,i,u,o=[],a=-1,l=this.length;++a<l;){r=(i=this[a]).update,o.push(t=[]),t.parentNode=i.parentNode;for(var c=-1,f=i.length;++c<f;)(u=i[c])?(t.push(r[c]=e=n.call(i.parentNode,u.__data__,c,a)),e.__data__=u.__data__):t.push(null)}return E(o)},qo.insert=function(n,t){return arguments.length<2&&(t=V(this)),Co.insert.call(this,n,t)},ao.select=function(t){var e;return\"string\"==typeof t?(e=[No(t,fo)],e.parentNode=fo.documentElement):(e=[t],e.parentNode=n(t)),E([e])},ao.selectAll=function(n){var t;return\"string\"==typeof n?(t=co(Eo(n,fo)),t.parentNode=fo.documentElement):(t=co(n),t.parentNode=null),E([t])},Co.on=function(n,t,e){var r=arguments.length;if(3>r){if(\"string\"!=typeof n){2>r&&(t=!1);for(e in n)this.each(X(e,n[e],t));return this}if(2>r)return(r=this.node()[\"__on\"+n])&&r._;e=!1}return this.each(X(n,t,e))};var To=ao.map({mouseenter:\"mouseover\",mouseleave:\"mouseout\"});fo&&To.forEach(function(n){\"on\"+n in fo&&To.remove(n)});var Ro,Do=0;ao.mouse=function(n){return J(n,k())};var Po=this.navigator&&/WebKit/.test(this.navigator.userAgent)?-1:0;ao.touch=function(n,t,e){if(arguments.length<3&&(e=t,t=k().changedTouches),t)for(var r,i=0,u=t.length;u>i;++i)if((r=t[i]).identifier===e)return J(n,r)},ao.behavior.drag=function(){function n(){this.on(\"mousedown.drag\",u).on(\"touchstart.drag\",o)}function e(n,t,e,u,o){return function(){function a(){var n,e,r=t(h,v);r&&(n=r[0]-M[0],e=r[1]-M[1],g|=n|e,M=r,p({type:\"drag\",x:r[0]+c[0],y:r[1]+c[1],dx:n,dy:e}))}function l(){t(h,v)&&(y.on(u+d,null).on(o+d,null),m(g),p({type:\"dragend\"}))}var 
c,f=this,s=ao.event.target.correspondingElement||ao.event.target,h=f.parentNode,p=r.of(f,arguments),g=0,v=n(),d=\".drag\"+(null==v?\"\":\"-\"+v),y=ao.select(e(s)).on(u+d,a).on(o+d,l),m=W(s),M=t(h,v);i?(c=i.apply(f,arguments),c=[c.x-M[0],c.y-M[1]]):c=[0,0],p({type:\"dragstart\"})}}var r=N(n,\"drag\",\"dragstart\",\"dragend\"),i=null,u=e(b,ao.mouse,t,\"mousemove\",\"mouseup\"),o=e(G,ao.touch,m,\"touchmove\",\"touchend\");return n.origin=function(t){return arguments.length?(i=t,n):i},ao.rebind(n,r,\"on\")},ao.touches=function(n,t){return arguments.length<2&&(t=k().touches),t?co(t).map(function(t){var e=J(n,t);return e.identifier=t.identifier,e}):[]};var Uo=1e-6,jo=Uo*Uo,Fo=Math.PI,Ho=2*Fo,Oo=Ho-Uo,Io=Fo/2,Yo=Fo/180,Zo=180/Fo,Vo=Math.SQRT2,Xo=2,$o=4;ao.interpolateZoom=function(n,t){var e,r,i=n[0],u=n[1],o=n[2],a=t[0],l=t[1],c=t[2],f=a-i,s=l-u,h=f*f+s*s;if(jo>h)r=Math.log(c/o)/Vo,e=function(n){return[i+n*f,u+n*s,o*Math.exp(Vo*n*r)]};else{var p=Math.sqrt(h),g=(c*c-o*o+$o*h)/(2*o*Xo*p),v=(c*c-o*o-$o*h)/(2*c*Xo*p),d=Math.log(Math.sqrt(g*g+1)-g),y=Math.log(Math.sqrt(v*v+1)-v);r=(y-d)/Vo,e=function(n){var t=n*r,e=rn(d),a=o/(Xo*p)*(e*un(Vo*t+d)-en(d));return[i+a*f,u+a*s,o*e/rn(Vo*t+d)]}}return e.duration=1e3*r,e},ao.behavior.zoom=function(){function n(n){n.on(L,s).on(Wo+\".zoom\",p).on(\"dblclick.zoom\",g).on(R,h)}function e(n){return[(n[0]-k.x)/k.k,(n[1]-k.y)/k.k]}function r(n){return[n[0]*k.k+k.x,n[1]*k.k+k.y]}function i(n){k.k=Math.max(A[0],Math.min(A[1],n))}function u(n,t){t=r(t),k.x+=n[0]-t[0],k.y+=n[1]-t[1]}function o(t,e,r,o){t.__chart__={x:k.x,y:k.y,k:k.k},i(Math.pow(2,o)),u(d=e,r),t=ao.select(t),C>0&&(t=t.transition().duration(C)),t.call(n.event)}function a(){b&&b.domain(x.range().map(function(n){return(n-k.x)/k.k}).map(x.invert)),w&&w.domain(_.range().map(function(n){return(n-k.y)/k.k}).map(_.invert))}function l(n){z++||n({type:\"zoomstart\"})}function c(n){a(),n({type:\"zoom\",scale:k.k,translate:[k.x,k.y]})}function 
f(n){--z||(n({type:\"zoomend\"}),d=null)}function s(){function n(){a=1,u(ao.mouse(i),h),c(o)}function r(){s.on(q,null).on(T,null),p(a),f(o)}var i=this,o=D.of(i,arguments),a=0,s=ao.select(t(i)).on(q,n).on(T,r),h=e(ao.mouse(i)),p=W(i);Il.call(i),l(o)}function h(){function n(){var n=ao.touches(g);return p=k.k,n.forEach(function(n){n.identifier in d&&(d[n.identifier]=e(n))}),n}function t(){var t=ao.event.target;ao.select(t).on(x,r).on(b,a),_.push(t);for(var e=ao.event.changedTouches,i=0,u=e.length;u>i;++i)d[e[i].identifier]=null;var l=n(),c=Date.now();if(1===l.length){if(500>c-M){var f=l[0];o(g,f,d[f.identifier],Math.floor(Math.log(k.k)/Math.LN2)+1),S()}M=c}else if(l.length>1){var f=l[0],s=l[1],h=f[0]-s[0],p=f[1]-s[1];y=h*h+p*p}}function r(){var n,t,e,r,o=ao.touches(g);Il.call(g);for(var a=0,l=o.length;l>a;++a,r=null)if(e=o[a],r=d[e.identifier]){if(t)break;n=e,t=r}if(r){var f=(f=e[0]-n[0])*f+(f=e[1]-n[1])*f,s=y&&Math.sqrt(f/y);n=[(n[0]+e[0])/2,(n[1]+e[1])/2],t=[(t[0]+r[0])/2,(t[1]+r[1])/2],i(s*p)}M=null,u(n,t),c(v)}function a(){if(ao.event.touches.length){for(var t=ao.event.changedTouches,e=0,r=t.length;r>e;++e)delete d[t[e].identifier];for(var i in d)return void n()}ao.selectAll(_).on(m,null),w.on(L,s).on(R,h),N(),f(v)}var p,g=this,v=D.of(g,arguments),d={},y=0,m=\".zoom-\"+ao.event.changedTouches[0].identifier,x=\"touchmove\"+m,b=\"touchend\"+m,_=[],w=ao.select(g),N=W(g);t(),l(v),w.on(L,null).on(R,t)}function p(){var n=D.of(this,arguments);m?clearTimeout(m):(Il.call(this),v=e(d=y||ao.mouse(this)),l(n)),m=setTimeout(function(){m=null,f(n)},50),S(),i(Math.pow(2,.002*Bo())*k.k),u(d,v),c(n)}function g(){var n=ao.mouse(this),t=Math.log(k.k)/Math.LN2;o(this,n,e(n),ao.event.shiftKey?Math.ceil(t)-1:Math.floor(t)+1)}var v,d,y,m,M,x,b,_,w,k={x:0,y:0,k:1},E=[960,500],A=Jo,C=250,z=0,L=\"mousedown.zoom\",q=\"mousemove.zoom\",T=\"mouseup.zoom\",R=\"touchstart.zoom\",D=N(n,\"zoomstart\",\"zoom\",\"zoomend\");return Wo||(Wo=\"onwheel\"in 
fo?(Bo=function(){return-ao.event.deltaY*(ao.event.deltaMode?120:1)},\"wheel\"):\"onmousewheel\"in fo?(Bo=function(){return ao.event.wheelDelta},\"mousewheel\"):(Bo=function(){return-ao.event.detail},\"MozMousePixelScroll\")),n.event=function(n){n.each(function(){var n=D.of(this,arguments),t=k;Hl?ao.select(this).transition().each(\"start.zoom\",function(){k=this.__chart__||{x:0,y:0,k:1},l(n)}).tween(\"zoom:zoom\",function(){var e=E[0],r=E[1],i=d?d[0]:e/2,u=d?d[1]:r/2,o=ao.interpolateZoom([(i-k.x)/k.k,(u-k.y)/k.k,e/k.k],[(i-t.x)/t.k,(u-t.y)/t.k,e/t.k]);return function(t){var r=o(t),a=e/r[2];this.__chart__=k={x:i-r[0]*a,y:u-r[1]*a,k:a},c(n)}}).each(\"interrupt.zoom\",function(){f(n)}).each(\"end.zoom\",function(){f(n)}):(this.__chart__=k,l(n),c(n),f(n))})},n.translate=function(t){return arguments.length?(k={x:+t[0],y:+t[1],k:k.k},a(),n):[k.x,k.y]},n.scale=function(t){return arguments.length?(k={x:k.x,y:k.y,k:null},i(+t),a(),n):k.k},n.scaleExtent=function(t){return arguments.length?(A=null==t?Jo:[+t[0],+t[1]],n):A},n.center=function(t){return arguments.length?(y=t&&[+t[0],+t[1]],n):y},n.size=function(t){return arguments.length?(E=t&&[+t[0],+t[1]],n):E},n.duration=function(t){return arguments.length?(C=+t,n):C},n.x=function(t){return arguments.length?(b=t,x=t.copy(),k={x:0,y:0,k:1},n):b},n.y=function(t){return arguments.length?(w=t,_=t.copy(),k={x:0,y:0,k:1},n):w},ao.rebind(n,D,\"on\")};var Bo,Wo,Jo=[0,1/0];ao.color=an,an.prototype.toString=function(){return this.rgb()+\"\"},ao.hsl=ln;var Go=ln.prototype=new an;Go.brighter=function(n){return n=Math.pow(.7,arguments.length?n:1),new ln(this.h,this.s,this.l/n)},Go.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new ln(this.h,this.s,n*this.l)},Go.rgb=function(){return cn(this.h,this.s,this.l)},ao.hcl=fn;var Ko=fn.prototype=new an;Ko.brighter=function(n){return new fn(this.h,this.c,Math.min(100,this.l+Qo*(arguments.length?n:1)))},Ko.darker=function(n){return new 
fn(this.h,this.c,Math.max(0,this.l-Qo*(arguments.length?n:1)))},Ko.rgb=function(){return sn(this.h,this.c,this.l).rgb()},ao.lab=hn;var Qo=18,na=.95047,ta=1,ea=1.08883,ra=hn.prototype=new an;ra.brighter=function(n){return new hn(Math.min(100,this.l+Qo*(arguments.length?n:1)),this.a,this.b)},ra.darker=function(n){return new hn(Math.max(0,this.l-Qo*(arguments.length?n:1)),this.a,this.b)},ra.rgb=function(){return pn(this.l,this.a,this.b)},ao.rgb=mn;var ia=mn.prototype=new an;ia.brighter=function(n){n=Math.pow(.7,arguments.length?n:1);var t=this.r,e=this.g,r=this.b,i=30;return t||e||r?(t&&i>t&&(t=i),e&&i>e&&(e=i),r&&i>r&&(r=i),new mn(Math.min(255,t/n),Math.min(255,e/n),Math.min(255,r/n))):new mn(i,i,i)},ia.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new mn(n*this.r,n*this.g,n*this.b)},ia.hsl=function(){return wn(this.r,this.g,this.b)},ia.toString=function(){return\"#\"+bn(this.r)+bn(this.g)+bn(this.b)};var ua=ao.map({aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianre
d:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,rebeccapurple:6697881,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074});ua.forEach(function(n,t){ua.set(n,Mn(t))}),ao.functor=En,ao.xhr=An(m),ao.dsv=function(n,t){function e(n,e,u){arguments.length<3&&(u=e,e=null);var o=Cn(n,t,null==e?r:i(e),u);return o.row=function(n){return arguments.length?o.response(null==(e=n)?r:i(n)):e},o}function r(n){return e.parse(n.responseText)}function i(n){return function(t){return 
e.parse(t.responseText,n)}}function u(t){return t.map(o).join(n)}function o(n){return a.test(n)?'\"'+n.replace(/\\\"/g,'\"\"')+'\"':n}var a=new RegExp('[\"'+n+\"\\n]\"),l=n.charCodeAt(0);return e.parse=function(n,t){var r;return e.parseRows(n,function(n,e){if(r)return r(n,e-1);var i=new Function(\"d\",\"return {\"+n.map(function(n,t){return JSON.stringify(n)+\": d[\"+t+\"]\"}).join(\",\")+\"}\");r=t?function(n,e){return t(i(n),e)}:i})},e.parseRows=function(n,t){function e(){if(f>=c)return o;if(i)return i=!1,u;var t=f;if(34===n.charCodeAt(t)){for(var e=t;e++<c;)if(34===n.charCodeAt(e)){if(34!==n.charCodeAt(e+1))break;++e}f=e+2;var r=n.charCodeAt(e+1);return 13===r?(i=!0,10===n.charCodeAt(e+2)&&++f):10===r&&(i=!0),n.slice(t+1,e).replace(/\"\"/g,'\"')}for(;c>f;){var r=n.charCodeAt(f++),a=1;if(10===r)i=!0;else if(13===r)i=!0,10===n.charCodeAt(f)&&(++f,++a);else if(r!==l)continue;return n.slice(t,f-a)}return n.slice(t)}for(var r,i,u={},o={},a=[],c=n.length,f=0,s=0;(r=e())!==o;){for(var h=[];r!==u&&r!==o;)h.push(r),r=e();t&&null==(h=t(h,s++))||a.push(h)}return a},e.format=function(t){if(Array.isArray(t[0]))return e.formatRows(t);var r=new y,i=[];return t.forEach(function(n){for(var t in n)r.has(t)||i.push(r.add(t))}),[i.map(o).join(n)].concat(t.map(function(t){return i.map(function(n){return o(t[n])}).join(n)})).join(\"\\n\")},e.formatRows=function(n){return n.map(u).join(\"\\n\")},e},ao.csv=ao.dsv(\",\",\"text/csv\"),ao.tsv=ao.dsv(\"\t\",\"text/tab-separated-values\");var oa,aa,la,ca,fa=this[x(this,\"requestAnimationFrame\")]||function(n){setTimeout(n,17)};ao.timer=function(){qn.apply(this,arguments)},ao.timer.flush=function(){Rn(),Dn()},ao.round=function(n,t){return t?Math.round(n*(t=Math.pow(10,t)))/t:Math.round(n)};var sa=[\"y\",\"z\",\"a\",\"f\",\"p\",\"n\",\"\\xb5\",\"m\",\"\",\"k\",\"M\",\"G\",\"T\",\"P\",\"E\",\"Z\",\"Y\"].map(Un);ao.formatPrefix=function(n,t){var 
e=0;return(n=+n)&&(0>n&&(n*=-1),t&&(n=ao.round(n,Pn(n,t))),e=1+Math.floor(1e-12+Math.log(n)/Math.LN10),e=Math.max(-24,Math.min(24,3*Math.floor((e-1)/3)))),sa[8+e/3]};var ha=/(?:([^{])?([<>=^]))?([+\\- ])?([$#])?(0)?(\\d+)?(,)?(\\.-?\\d+)?([a-z%])?/i,pa=ao.map({b:function(n){return n.toString(2)},c:function(n){return String.fromCharCode(n)},o:function(n){return n.toString(8)},x:function(n){return n.toString(16)},X:function(n){return n.toString(16).toUpperCase()},g:function(n,t){return n.toPrecision(t)},e:function(n,t){return n.toExponential(t)},f:function(n,t){return n.toFixed(t)},r:function(n,t){return(n=ao.round(n,Pn(n,t))).toFixed(Math.max(0,Math.min(20,Pn(n*(1+1e-15),t))))}}),ga=ao.time={},va=Date;Hn.prototype={getDate:function(){return this._.getUTCDate()},getDay:function(){return this._.getUTCDay()},getFullYear:function(){return this._.getUTCFullYear()},getHours:function(){return this._.getUTCHours()},getMilliseconds:function(){return this._.getUTCMilliseconds()},getMinutes:function(){return this._.getUTCMinutes()},getMonth:function(){return this._.getUTCMonth()},getSeconds:function(){return this._.getUTCSeconds()},getTime:function(){return this._.getTime()},getTimezoneOffset:function(){return 0},valueOf:function(){return this._.valueOf()},setDate:function(){da.setUTCDate.apply(this._,arguments)},setDay:function(){da.setUTCDay.apply(this._,arguments)},setFullYear:function(){da.setUTCFullYear.apply(this._,arguments)},setHours:function(){da.setUTCHours.apply(this._,arguments)},setMilliseconds:function(){da.setUTCMilliseconds.apply(this._,arguments)},setMinutes:function(){da.setUTCMinutes.apply(this._,arguments)},setMonth:function(){da.setUTCMonth.apply(this._,arguments)},setSeconds:function(){da.setUTCSeconds.apply(this._,arguments)},setTime:function(){da.setTime.apply(this._,arguments)}};var da=Date.prototype;ga.year=On(function(n){return n=ga.day(n),n.setMonth(0,1),n},function(n,t){n.setFullYear(n.getFullYear()+t)},function(n){return 
n.getFullYear()}),ga.years=ga.year.range,ga.years.utc=ga.year.utc.range,ga.day=On(function(n){var t=new va(2e3,0);return t.setFullYear(n.getFullYear(),n.getMonth(),n.getDate()),t},function(n,t){n.setDate(n.getDate()+t)},function(n){return n.getDate()-1}),ga.days=ga.day.range,ga.days.utc=ga.day.utc.range,ga.dayOfYear=function(n){var t=ga.year(n);return Math.floor((n-t-6e4*(n.getTimezoneOffset()-t.getTimezoneOffset()))/864e5)},[\"sunday\",\"monday\",\"tuesday\",\"wednesday\",\"thursday\",\"friday\",\"saturday\"].forEach(function(n,t){t=7-t;var e=ga[n]=On(function(n){return(n=ga.day(n)).setDate(n.getDate()-(n.getDay()+t)%7),n},function(n,t){n.setDate(n.getDate()+7*Math.floor(t))},function(n){var e=ga.year(n).getDay();return Math.floor((ga.dayOfYear(n)+(e+t)%7)/7)-(e!==t)});ga[n+\"s\"]=e.range,ga[n+\"s\"].utc=e.utc.range,ga[n+\"OfYear\"]=function(n){var e=ga.year(n).getDay();return Math.floor((ga.dayOfYear(n)+(e+t)%7)/7)}}),ga.week=ga.sunday,ga.weeks=ga.sunday.range,ga.weeks.utc=ga.sunday.utc.range,ga.weekOfYear=ga.sundayOfYear;var ya={\"-\":\"\",_:\" \",0:\"0\"},ma=/^\\s*\\d+/,Ma=/^%/;ao.locale=function(n){return{numberFormat:jn(n),timeFormat:Yn(n)}};var xa=ao.locale({decimal:\".\",thousands:\",\",grouping:[3],currency:[\"$\",\"\"],dateTime:\"%a %b %e %X %Y\",date:\"%m/%d/%Y\",time:\"%H:%M:%S\",periods:[\"AM\",\"PM\"],days:[\"Sunday\",\"Monday\",\"Tuesday\",\"Wednesday\",\"Thursday\",\"Friday\",\"Saturday\"],\nshortDays:[\"Sun\",\"Mon\",\"Tue\",\"Wed\",\"Thu\",\"Fri\",\"Sat\"],months:[\"January\",\"February\",\"March\",\"April\",\"May\",\"June\",\"July\",\"August\",\"September\",\"October\",\"November\",\"December\"],shortMonths:[\"Jan\",\"Feb\",\"Mar\",\"Apr\",\"May\",\"Jun\",\"Jul\",\"Aug\",\"Sep\",\"Oct\",\"Nov\",\"Dec\"]});ao.format=xa.numberFormat,ao.geo={},ft.prototype={s:0,t:0,add:function(n){st(n,this.t,ba),st(ba.s,this.s,this),this.s?this.t+=ba.t:this.s=ba.t},reset:function(){this.s=this.t=0},valueOf:function(){return this.s}};var ba=new 
ft;ao.geo.stream=function(n,t){n&&_a.hasOwnProperty(n.type)?_a[n.type](n,t):ht(n,t)};var _a={Feature:function(n,t){ht(n.geometry,t)},FeatureCollection:function(n,t){for(var e=n.features,r=-1,i=e.length;++r<i;)ht(e[r].geometry,t)}},wa={Sphere:function(n,t){t.sphere()},Point:function(n,t){n=n.coordinates,t.point(n[0],n[1],n[2])},MultiPoint:function(n,t){for(var e=n.coordinates,r=-1,i=e.length;++r<i;)n=e[r],t.point(n[0],n[1],n[2])},LineString:function(n,t){pt(n.coordinates,t,0)},MultiLineString:function(n,t){for(var e=n.coordinates,r=-1,i=e.length;++r<i;)pt(e[r],t,0)},Polygon:function(n,t){gt(n.coordinates,t)},MultiPolygon:function(n,t){for(var e=n.coordinates,r=-1,i=e.length;++r<i;)gt(e[r],t)},GeometryCollection:function(n,t){for(var e=n.geometries,r=-1,i=e.length;++r<i;)ht(e[r],t)}};ao.geo.area=function(n){return Sa=0,ao.geo.stream(n,Na),Sa};var Sa,ka=new ft,Na={sphere:function(){Sa+=4*Fo},point:b,lineStart:b,lineEnd:b,polygonStart:function(){ka.reset(),Na.lineStart=vt},polygonEnd:function(){var n=2*ka;Sa+=0>n?4*Fo+n:n,Na.lineStart=Na.lineEnd=Na.point=b}};ao.geo.bounds=function(){function n(n,t){M.push(x=[f=n,h=n]),s>t&&(s=t),t>p&&(p=t)}function t(t,e){var r=dt([t*Yo,e*Yo]);if(y){var i=mt(y,r),u=[i[1],-i[0],0],o=mt(u,i);bt(o),o=_t(o);var l=t-g,c=l>0?1:-1,v=o[0]*Zo*c,d=xo(l)>180;if(d^(v>c*g&&c*t>v)){var m=o[1]*Zo;m>p&&(p=m)}else if(v=(v+360)%360-180,d^(v>c*g&&c*t>v)){var m=-o[1]*Zo;s>m&&(s=m)}else s>e&&(s=e),e>p&&(p=e);d?g>t?a(f,t)>a(f,h)&&(h=t):a(t,h)>a(f,h)&&(f=t):h>=f?(f>t&&(f=t),t>h&&(h=t)):t>g?a(f,t)>a(f,h)&&(h=t):a(t,h)>a(f,h)&&(f=t)}else n(t,e);y=r,g=t}function e(){b.point=t}function r(){x[0]=f,x[1]=h,b.point=n,y=null}function i(n,e){if(y){var r=n-g;m+=xo(r)>180?r+(r>0?360:-360):r}else v=n,d=e;Na.point(n,e),t(n,e)}function u(){Na.lineStart()}function o(){i(v,d),Na.lineEnd(),xo(m)>Uo&&(f=-(h=180)),x[0]=f,x[1]=h,y=null}function a(n,t){return(t-=n)<0?t+360:t}function l(n,t){return n[0]-t[0]}function c(n,t){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:n<t[0]||t[1]<n}var 
f,s,h,p,g,v,d,y,m,M,x,b={point:n,lineStart:e,lineEnd:r,polygonStart:function(){b.point=i,b.lineStart=u,b.lineEnd=o,m=0,Na.polygonStart()},polygonEnd:function(){Na.polygonEnd(),b.point=n,b.lineStart=e,b.lineEnd=r,0>ka?(f=-(h=180),s=-(p=90)):m>Uo?p=90:-Uo>m&&(s=-90),x[0]=f,x[1]=h}};return function(n){p=h=-(f=s=1/0),M=[],ao.geo.stream(n,b);var t=M.length;if(t){M.sort(l);for(var e,r=1,i=M[0],u=[i];t>r;++r)e=M[r],c(e[0],i)||c(e[1],i)?(a(i[0],e[1])>a(i[0],i[1])&&(i[1]=e[1]),a(e[0],i[1])>a(i[0],i[1])&&(i[0]=e[0])):u.push(i=e);for(var o,e,g=-(1/0),t=u.length-1,r=0,i=u[t];t>=r;i=e,++r)e=u[r],(o=a(i[1],e[0]))>g&&(g=o,f=e[0],h=i[1])}return M=x=null,f===1/0||s===1/0?[[NaN,NaN],[NaN,NaN]]:[[f,s],[h,p]]}}(),ao.geo.centroid=function(n){Ea=Aa=Ca=za=La=qa=Ta=Ra=Da=Pa=Ua=0,ao.geo.stream(n,ja);var t=Da,e=Pa,r=Ua,i=t*t+e*e+r*r;return jo>i&&(t=qa,e=Ta,r=Ra,Uo>Aa&&(t=Ca,e=za,r=La),i=t*t+e*e+r*r,jo>i)?[NaN,NaN]:[Math.atan2(e,t)*Zo,tn(r/Math.sqrt(i))*Zo]};var Ea,Aa,Ca,za,La,qa,Ta,Ra,Da,Pa,Ua,ja={sphere:b,point:St,lineStart:Nt,lineEnd:Et,polygonStart:function(){ja.lineStart=At},polygonEnd:function(){ja.lineStart=Nt}},Fa=Rt(zt,jt,Ht,[-Fo,-Fo/2]),Ha=1e9;ao.geo.clipExtent=function(){var n,t,e,r,i,u,o={stream:function(n){return i&&(i.valid=!1),i=u(n),i.valid=!0,i},extent:function(a){return arguments.length?(u=Zt(n=+a[0][0],t=+a[0][1],e=+a[1][0],r=+a[1][1]),i&&(i.valid=!1,i=null),o):[[n,t],[e,r]]}};return o.extent([[0,0],[960,500]])},(ao.geo.conicEqualArea=function(){return Vt(Xt)}).raw=Xt,ao.geo.albers=function(){return ao.geo.conicEqualArea().rotate([96,0]).center([-.6,38.7]).parallels([29.5,45.5]).scale(1070)},ao.geo.albersUsa=function(){function n(n){var u=n[0],o=n[1];return t=null,e(u,o),t||(r(u,o),t)||i(u,o),t}var t,e,r,i,u=ao.geo.albers(),o=ao.geo.conicEqualArea().rotate([154,0]).center([-2,58.5]).parallels([55,65]),a=ao.geo.conicEqualArea().rotate([157,0]).center([-3,19.9]).parallels([8,18]),l={point:function(n,e){t=[n,e]}};return n.invert=function(n){var 
t=u.scale(),e=u.translate(),r=(n[0]-e[0])/t,i=(n[1]-e[1])/t;return(i>=.12&&.234>i&&r>=-.425&&-.214>r?o:i>=.166&&.234>i&&r>=-.214&&-.115>r?a:u).invert(n)},n.stream=function(n){var t=u.stream(n),e=o.stream(n),r=a.stream(n);return{point:function(n,i){t.point(n,i),e.point(n,i),r.point(n,i)},sphere:function(){t.sphere(),e.sphere(),r.sphere()},lineStart:function(){t.lineStart(),e.lineStart(),r.lineStart()},lineEnd:function(){t.lineEnd(),e.lineEnd(),r.lineEnd()},polygonStart:function(){t.polygonStart(),e.polygonStart(),r.polygonStart()},polygonEnd:function(){t.polygonEnd(),e.polygonEnd(),r.polygonEnd()}}},n.precision=function(t){return arguments.length?(u.precision(t),o.precision(t),a.precision(t),n):u.precision()},n.scale=function(t){return arguments.length?(u.scale(t),o.scale(.35*t),a.scale(t),n.translate(u.translate())):u.scale()},n.translate=function(t){if(!arguments.length)return u.translate();var c=u.scale(),f=+t[0],s=+t[1];return e=u.translate(t).clipExtent([[f-.455*c,s-.238*c],[f+.455*c,s+.238*c]]).stream(l).point,r=o.translate([f-.307*c,s+.201*c]).clipExtent([[f-.425*c+Uo,s+.12*c+Uo],[f-.214*c-Uo,s+.234*c-Uo]]).stream(l).point,i=a.translate([f-.205*c,s+.212*c]).clipExtent([[f-.214*c+Uo,s+.166*c+Uo],[f-.115*c-Uo,s+.234*c-Uo]]).stream(l).point,n},n.scale(1070)};var Oa,Ia,Ya,Za,Va,Xa,$a={point:b,lineStart:b,lineEnd:b,polygonStart:function(){Ia=0,$a.lineStart=$t},polygonEnd:function(){$a.lineStart=$a.lineEnd=$a.point=b,Oa+=xo(Ia/2)}},Ba={point:Bt,lineStart:b,lineEnd:b,polygonStart:b,polygonEnd:b},Wa={point:Gt,lineStart:Kt,lineEnd:Qt,polygonStart:function(){Wa.lineStart=ne},polygonEnd:function(){Wa.point=Gt,Wa.lineStart=Kt,Wa.lineEnd=Qt}};ao.geo.path=function(){function n(n){return n&&(\"function\"==typeof a&&u.pointRadius(+a.apply(this,arguments)),o&&o.valid||(o=i(u)),ao.geo.stream(n,o)),u.result()}function t(){return o=null,n}var e,r,i,u,o,a=4.5;return n.area=function(n){return Oa=0,ao.geo.stream(n,i($a)),Oa},n.centroid=function(n){return 
Ca=za=La=qa=Ta=Ra=Da=Pa=Ua=0,ao.geo.stream(n,i(Wa)),Ua?[Da/Ua,Pa/Ua]:Ra?[qa/Ra,Ta/Ra]:La?[Ca/La,za/La]:[NaN,NaN]},n.bounds=function(n){return Va=Xa=-(Ya=Za=1/0),ao.geo.stream(n,i(Ba)),[[Ya,Za],[Va,Xa]]},n.projection=function(n){return arguments.length?(i=(e=n)?n.stream||re(n):m,t()):e},n.context=function(n){return arguments.length?(u=null==(r=n)?new Wt:new te(n),\"function\"!=typeof a&&u.pointRadius(a),t()):r},n.pointRadius=function(t){return arguments.length?(a=\"function\"==typeof t?t:(u.pointRadius(+t),+t),n):a},n.projection(ao.geo.albersUsa()).context(null)},ao.geo.transform=function(n){return{stream:function(t){var e=new ie(t);for(var r in n)e[r]=n[r];return e}}},ie.prototype={point:function(n,t){this.stream.point(n,t)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}},ao.geo.projection=oe,ao.geo.projectionMutator=ae,(ao.geo.equirectangular=function(){return oe(ce)}).raw=ce.invert=ce,ao.geo.rotation=function(n){function t(t){return t=n(t[0]*Yo,t[1]*Yo),t[0]*=Zo,t[1]*=Zo,t}return n=se(n[0]%360*Yo,n[1]*Yo,n.length>2?n[2]*Yo:0),t.invert=function(t){return t=n.invert(t[0]*Yo,t[1]*Yo),t[0]*=Zo,t[1]*=Zo,t},t},fe.invert=ce,ao.geo.circle=function(){function n(){var n=\"function\"==typeof r?r.apply(this,arguments):r,t=se(-n[0]*Yo,-n[1]*Yo,0).invert,i=[];return e(null,null,1,{point:function(n,e){i.push(n=t(n,e)),n[0]*=Zo,n[1]*=Zo}}),{type:\"Polygon\",coordinates:[i]}}var t,e,r=[0,0],i=6;return n.origin=function(t){return arguments.length?(r=t,n):r},n.angle=function(r){return arguments.length?(e=ve((t=+r)*Yo,i*Yo),n):t},n.precision=function(r){return arguments.length?(e=ve(t*Yo,(i=+r)*Yo),n):i},n.angle(90)},ao.geo.distance=function(n,t){var e,r=(t[0]-n[0])*Yo,i=n[1]*Yo,u=t[1]*Yo,o=Math.sin(r),a=Math.cos(r),l=Math.sin(i),c=Math.cos(i),f=Math.sin(u),s=Math.cos(u);return 
Math.atan2(Math.sqrt((e=s*o)*e+(e=c*f-l*s*a)*e),l*f+c*s*a)},ao.geo.graticule=function(){function n(){return{type:\"MultiLineString\",coordinates:t()}}function t(){return ao.range(Math.ceil(u/d)*d,i,d).map(h).concat(ao.range(Math.ceil(c/y)*y,l,y).map(p)).concat(ao.range(Math.ceil(r/g)*g,e,g).filter(function(n){return xo(n%d)>Uo}).map(f)).concat(ao.range(Math.ceil(a/v)*v,o,v).filter(function(n){return xo(n%y)>Uo}).map(s))}var e,r,i,u,o,a,l,c,f,s,h,p,g=10,v=g,d=90,y=360,m=2.5;return n.lines=function(){return t().map(function(n){return{type:\"LineString\",coordinates:n}})},n.outline=function(){return{type:\"Polygon\",coordinates:[h(u).concat(p(l).slice(1),h(i).reverse().slice(1),p(c).reverse().slice(1))]}},n.extent=function(t){return arguments.length?n.majorExtent(t).minorExtent(t):n.minorExtent()},n.majorExtent=function(t){return arguments.length?(u=+t[0][0],i=+t[1][0],c=+t[0][1],l=+t[1][1],u>i&&(t=u,u=i,i=t),c>l&&(t=c,c=l,l=t),n.precision(m)):[[u,c],[i,l]]},n.minorExtent=function(t){return arguments.length?(r=+t[0][0],e=+t[1][0],a=+t[0][1],o=+t[1][1],r>e&&(t=r,r=e,e=t),a>o&&(t=a,a=o,o=t),n.precision(m)):[[r,a],[e,o]]},n.step=function(t){return arguments.length?n.majorStep(t).minorStep(t):n.minorStep()},n.majorStep=function(t){return arguments.length?(d=+t[0],y=+t[1],n):[d,y]},n.minorStep=function(t){return arguments.length?(g=+t[0],v=+t[1],n):[g,v]},n.precision=function(t){return arguments.length?(m=+t,f=ye(a,o,90),s=me(r,e,m),h=ye(c,l,90),p=me(u,i,m),n):m},n.majorExtent([[-180,-90+Uo],[180,90-Uo]]).minorExtent([[-180,-80-Uo],[180,80+Uo]])},ao.geo.greatArc=function(){function n(){return{type:\"LineString\",coordinates:[t||r.apply(this,arguments),e||i.apply(this,arguments)]}}var t,e,r=Me,i=xe;return n.distance=function(){return ao.geo.distance(t||r.apply(this,arguments),e||i.apply(this,arguments))},n.source=function(e){return arguments.length?(r=e,t=\"function\"==typeof e?null:e,n):r},n.target=function(t){return arguments.length?(i=t,e=\"function\"==typeof 
t?null:t,n):i},n.precision=function(){return arguments.length?n:0},n},ao.geo.interpolate=function(n,t){return be(n[0]*Yo,n[1]*Yo,t[0]*Yo,t[1]*Yo)},ao.geo.length=function(n){return Ja=0,ao.geo.stream(n,Ga),Ja};var Ja,Ga={sphere:b,point:b,lineStart:_e,lineEnd:b,polygonStart:b,polygonEnd:b},Ka=we(function(n){return Math.sqrt(2/(1+n))},function(n){return 2*Math.asin(n/2)});(ao.geo.azimuthalEqualArea=function(){return oe(Ka)}).raw=Ka;var Qa=we(function(n){var t=Math.acos(n);return t&&t/Math.sin(t)},m);(ao.geo.azimuthalEquidistant=function(){return oe(Qa)}).raw=Qa,(ao.geo.conicConformal=function(){return Vt(Se)}).raw=Se,(ao.geo.conicEquidistant=function(){return Vt(ke)}).raw=ke;var nl=we(function(n){return 1/n},Math.atan);(ao.geo.gnomonic=function(){return oe(nl)}).raw=nl,Ne.invert=function(n,t){return[n,2*Math.atan(Math.exp(t))-Io]},(ao.geo.mercator=function(){return Ee(Ne)}).raw=Ne;var tl=we(function(){return 1},Math.asin);(ao.geo.orthographic=function(){return oe(tl)}).raw=tl;var el=we(function(n){return 1/(1+n)},function(n){return 2*Math.atan(n)});(ao.geo.stereographic=function(){return oe(el)}).raw=el,Ae.invert=function(n,t){return[-t,2*Math.atan(Math.exp(n))-Io]},(ao.geo.transverseMercator=function(){var n=Ee(Ae),t=n.center,e=n.rotate;return n.center=function(n){return n?t([-n[1],n[0]]):(n=t(),[n[1],-n[0]])},n.rotate=function(n){return n?e([n[0],n[1],n.length>2?n[2]+90:90]):(n=e(),[n[0],n[1],n[2]-90])},e([0,0,90])}).raw=Ae,ao.geom={},ao.geom.hull=function(n){function t(n){if(n.length<3)return[];var t,i=En(e),u=En(r),o=n.length,a=[],l=[];for(t=0;o>t;t++)a.push([+i.call(this,n[t],t),+u.call(this,n[t],t),t]);for(a.sort(qe),t=0;o>t;t++)l.push([a[t][0],-a[t][1]]);var c=Le(a),f=Le(l),s=f[0]===c[0],h=f[f.length-1]===c[c.length-1],p=[];for(t=c.length-1;t>=0;--t)p.push(n[a[c[t]][2]]);for(t=+s;t<f.length-h;++t)p.push(n[a[f[t]][2]]);return p}var e=Ce,r=ze;return arguments.length?t(n):(t.x=function(n){return arguments.length?(e=n,t):e},t.y=function(n){return 
arguments.length?(r=n,t):r},t)},ao.geom.polygon=function(n){return ko(n,rl),n};var rl=ao.geom.polygon.prototype=[];rl.area=function(){for(var n,t=-1,e=this.length,r=this[e-1],i=0;++t<e;)n=r,r=this[t],i+=n[1]*r[0]-n[0]*r[1];return.5*i},rl.centroid=function(n){var t,e,r=-1,i=this.length,u=0,o=0,a=this[i-1];for(arguments.length||(n=-1/(6*this.area()));++r<i;)t=a,a=this[r],e=t[0]*a[1]-a[0]*t[1],u+=(t[0]+a[0])*e,o+=(t[1]+a[1])*e;return[u*n,o*n]},rl.clip=function(n){for(var t,e,r,i,u,o,a=De(n),l=-1,c=this.length-De(this),f=this[c-1];++l<c;){for(t=n.slice(),n.length=0,i=this[l],u=t[(r=t.length-a)-1],e=-1;++e<r;)o=t[e],Te(o,f,i)?(Te(u,f,i)||n.push(Re(u,o,f,i)),n.push(o)):Te(u,f,i)&&n.push(Re(u,o,f,i)),u=o;a&&n.push(n[0]),f=i}return n};var il,ul,ol,al,ll,cl=[],fl=[];Ye.prototype.prepare=function(){for(var n,t=this.edges,e=t.length;e--;)n=t[e].edge,n.b&&n.a||t.splice(e,1);return t.sort(Ve),t.length},tr.prototype={start:function(){return this.edge.l===this.site?this.edge.a:this.edge.b},end:function(){return this.edge.l===this.site?this.edge.b:this.edge.a}},er.prototype={insert:function(n,t){var e,r,i;if(n){if(t.P=n,t.N=n.N,n.N&&(n.N.P=t),n.N=t,n.R){for(n=n.R;n.L;)n=n.L;n.L=t}else n.R=t;e=n}else this._?(n=or(this._),t.P=null,t.N=n,n.P=n.L=t,e=n):(t.P=t.N=null,this._=t,e=null);for(t.L=t.R=null,t.U=e,t.C=!0,n=t;e&&e.C;)r=e.U,e===r.L?(i=r.R,i&&i.C?(e.C=i.C=!1,r.C=!0,n=r):(n===e.R&&(ir(this,e),n=e,e=n.U),e.C=!1,r.C=!0,ur(this,r))):(i=r.L,i&&i.C?(e.C=i.C=!1,r.C=!0,n=r):(n===e.L&&(ur(this,e),n=e,e=n.U),e.C=!1,r.C=!0,ir(this,r))),e=n.U;this._.C=!1},remove:function(n){n.N&&(n.N.P=n.P),n.P&&(n.P.N=n.N),n.N=n.P=null;var t,e,r,i=n.U,u=n.L,o=n.R;if(e=u?o?or(o):u:o,i?i.L===n?i.L=e:i.R=e:this._=e,u&&o?(r=e.C,e.C=n.C,e.L=u,u.U=e,e!==o?(i=e.U,e.U=n.U,n=e.R,i.L=n,e.R=o,o.U=e):(e.U=i,i=e,n=e.R)):(r=n.C,n=e),n&&(n.U=i),!r){if(n&&n.C)return 
void(n.C=!1);do{if(n===this._)break;if(n===i.L){if(t=i.R,t.C&&(t.C=!1,i.C=!0,ir(this,i),t=i.R),t.L&&t.L.C||t.R&&t.R.C){t.R&&t.R.C||(t.L.C=!1,t.C=!0,ur(this,t),t=i.R),t.C=i.C,i.C=t.R.C=!1,ir(this,i),n=this._;break}}else if(t=i.L,t.C&&(t.C=!1,i.C=!0,ur(this,i),t=i.L),t.L&&t.L.C||t.R&&t.R.C){t.L&&t.L.C||(t.R.C=!1,t.C=!0,ir(this,t),t=i.L),t.C=i.C,i.C=t.L.C=!1,ur(this,i),n=this._;break}t.C=!0,n=i,i=i.U}while(!n.C);n&&(n.C=!1)}}},ao.geom.voronoi=function(n){function t(n){var t=new Array(n.length),r=a[0][0],i=a[0][1],u=a[1][0],o=a[1][1];return ar(e(n),a).cells.forEach(function(e,a){var l=e.edges,c=e.site,f=t[a]=l.length?l.map(function(n){var t=n.start();return[t.x,t.y]}):c.x>=r&&c.x<=u&&c.y>=i&&c.y<=o?[[r,o],[u,o],[u,i],[r,i]]:[];f.point=n[a]}),t}function e(n){return n.map(function(n,t){return{x:Math.round(u(n,t)/Uo)*Uo,y:Math.round(o(n,t)/Uo)*Uo,i:t}})}var r=Ce,i=ze,u=r,o=i,a=sl;return n?t(n):(t.links=function(n){return ar(e(n)).edges.filter(function(n){return n.l&&n.r}).map(function(t){return{source:n[t.l.i],target:n[t.r.i]}})},t.triangles=function(n){var t=[];return ar(e(n)).cells.forEach(function(e,r){for(var i,u,o=e.site,a=e.edges.sort(Ve),l=-1,c=a.length,f=a[c-1].edge,s=f.l===o?f.r:f.l;++l<c;)i=f,u=s,f=a[l].edge,s=f.l===o?f.r:f.l,r<u.i&&r<s.i&&cr(o,u,s)<0&&t.push([n[r],n[u.i],n[s.i]])}),t},t.x=function(n){return arguments.length?(u=En(r=n),t):r},t.y=function(n){return arguments.length?(o=En(i=n),t):i},t.clipExtent=function(n){return arguments.length?(a=null==n?sl:n,t):a===sl?null:a},t.size=function(n){return arguments.length?t.clipExtent(n&&[[0,0],n]):a===sl?null:a&&a[1]},t)};var sl=[[-1e6,-1e6],[1e6,1e6]];ao.geom.delaunay=function(n){return ao.geom.voronoi().triangles(n)},ao.geom.quadtree=function(n,t,e,r,i){function u(n){function u(n,t,e,r,i,u,o,a){if(!isNaN(e)&&!isNaN(r))if(n.leaf){var l=n.x,f=n.y;if(null!=l)if(xo(l-e)+xo(f-r)<.01)c(n,t,e,r,i,u,o,a);else{var s=n.point;n.x=n.y=n.point=null,c(n,s,l,f,i,u,o,a),c(n,t,e,r,i,u,o,a)}else n.x=e,n.y=r,n.point=t}else 
c(n,t,e,r,i,u,o,a)}function c(n,t,e,r,i,o,a,l){var c=.5*(i+a),f=.5*(o+l),s=e>=c,h=r>=f,p=h<<1|s;n.leaf=!1,n=n.nodes[p]||(n.nodes[p]=hr()),s?i=c:a=c,h?o=f:l=f,u(n,t,e,r,i,o,a,l)}var f,s,h,p,g,v,d,y,m,M=En(a),x=En(l);if(null!=t)v=t,d=e,y=r,m=i;else if(y=m=-(v=d=1/0),s=[],h=[],g=n.length,o)for(p=0;g>p;++p)f=n[p],f.x<v&&(v=f.x),f.y<d&&(d=f.y),f.x>y&&(y=f.x),f.y>m&&(m=f.y),s.push(f.x),h.push(f.y);else for(p=0;g>p;++p){var b=+M(f=n[p],p),_=+x(f,p);v>b&&(v=b),d>_&&(d=_),b>y&&(y=b),_>m&&(m=_),s.push(b),h.push(_)}var w=y-v,S=m-d;w>S?m=d+w:y=v+S;var k=hr();if(k.add=function(n){u(k,n,+M(n,++p),+x(n,p),v,d,y,m)},k.visit=function(n){pr(n,k,v,d,y,m)},k.find=function(n){return gr(k,n[0],n[1],v,d,y,m)},p=-1,null==t){for(;++p<g;)u(k,n[p],s[p],h[p],v,d,y,m);--p}else n.forEach(k.add);return s=h=n=f=null,k}var o,a=Ce,l=ze;return(o=arguments.length)?(a=fr,l=sr,3===o&&(i=e,r=t,e=t=0),u(n)):(u.x=function(n){return arguments.length?(a=n,u):a},u.y=function(n){return arguments.length?(l=n,u):l},u.extent=function(n){return arguments.length?(null==n?t=e=r=i=null:(t=+n[0][0],e=+n[0][1],r=+n[1][0],i=+n[1][1]),u):null==t?null:[[t,e],[r,i]]},u.size=function(n){return arguments.length?(null==n?t=e=r=i=null:(t=e=0,r=+n[0],i=+n[1]),u):null==t?null:[r-t,i-e]},u)},ao.interpolateRgb=vr,ao.interpolateObject=dr,ao.interpolateNumber=yr,ao.interpolateString=mr;var hl=/[-+]?(?:\\d+\\.?\\d*|\\.?\\d+)(?:[eE][-+]?\\d+)?/g,pl=new RegExp(hl.source,\"g\");ao.interpolate=Mr,ao.interpolators=[function(n,t){var e=typeof t;return(\"string\"===e?ua.has(t.toLowerCase())||/^(#|rgb\\(|hsl\\()/i.test(t)?vr:mr:t instanceof an?vr:Array.isArray(t)?xr:\"object\"===e&&isNaN(t)?dr:yr)(n,t)}],ao.interpolateArray=xr;var gl=function(){return m},vl=ao.map({linear:gl,poly:Er,quad:function(){return Sr},cubic:function(){return kr},sin:function(){return Ar},exp:function(){return Cr},circle:function(){return zr},elastic:Lr,back:qr,bounce:function(){return Tr}}),dl=ao.map({\"in\":m,out:_r,\"in-out\":wr,\"out-in\":function(n){return 
wr(_r(n))}});ao.ease=function(n){var t=n.indexOf(\"-\"),e=t>=0?n.slice(0,t):n,r=t>=0?n.slice(t+1):\"in\";return e=vl.get(e)||gl,r=dl.get(r)||m,br(r(e.apply(null,lo.call(arguments,1))))},ao.interpolateHcl=Rr,ao.interpolateHsl=Dr,ao.interpolateLab=Pr,ao.interpolateRound=Ur,ao.transform=function(n){var t=fo.createElementNS(ao.ns.prefix.svg,\"g\");return(ao.transform=function(n){if(null!=n){t.setAttribute(\"transform\",n);var e=t.transform.baseVal.consolidate()}return new jr(e?e.matrix:yl)})(n)},jr.prototype.toString=function(){return\"translate(\"+this.translate+\")rotate(\"+this.rotate+\")skewX(\"+this.skew+\")scale(\"+this.scale+\")\"};var yl={a:1,b:0,c:0,d:1,e:0,f:0};ao.interpolateTransform=$r,ao.layout={},ao.layout.bundle=function(){return function(n){for(var t=[],e=-1,r=n.length;++e<r;)t.push(Jr(n[e]));return t}},ao.layout.chord=function(){function n(){var n,c,s,h,p,g={},v=[],d=ao.range(u),y=[];for(e=[],r=[],n=0,h=-1;++h<u;){for(c=0,p=-1;++p<u;)c+=i[h][p];v.push(c),y.push(ao.range(u)),n+=c}for(o&&d.sort(function(n,t){return o(v[n],v[t])}),a&&y.forEach(function(n,t){n.sort(function(n,e){return a(i[t][n],i[t][e])})}),n=(Ho-f*u)/n,c=0,h=-1;++h<u;){for(s=c,p=-1;++p<u;){var m=d[h],M=y[m][p],x=i[m][M],b=c,_=c+=x*n;g[m+\"-\"+M]={index:m,subindex:M,startAngle:b,endAngle:_,value:x}}r[m]={index:m,startAngle:s,endAngle:c,value:v[m]},c+=f}for(h=-1;++h<u;)for(p=h-1;++p<u;){var w=g[h+\"-\"+p],S=g[p+\"-\"+h];(w.value||S.value)&&e.push(w.value<S.value?{source:S,target:w}:{source:w,target:S})}l&&t()}function t(){e.sort(function(n,t){return l((n.source.value+n.target.value)/2,(t.source.value+t.target.value)/2)})}var e,r,i,u,o,a,l,c={},f=0;return c.matrix=function(n){return arguments.length?(u=(i=n)&&i.length,e=r=null,c):i},c.padding=function(n){return arguments.length?(f=n,e=r=null,c):f},c.sortGroups=function(n){return arguments.length?(o=n,e=r=null,c):o},c.sortSubgroups=function(n){return arguments.length?(a=n,e=null,c):a},c.sortChords=function(n){return 
arguments.length?(l=n,e&&t(),c):l},c.chords=function(){return e||n(),e},c.groups=function(){return r||n(),r},c},ao.layout.force=function(){function n(n){return function(t,e,r,i){if(t.point!==n){var u=t.cx-n.x,o=t.cy-n.y,a=i-e,l=u*u+o*o;if(l>a*a/y){if(v>l){var c=t.charge/l;n.px-=u*c,n.py-=o*c}return!0}if(t.point&&l&&v>l){var c=t.pointCharge/l;n.px-=u*c,n.py-=o*c}}return!t.charge}}function t(n){n.px=ao.event.x,n.py=ao.event.y,l.resume()}var e,r,i,u,o,a,l={},c=ao.dispatch(\"start\",\"tick\",\"end\"),f=[1,1],s=.9,h=ml,p=Ml,g=-30,v=xl,d=.1,y=.64,M=[],x=[];return l.tick=function(){if((i*=.99)<.005)return e=null,c.end({type:\"end\",alpha:i=0}),!0;var t,r,l,h,p,v,y,m,b,_=M.length,w=x.length;for(r=0;w>r;++r)l=x[r],h=l.source,p=l.target,m=p.x-h.x,b=p.y-h.y,(v=m*m+b*b)&&(v=i*o[r]*((v=Math.sqrt(v))-u[r])/v,m*=v,b*=v,p.x-=m*(y=h.weight+p.weight?h.weight/(h.weight+p.weight):.5),p.y-=b*y,h.x+=m*(y=1-y),h.y+=b*y);if((y=i*d)&&(m=f[0]/2,b=f[1]/2,r=-1,y))for(;++r<_;)l=M[r],l.x+=(m-l.x)*y,l.y+=(b-l.y)*y;if(g)for(ri(t=ao.geom.quadtree(M),i,a),r=-1;++r<_;)(l=M[r]).fixed||t.visit(n(l));for(r=-1;++r<_;)l=M[r],l.fixed?(l.x=l.px,l.y=l.py):(l.x-=(l.px-(l.px=l.x))*s,l.y-=(l.py-(l.py=l.y))*s);c.tick({type:\"tick\",alpha:i})},l.nodes=function(n){return arguments.length?(M=n,l):M},l.links=function(n){return arguments.length?(x=n,l):x},l.size=function(n){return arguments.length?(f=n,l):f},l.linkDistance=function(n){return arguments.length?(h=\"function\"==typeof n?n:+n,l):h},l.distance=l.linkDistance,l.linkStrength=function(n){return arguments.length?(p=\"function\"==typeof n?n:+n,l):p},l.friction=function(n){return arguments.length?(s=+n,l):s},l.charge=function(n){return arguments.length?(g=\"function\"==typeof n?n:+n,l):g},l.chargeDistance=function(n){return arguments.length?(v=n*n,l):Math.sqrt(v)},l.gravity=function(n){return arguments.length?(d=+n,l):d},l.theta=function(n){return arguments.length?(y=n*n,l):Math.sqrt(y)},l.alpha=function(n){return 
arguments.length?(n=+n,i?n>0?i=n:(e.c=null,e.t=NaN,e=null,c.end({type:\"end\",alpha:i=0})):n>0&&(c.start({type:\"start\",alpha:i=n}),e=qn(l.tick)),l):i},l.start=function(){function n(n,r){if(!e){for(e=new Array(i),l=0;i>l;++l)e[l]=[];for(l=0;c>l;++l){var u=x[l];e[u.source.index].push(u.target),e[u.target.index].push(u.source)}}for(var o,a=e[t],l=-1,f=a.length;++l<f;)if(!isNaN(o=a[l][n]))return o;return Math.random()*r}var t,e,r,i=M.length,c=x.length,s=f[0],v=f[1];for(t=0;i>t;++t)(r=M[t]).index=t,r.weight=0;for(t=0;c>t;++t)r=x[t],\"number\"==typeof r.source&&(r.source=M[r.source]),\"number\"==typeof r.target&&(r.target=M[r.target]),++r.source.weight,++r.target.weight;for(t=0;i>t;++t)r=M[t],isNaN(r.x)&&(r.x=n(\"x\",s)),isNaN(r.y)&&(r.y=n(\"y\",v)),isNaN(r.px)&&(r.px=r.x),isNaN(r.py)&&(r.py=r.y);if(u=[],\"function\"==typeof h)for(t=0;c>t;++t)u[t]=+h.call(this,x[t],t);else for(t=0;c>t;++t)u[t]=h;if(o=[],\"function\"==typeof p)for(t=0;c>t;++t)o[t]=+p.call(this,x[t],t);else for(t=0;c>t;++t)o[t]=p;if(a=[],\"function\"==typeof g)for(t=0;i>t;++t)a[t]=+g.call(this,M[t],t);else for(t=0;i>t;++t)a[t]=g;return l.resume()},l.resume=function(){return l.alpha(.1)},l.stop=function(){return l.alpha(0)},l.drag=function(){return r||(r=ao.behavior.drag().origin(m).on(\"dragstart.force\",Qr).on(\"drag.force\",t).on(\"dragend.force\",ni)),arguments.length?void this.on(\"mouseover.force\",ti).on(\"mouseout.force\",ei).call(r):r},ao.rebind(l,c,\"on\")};var ml=20,Ml=1,xl=1/0;ao.layout.hierarchy=function(){function n(i){var u,o=[i],a=[];for(i.depth=0;null!=(u=o.pop());)if(a.push(u),(c=e.call(n,u,u.depth))&&(l=c.length)){for(var l,c,f;--l>=0;)o.push(f=c[l]),f.parent=u,f.depth=u.depth+1;r&&(u.value=0),u.children=c}else r&&(u.value=+r.call(n,u,u.depth)||0),delete u.children;return oi(i,function(n){var e,i;t&&(e=n.children)&&e.sort(t),r&&(i=n.parent)&&(i.value+=n.value)}),a}var t=ci,e=ai,r=li;return n.sort=function(e){return arguments.length?(t=e,n):t},n.children=function(t){return 
arguments.length?(e=t,n):e},n.value=function(t){return arguments.length?(r=t,n):r},n.revalue=function(t){return r&&(ui(t,function(n){n.children&&(n.value=0)}),oi(t,function(t){var e;t.children||(t.value=+r.call(n,t,t.depth)||0),(e=t.parent)&&(e.value+=t.value)})),t},n},ao.layout.partition=function(){function n(t,e,r,i){var u=t.children;if(t.x=e,t.y=t.depth*i,t.dx=r,t.dy=i,u&&(o=u.length)){var o,a,l,c=-1;for(r=t.value?r/t.value:0;++c<o;)n(a=u[c],e,l=a.value*r,i),e+=l}}function t(n){var e=n.children,r=0;if(e&&(i=e.length))for(var i,u=-1;++u<i;)r=Math.max(r,t(e[u]));return 1+r}function e(e,u){var o=r.call(this,e,u);return n(o[0],0,i[0],i[1]/t(o[0])),o}var r=ao.layout.hierarchy(),i=[1,1];return e.size=function(n){return arguments.length?(i=n,e):i},ii(e,r)},ao.layout.pie=function(){function n(o){var a,l=o.length,c=o.map(function(e,r){return+t.call(n,e,r)}),f=+(\"function\"==typeof r?r.apply(this,arguments):r),s=(\"function\"==typeof i?i.apply(this,arguments):i)-f,h=Math.min(Math.abs(s)/l,+(\"function\"==typeof u?u.apply(this,arguments):u)),p=h*(0>s?-1:1),g=ao.sum(c),v=g?(s-l*p)/g:0,d=ao.range(l),y=[];return null!=e&&d.sort(e===bl?function(n,t){return c[t]-c[n]}:function(n,t){return e(o[n],o[t])}),d.forEach(function(n){y[n]={data:o[n],value:a=c[n],startAngle:f,endAngle:f+=a*v+p,padAngle:h}}),y}var t=Number,e=bl,r=0,i=Ho,u=0;return n.value=function(e){return arguments.length?(t=e,n):t},n.sort=function(t){return arguments.length?(e=t,n):e},n.startAngle=function(t){return arguments.length?(r=t,n):r},n.endAngle=function(t){return arguments.length?(i=t,n):i},n.padAngle=function(t){return arguments.length?(u=t,n):u},n};var bl={};ao.layout.stack=function(){function n(a,l){if(!(h=a.length))return a;var c=a.map(function(e,r){return t.call(n,e,r)}),f=c.map(function(t){return t.map(function(t,e){return[u.call(n,t,e),o.call(n,t,e)]})}),s=e.call(n,f,l);c=ao.permute(c,s),f=ao.permute(f,s);var 
h,p,g,v,d=r.call(n,f,l),y=c[0].length;for(g=0;y>g;++g)for(i.call(n,c[0][g],v=d[g],f[0][g][1]),p=1;h>p;++p)i.call(n,c[p][g],v+=f[p-1][g][1],f[p][g][1]);return a}var t=m,e=gi,r=vi,i=pi,u=si,o=hi;return n.values=function(e){return arguments.length?(t=e,n):t},n.order=function(t){return arguments.length?(e=\"function\"==typeof t?t:_l.get(t)||gi,n):e},n.offset=function(t){return arguments.length?(r=\"function\"==typeof t?t:wl.get(t)||vi,n):r},n.x=function(t){return arguments.length?(u=t,n):u},n.y=function(t){return arguments.length?(o=t,n):o},n.out=function(t){return arguments.length?(i=t,n):i},n};var _l=ao.map({\"inside-out\":function(n){var t,e,r=n.length,i=n.map(di),u=n.map(yi),o=ao.range(r).sort(function(n,t){return i[n]-i[t]}),a=0,l=0,c=[],f=[];for(t=0;r>t;++t)e=o[t],l>a?(a+=u[e],c.push(e)):(l+=u[e],f.push(e));return f.reverse().concat(c)},reverse:function(n){return ao.range(n.length).reverse()},\"default\":gi}),wl=ao.map({silhouette:function(n){var t,e,r,i=n.length,u=n[0].length,o=[],a=0,l=[];for(e=0;u>e;++e){for(t=0,r=0;i>t;t++)r+=n[t][e][1];r>a&&(a=r),o.push(r)}for(e=0;u>e;++e)l[e]=(a-o[e])/2;return l},wiggle:function(n){var t,e,r,i,u,o,a,l,c,f=n.length,s=n[0],h=s.length,p=[];for(p[0]=l=c=0,e=1;h>e;++e){for(t=0,i=0;f>t;++t)i+=n[t][e][1];for(t=0,u=0,a=s[e][0]-s[e-1][0];f>t;++t){for(r=0,o=(n[t][e][1]-n[t][e-1][1])/(2*a);t>r;++r)o+=(n[r][e][1]-n[r][e-1][1])/a;u+=o*n[t][e][1]}p[e]=l-=i?u/i*a:0,c>l&&(c=l)}for(e=0;h>e;++e)p[e]-=c;return p},expand:function(n){var t,e,r,i=n.length,u=n[0].length,o=1/i,a=[];for(e=0;u>e;++e){for(t=0,r=0;i>t;t++)r+=n[t][e][1];if(r)for(t=0;i>t;t++)n[t][e][1]/=r;else for(t=0;i>t;t++)n[t][e][1]=o}for(e=0;u>e;++e)a[e]=0;return a},zero:vi});ao.layout.histogram=function(){function n(n,u){for(var o,a,l=[],c=n.map(e,this),f=r.call(this,c,u),s=i.call(this,f,c,u),u=-1,h=c.length,p=s.length-1,g=t?1:1/h;++u<p;)o=l[u]=[],o.dx=s[u+1]-(o.x=s[u]),o.y=0;if(p>0)for(u=-1;++u<h;)a=c[u],a>=f[0]&&a<=f[1]&&(o=l[ao.bisect(s,a,1,p)-1],o.y+=g,o.push(n[u]));return 
l}var t=!0,e=Number,r=bi,i=Mi;return n.value=function(t){return arguments.length?(e=t,n):e},n.range=function(t){return arguments.length?(r=En(t),n):r},n.bins=function(t){return arguments.length?(i=\"number\"==typeof t?function(n){return xi(n,t)}:En(t),n):i},n.frequency=function(e){return arguments.length?(t=!!e,n):t},n},ao.layout.pack=function(){function n(n,u){var o=e.call(this,n,u),a=o[0],l=i[0],c=i[1],f=null==t?Math.sqrt:\"function\"==typeof t?t:function(){return t};if(a.x=a.y=0,oi(a,function(n){n.r=+f(n.value)}),oi(a,Ni),r){var s=r*(t?1:Math.max(2*a.r/l,2*a.r/c))/2;oi(a,function(n){n.r+=s}),oi(a,Ni),oi(a,function(n){n.r-=s})}return Ci(a,l/2,c/2,t?1:1/Math.max(2*a.r/l,2*a.r/c)),o}var t,e=ao.layout.hierarchy().sort(_i),r=0,i=[1,1];return n.size=function(t){return arguments.length?(i=t,n):i},n.radius=function(e){return arguments.length?(t=null==e||\"function\"==typeof e?e:+e,n):t},n.padding=function(t){return arguments.length?(r=+t,n):r},ii(n,e)},ao.layout.tree=function(){function n(n,i){var f=o.call(this,n,i),s=f[0],h=t(s);if(oi(h,e),h.parent.m=-h.z,ui(h,r),c)ui(s,u);else{var p=s,g=s,v=s;ui(s,function(n){n.x<p.x&&(p=n),n.x>g.x&&(g=n),n.depth>v.depth&&(v=n)});var d=a(p,g)/2-p.x,y=l[0]/(g.x+a(g,p)/2+d),m=l[1]/(v.depth||1);ui(s,function(n){n.x=(n.x+d)*y,n.y=n.depth*m})}return f}function t(n){for(var t,e={A:null,children:[n]},r=[e];null!=(t=r.pop());)for(var i,u=t.children,o=0,a=u.length;a>o;++o)r.push((u[o]=i={_:u[o],parent:t,children:(i=u[o].children)&&i.slice()||[],A:null,a:null,z:0,m:0,c:0,s:0,t:null,i:o}).a=i);return e.children[0]}function e(n){var t=n.children,e=n.parent.children,r=n.i?e[n.i-1]:null;if(t.length){Di(n);var u=(t[0].z+t[t.length-1].z)/2;r?(n.z=r.z+a(n._,r._),n.m=n.z-u):n.z=u}else r&&(n.z=r.z+a(n._,r._));n.parent.A=i(n,r,n.parent.A||e[0])}function r(n){n._.x=n.z+n.parent.m,n.m+=n.parent.m}function i(n,t,e){if(t){for(var 
r,i=n,u=n,o=t,l=i.parent.children[0],c=i.m,f=u.m,s=o.m,h=l.m;o=Ti(o),i=qi(i),o&&i;)l=qi(l),u=Ti(u),u.a=n,r=o.z+s-i.z-c+a(o._,i._),r>0&&(Ri(Pi(o,n,e),n,r),c+=r,f+=r),s+=o.m,c+=i.m,h+=l.m,f+=u.m;o&&!Ti(u)&&(u.t=o,u.m+=s-f),i&&!qi(l)&&(l.t=i,l.m+=c-h,e=n)}return e}function u(n){n.x*=l[0],n.y=n.depth*l[1]}var o=ao.layout.hierarchy().sort(null).value(null),a=Li,l=[1,1],c=null;return n.separation=function(t){return arguments.length?(a=t,n):a},n.size=function(t){return arguments.length?(c=null==(l=t)?u:null,n):c?null:l},n.nodeSize=function(t){return arguments.length?(c=null==(l=t)?null:u,n):c?l:null},ii(n,o)},ao.layout.cluster=function(){function n(n,u){var o,a=t.call(this,n,u),l=a[0],c=0;oi(l,function(n){var t=n.children;t&&t.length?(n.x=ji(t),n.y=Ui(t)):(n.x=o?c+=e(n,o):0,n.y=0,o=n)});var f=Fi(l),s=Hi(l),h=f.x-e(f,s)/2,p=s.x+e(s,f)/2;return oi(l,i?function(n){n.x=(n.x-l.x)*r[0],n.y=(l.y-n.y)*r[1]}:function(n){n.x=(n.x-h)/(p-h)*r[0],n.y=(1-(l.y?n.y/l.y:1))*r[1]}),a}var t=ao.layout.hierarchy().sort(null).value(null),e=Li,r=[1,1],i=!1;return n.separation=function(t){return arguments.length?(e=t,n):e},n.size=function(t){return arguments.length?(i=null==(r=t),n):i?null:r},n.nodeSize=function(t){return arguments.length?(i=null!=(r=t),n):i?r:null},ii(n,t)},ao.layout.treemap=function(){function n(n,t){for(var e,r,i=-1,u=n.length;++i<u;)r=(e=n[i]).value*(0>t?0:t),e.area=isNaN(r)||0>=r?0:r}function t(e){var u=e.children;if(u&&u.length){var o,a,l,c=s(e),f=[],h=u.slice(),g=1/0,v=\"slice\"===p?c.dx:\"dice\"===p?c.dy:\"slice-dice\"===p?1&e.depth?c.dy:c.dx:Math.min(c.dx,c.dy);for(n(h,c.dx*c.dy/e.value),f.area=0;(l=h.length)>0;)f.push(o=h[l-1]),f.area+=o.area,\"squarify\"!==p||(a=r(f,v))<=g?(h.pop(),g=a):(f.area-=f.pop().area,i(f,v,c,!1),v=Math.min(c.dx,c.dy),f.length=f.area=0,g=1/0);f.length&&(i(f,v,c,!0),f.length=f.area=0),u.forEach(t)}}function e(t){var r=t.children;if(r&&r.length){var 
u,o=s(t),a=r.slice(),l=[];for(n(a,o.dx*o.dy/t.value),l.area=0;u=a.pop();)l.push(u),l.area+=u.area,null!=u.z&&(i(l,u.z?o.dx:o.dy,o,!a.length),l.length=l.area=0);r.forEach(e)}}function r(n,t){for(var e,r=n.area,i=0,u=1/0,o=-1,a=n.length;++o<a;)(e=n[o].area)&&(u>e&&(u=e),e>i&&(i=e));return r*=r,t*=t,r?Math.max(t*i*g/r,r/(t*u*g)):1/0}function i(n,t,e,r){var i,u=-1,o=n.length,a=e.x,c=e.y,f=t?l(n.area/t):0;\nif(t==e.dx){for((r||f>e.dy)&&(f=e.dy);++u<o;)i=n[u],i.x=a,i.y=c,i.dy=f,a+=i.dx=Math.min(e.x+e.dx-a,f?l(i.area/f):0);i.z=!0,i.dx+=e.x+e.dx-a,e.y+=f,e.dy-=f}else{for((r||f>e.dx)&&(f=e.dx);++u<o;)i=n[u],i.x=a,i.y=c,i.dx=f,c+=i.dy=Math.min(e.y+e.dy-c,f?l(i.area/f):0);i.z=!1,i.dy+=e.y+e.dy-c,e.x+=f,e.dx-=f}}function u(r){var i=o||a(r),u=i[0];return u.x=u.y=0,u.value?(u.dx=c[0],u.dy=c[1]):u.dx=u.dy=0,o&&a.revalue(u),n([u],u.dx*u.dy/u.value),(o?e:t)(u),h&&(o=i),i}var o,a=ao.layout.hierarchy(),l=Math.round,c=[1,1],f=null,s=Oi,h=!1,p=\"squarify\",g=.5*(1+Math.sqrt(5));return u.size=function(n){return arguments.length?(c=n,u):c},u.padding=function(n){function t(t){var e=n.call(u,t,t.depth);return null==e?Oi(t):Ii(t,\"number\"==typeof e?[e,e,e,e]:e)}function e(t){return Ii(t,n)}if(!arguments.length)return f;var r;return s=null==(f=n)?Oi:\"function\"==(r=typeof n)?t:\"number\"===r?(n=[n,n,n,n],e):e,u},u.round=function(n){return arguments.length?(l=n?Math.round:Number,u):l!=Number},u.sticky=function(n){return arguments.length?(h=n,o=null,u):h},u.ratio=function(n){return arguments.length?(g=n,u):g},u.mode=function(n){return arguments.length?(p=n+\"\",u):p},ii(u,a)},ao.random={normal:function(n,t){var e=arguments.length;return 2>e&&(t=1),1>e&&(n=0),function(){var e,r,i;do e=2*Math.random()-1,r=2*Math.random()-1,i=e*e+r*r;while(!i||i>1);return n+t*e*Math.sqrt(-2*Math.log(i)/i)}},logNormal:function(){var n=ao.random.normal.apply(ao,arguments);return function(){return Math.exp(n())}},bates:function(n){var t=ao.random.irwinHall(n);return function(){return 
t()/n}},irwinHall:function(n){return function(){for(var t=0,e=0;n>e;e++)t+=Math.random();return t}}},ao.scale={};var Sl={floor:m,ceil:m};ao.scale.linear=function(){return Wi([0,1],[0,1],Mr,!1)};var kl={s:1,g:1,p:1,r:1,e:1};ao.scale.log=function(){return ru(ao.scale.linear().domain([0,1]),10,!0,[1,10])};var Nl=ao.format(\".0e\"),El={floor:function(n){return-Math.ceil(-n)},ceil:function(n){return-Math.floor(-n)}};ao.scale.pow=function(){return iu(ao.scale.linear(),1,[0,1])},ao.scale.sqrt=function(){return ao.scale.pow().exponent(.5)},ao.scale.ordinal=function(){return ou([],{t:\"range\",a:[[]]})},ao.scale.category10=function(){return ao.scale.ordinal().range(Al)},ao.scale.category20=function(){return ao.scale.ordinal().range(Cl)},ao.scale.category20b=function(){return ao.scale.ordinal().range(zl)},ao.scale.category20c=function(){return ao.scale.ordinal().range(Ll)};var Al=[2062260,16744206,2924588,14034728,9725885,9197131,14907330,8355711,12369186,1556175].map(xn),Cl=[2062260,11454440,16744206,16759672,2924588,10018698,14034728,16750742,9725885,12955861,9197131,12885140,14907330,16234194,8355711,13092807,12369186,14408589,1556175,10410725].map(xn),zl=[3750777,5395619,7040719,10264286,6519097,9216594,11915115,13556636,9202993,12426809,15186514,15190932,8666169,11356490,14049643,15177372,8077683,10834324,13528509,14589654].map(xn),Ll=[3244733,7057110,10406625,13032431,15095053,16616764,16625259,16634018,3253076,7652470,10607003,13101504,7695281,10394312,12369372,14342891,6513507,9868950,12434877,14277081].map(xn);ao.scale.quantile=function(){return au([],[])},ao.scale.quantize=function(){return lu(0,1,[0,1])},ao.scale.threshold=function(){return cu([.5],[0,1])},ao.scale.identity=function(){return fu([0,1])},ao.svg={},ao.svg.arc=function(){function n(){var n=Math.max(0,+e.apply(this,arguments)),c=Math.max(0,+r.apply(this,arguments)),f=o.apply(this,arguments)-Io,s=a.apply(this,arguments)-Io,h=Math.abs(s-f),p=f>s?0:1;if(n>c&&(g=c,c=n,n=g),h>=Oo)return 
t(c,p)+(n?t(n,1-p):\"\")+\"Z\";var g,v,d,y,m,M,x,b,_,w,S,k,N=0,E=0,A=[];if((y=(+l.apply(this,arguments)||0)/2)&&(d=u===ql?Math.sqrt(n*n+c*c):+u.apply(this,arguments),p||(E*=-1),c&&(E=tn(d/c*Math.sin(y))),n&&(N=tn(d/n*Math.sin(y)))),c){m=c*Math.cos(f+E),M=c*Math.sin(f+E),x=c*Math.cos(s-E),b=c*Math.sin(s-E);var C=Math.abs(s-f-2*E)<=Fo?0:1;if(E&&yu(m,M,x,b)===p^C){var z=(f+s)/2;m=c*Math.cos(z),M=c*Math.sin(z),x=b=null}}else m=M=0;if(n){_=n*Math.cos(s-N),w=n*Math.sin(s-N),S=n*Math.cos(f+N),k=n*Math.sin(f+N);var L=Math.abs(f-s+2*N)<=Fo?0:1;if(N&&yu(_,w,S,k)===1-p^L){var q=(f+s)/2;_=n*Math.cos(q),w=n*Math.sin(q),S=k=null}}else _=w=0;if(h>Uo&&(g=Math.min(Math.abs(c-n)/2,+i.apply(this,arguments)))>.001){v=c>n^p?0:1;var T=g,R=g;if(Fo>h){var D=null==S?[_,w]:null==x?[m,M]:Re([m,M],[S,k],[x,b],[_,w]),P=m-D[0],U=M-D[1],j=x-D[0],F=b-D[1],H=1/Math.sin(Math.acos((P*j+U*F)/(Math.sqrt(P*P+U*U)*Math.sqrt(j*j+F*F)))/2),O=Math.sqrt(D[0]*D[0]+D[1]*D[1]);R=Math.min(g,(n-O)/(H-1)),T=Math.min(g,(c-O)/(H+1))}if(null!=x){var I=mu(null==S?[_,w]:[S,k],[m,M],c,T,p),Y=mu([x,b],[_,w],c,T,p);g===T?A.push(\"M\",I[0],\"A\",T,\",\",T,\" 0 0,\",v,\" \",I[1],\"A\",c,\",\",c,\" 0 \",1-p^yu(I[1][0],I[1][1],Y[1][0],Y[1][1]),\",\",p,\" \",Y[1],\"A\",T,\",\",T,\" 0 0,\",v,\" \",Y[0]):A.push(\"M\",I[0],\"A\",T,\",\",T,\" 0 1,\",v,\" \",Y[0])}else A.push(\"M\",m,\",\",M);if(null!=S){var Z=mu([m,M],[S,k],n,-R,p),V=mu([_,w],null==x?[m,M]:[x,b],n,-R,p);g===R?A.push(\"L\",V[0],\"A\",R,\",\",R,\" 0 0,\",v,\" \",V[1],\"A\",n,\",\",n,\" 0 \",p^yu(V[1][0],V[1][1],Z[1][0],Z[1][1]),\",\",1-p,\" \",Z[1],\"A\",R,\",\",R,\" 0 0,\",v,\" \",Z[0]):A.push(\"L\",V[0],\"A\",R,\",\",R,\" 0 0,\",v,\" \",Z[0])}else A.push(\"L\",_,\",\",w)}else A.push(\"M\",m,\",\",M),null!=x&&A.push(\"A\",c,\",\",c,\" 0 \",C,\",\",p,\" \",x,\",\",b),A.push(\"L\",_,\",\",w),null!=S&&A.push(\"A\",n,\",\",n,\" 0 \",L,\",\",1-p,\" \",S,\",\",k);return A.push(\"Z\"),A.join(\"\")}function t(n,t){return\"M0,\"+n+\"A\"+n+\",\"+n+\" 0 1,\"+t+\" 
0,\"+-n+\"A\"+n+\",\"+n+\" 0 1,\"+t+\" 0,\"+n}var e=hu,r=pu,i=su,u=ql,o=gu,a=vu,l=du;return n.innerRadius=function(t){return arguments.length?(e=En(t),n):e},n.outerRadius=function(t){return arguments.length?(r=En(t),n):r},n.cornerRadius=function(t){return arguments.length?(i=En(t),n):i},n.padRadius=function(t){return arguments.length?(u=t==ql?ql:En(t),n):u},n.startAngle=function(t){return arguments.length?(o=En(t),n):o},n.endAngle=function(t){return arguments.length?(a=En(t),n):a},n.padAngle=function(t){return arguments.length?(l=En(t),n):l},n.centroid=function(){var n=(+e.apply(this,arguments)+ +r.apply(this,arguments))/2,t=(+o.apply(this,arguments)+ +a.apply(this,arguments))/2-Io;return[Math.cos(t)*n,Math.sin(t)*n]},n};var ql=\"auto\";ao.svg.line=function(){return Mu(m)};var Tl=ao.map({linear:xu,\"linear-closed\":bu,step:_u,\"step-before\":wu,\"step-after\":Su,basis:zu,\"basis-open\":Lu,\"basis-closed\":qu,bundle:Tu,cardinal:Eu,\"cardinal-open\":ku,\"cardinal-closed\":Nu,monotone:Fu});Tl.forEach(function(n,t){t.key=n,t.closed=/-closed$/.test(n)});var Rl=[0,2/3,1/3,0],Dl=[0,1/3,2/3,0],Pl=[0,1/6,2/3,1/6];ao.svg.line.radial=function(){var n=Mu(Hu);return n.radius=n.x,delete n.x,n.angle=n.y,delete n.y,n},wu.reverse=Su,Su.reverse=wu,ao.svg.area=function(){return Ou(m)},ao.svg.area.radial=function(){var n=Ou(Hu);return n.radius=n.x,delete n.x,n.innerRadius=n.x0,delete n.x0,n.outerRadius=n.x1,delete n.x1,n.angle=n.y,delete n.y,n.startAngle=n.y0,delete n.y0,n.endAngle=n.y1,delete n.y1,n},ao.svg.chord=function(){function n(n,a){var l=t(this,u,n,a),c=t(this,o,n,a);return\"M\"+l.p0+r(l.r,l.p1,l.a1-l.a0)+(e(l,c)?i(l.r,l.p1,l.r,l.p0):i(l.r,l.p1,c.r,c.p0)+r(c.r,c.p1,c.a1-c.a0)+i(c.r,c.p1,l.r,l.p0))+\"Z\"}function t(n,t,e,r){var i=t.call(n,e,r),u=a.call(n,i,r),o=l.call(n,i,r)-Io,f=c.call(n,i,r)-Io;return{r:u,a0:o,a1:f,p0:[u*Math.cos(o),u*Math.sin(o)],p1:[u*Math.cos(f),u*Math.sin(f)]}}function e(n,t){return n.a0==t.a0&&n.a1==t.a1}function r(n,t,e){return\"A\"+n+\",\"+n+\" 0 \"+ 
+(e>Fo)+\",1 \"+t}function i(n,t,e,r){return\"Q 0,0 \"+r}var u=Me,o=xe,a=Iu,l=gu,c=vu;return n.radius=function(t){return arguments.length?(a=En(t),n):a},n.source=function(t){return arguments.length?(u=En(t),n):u},n.target=function(t){return arguments.length?(o=En(t),n):o},n.startAngle=function(t){return arguments.length?(l=En(t),n):l},n.endAngle=function(t){return arguments.length?(c=En(t),n):c},n},ao.svg.diagonal=function(){function n(n,i){var u=t.call(this,n,i),o=e.call(this,n,i),a=(u.y+o.y)/2,l=[u,{x:u.x,y:a},{x:o.x,y:a},o];return l=l.map(r),\"M\"+l[0]+\"C\"+l[1]+\" \"+l[2]+\" \"+l[3]}var t=Me,e=xe,r=Yu;return n.source=function(e){return arguments.length?(t=En(e),n):t},n.target=function(t){return arguments.length?(e=En(t),n):e},n.projection=function(t){return arguments.length?(r=t,n):r},n},ao.svg.diagonal.radial=function(){var n=ao.svg.diagonal(),t=Yu,e=n.projection;return n.projection=function(n){return arguments.length?e(Zu(t=n)):t},n},ao.svg.symbol=function(){function n(n,r){return(Ul.get(t.call(this,n,r))||$u)(e.call(this,n,r))}var t=Xu,e=Vu;return n.type=function(e){return arguments.length?(t=En(e),n):t},n.size=function(t){return arguments.length?(e=En(t),n):e},n};var Ul=ao.map({circle:$u,cross:function(n){var t=Math.sqrt(n/5)/2;return\"M\"+-3*t+\",\"+-t+\"H\"+-t+\"V\"+-3*t+\"H\"+t+\"V\"+-t+\"H\"+3*t+\"V\"+t+\"H\"+t+\"V\"+3*t+\"H\"+-t+\"V\"+t+\"H\"+-3*t+\"Z\"},diamond:function(n){var t=Math.sqrt(n/(2*Fl)),e=t*Fl;return\"M0,\"+-t+\"L\"+e+\",0 0,\"+t+\" \"+-e+\",0Z\"},square:function(n){var t=Math.sqrt(n)/2;return\"M\"+-t+\",\"+-t+\"L\"+t+\",\"+-t+\" \"+t+\",\"+t+\" \"+-t+\",\"+t+\"Z\"},\"triangle-down\":function(n){var t=Math.sqrt(n/jl),e=t*jl/2;return\"M0,\"+e+\"L\"+t+\",\"+-e+\" \"+-t+\",\"+-e+\"Z\"},\"triangle-up\":function(n){var t=Math.sqrt(n/jl),e=t*jl/2;return\"M0,\"+-e+\"L\"+t+\",\"+e+\" \"+-t+\",\"+e+\"Z\"}});ao.svg.symbolTypes=Ul.keys();var jl=Math.sqrt(3),Fl=Math.tan(30*Yo);Co.transition=function(n){for(var 
t,e,r=Hl||++Zl,i=Ku(n),u=[],o=Ol||{time:Date.now(),ease:Nr,delay:0,duration:250},a=-1,l=this.length;++a<l;){u.push(t=[]);for(var c=this[a],f=-1,s=c.length;++f<s;)(e=c[f])&&Qu(e,f,i,r,o),t.push(e)}return Wu(u,i,r)},Co.interrupt=function(n){return this.each(null==n?Il:Bu(Ku(n)))};var Hl,Ol,Il=Bu(Ku()),Yl=[],Zl=0;Yl.call=Co.call,Yl.empty=Co.empty,Yl.node=Co.node,Yl.size=Co.size,ao.transition=function(n,t){return n&&n.transition?Hl?n.transition(t):n:ao.selection().transition(n)},ao.transition.prototype=Yl,Yl.select=function(n){var t,e,r,i=this.id,u=this.namespace,o=[];n=A(n);for(var a=-1,l=this.length;++a<l;){o.push(t=[]);for(var c=this[a],f=-1,s=c.length;++f<s;)(r=c[f])&&(e=n.call(r,r.__data__,f,a))?(\"__data__\"in r&&(e.__data__=r.__data__),Qu(e,f,u,i,r[u][i]),t.push(e)):t.push(null)}return Wu(o,u,i)},Yl.selectAll=function(n){var t,e,r,i,u,o=this.id,a=this.namespace,l=[];n=C(n);for(var c=-1,f=this.length;++c<f;)for(var s=this[c],h=-1,p=s.length;++h<p;)if(r=s[h]){u=r[a][o],e=n.call(r,r.__data__,h,c),l.push(t=[]);for(var g=-1,v=e.length;++g<v;)(i=e[g])&&Qu(i,g,a,o,u),t.push(i)}return Wu(l,a,o)},Yl.filter=function(n){var t,e,r,i=[];\"function\"!=typeof n&&(n=O(n));for(var u=0,o=this.length;o>u;u++){i.push(t=[]);for(var e=this[u],a=0,l=e.length;l>a;a++)(r=e[a])&&n.call(r,r.__data__,a,u)&&t.push(r)}return Wu(i,this.namespace,this.id)},Yl.tween=function(n,t){var e=this.id,r=this.namespace;return arguments.length<2?this.node()[r][e].tween.get(n):Y(this,null==t?function(t){t[r][e].tween.remove(n)}:function(i){i[r][e].tween.set(n,t)})},Yl.attr=function(n,t){function e(){this.removeAttribute(a)}function r(){this.removeAttributeNS(a.space,a.local)}function i(n){return null==n?e:(n+=\"\",function(){var t,e=this.getAttribute(a);return e!==n&&(t=o(e,n),function(n){this.setAttribute(a,t(n))})})}function u(n){return null==n?r:(n+=\"\",function(){var t,e=this.getAttributeNS(a.space,a.local);return 
e!==n&&(t=o(e,n),function(n){this.setAttributeNS(a.space,a.local,t(n))})})}if(arguments.length<2){for(t in n)this.attr(t,n[t]);return this}var o=\"transform\"==n?$r:Mr,a=ao.ns.qualify(n);return Ju(this,\"attr.\"+n,t,a.local?u:i)},Yl.attrTween=function(n,t){function e(n,e){var r=t.call(this,n,e,this.getAttribute(i));return r&&function(n){this.setAttribute(i,r(n))}}function r(n,e){var r=t.call(this,n,e,this.getAttributeNS(i.space,i.local));return r&&function(n){this.setAttributeNS(i.space,i.local,r(n))}}var i=ao.ns.qualify(n);return this.tween(\"attr.\"+n,i.local?r:e)},Yl.style=function(n,e,r){function i(){this.style.removeProperty(n)}function u(e){return null==e?i:(e+=\"\",function(){var i,u=t(this).getComputedStyle(this,null).getPropertyValue(n);return u!==e&&(i=Mr(u,e),function(t){this.style.setProperty(n,i(t),r)})})}var o=arguments.length;if(3>o){if(\"string\"!=typeof n){2>o&&(e=\"\");for(r in n)this.style(r,n[r],e);return this}r=\"\"}return Ju(this,\"style.\"+n,e,u)},Yl.styleTween=function(n,e,r){function i(i,u){var o=e.call(this,i,u,t(this).getComputedStyle(this,null).getPropertyValue(n));return o&&function(t){this.style.setProperty(n,o(t),r)}}return arguments.length<3&&(r=\"\"),this.tween(\"style.\"+n,i)},Yl.text=function(n){return Ju(this,\"text\",n,Gu)},Yl.remove=function(){var n=this.namespace;return this.each(\"end.transition\",function(){var t;this[n].count<2&&(t=this.parentNode)&&t.removeChild(this)})},Yl.ease=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].ease:(\"function\"!=typeof n&&(n=ao.ease.apply(ao,arguments)),Y(this,function(r){r[e][t].ease=n}))},Yl.delay=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].delay:Y(this,\"function\"==typeof n?function(r,i,u){r[e][t].delay=+n.call(r,r.__data__,i,u)}:(n=+n,function(r){r[e][t].delay=n}))},Yl.duration=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].duration:Y(this,\"function\"==typeof 
n?function(r,i,u){r[e][t].duration=Math.max(1,n.call(r,r.__data__,i,u))}:(n=Math.max(1,n),function(r){r[e][t].duration=n}))},Yl.each=function(n,t){var e=this.id,r=this.namespace;if(arguments.length<2){var i=Ol,u=Hl;try{Hl=e,Y(this,function(t,i,u){Ol=t[r][e],n.call(t,t.__data__,i,u)})}finally{Ol=i,Hl=u}}else Y(this,function(i){var u=i[r][e];(u.event||(u.event=ao.dispatch(\"start\",\"end\",\"interrupt\"))).on(n,t)});return this},Yl.transition=function(){for(var n,t,e,r,i=this.id,u=++Zl,o=this.namespace,a=[],l=0,c=this.length;c>l;l++){a.push(n=[]);for(var t=this[l],f=0,s=t.length;s>f;f++)(e=t[f])&&(r=e[o][i],Qu(e,f,o,u,{time:r.time,ease:r.ease,delay:r.delay+r.duration,duration:r.duration})),n.push(e)}return Wu(a,o,u)},ao.svg.axis=function(){function n(n){n.each(function(){var n,c=ao.select(this),f=this.__chart__||e,s=this.__chart__=e.copy(),h=null==l?s.ticks?s.ticks.apply(s,a):s.domain():l,p=null==t?s.tickFormat?s.tickFormat.apply(s,a):m:t,g=c.selectAll(\".tick\").data(h,s),v=g.enter().insert(\"g\",\".domain\").attr(\"class\",\"tick\").style(\"opacity\",Uo),d=ao.transition(g.exit()).style(\"opacity\",Uo).remove(),y=ao.transition(g.order()).style(\"opacity\",1),M=Math.max(i,0)+o,x=Zi(s),b=c.selectAll(\".domain\").data([0]),_=(b.enter().append(\"path\").attr(\"class\",\"domain\"),ao.transition(b));v.append(\"line\"),v.append(\"text\");var w,S,k,N,E=v.select(\"line\"),A=y.select(\"line\"),C=g.select(\"text\").text(p),z=v.select(\"text\"),L=y.select(\"text\"),q=\"top\"===r||\"left\"===r?-1:1;if(\"bottom\"===r||\"top\"===r?(n=no,w=\"x\",k=\"y\",S=\"x2\",N=\"y2\",C.attr(\"dy\",0>q?\"0em\":\".71em\").style(\"text-anchor\",\"middle\"),_.attr(\"d\",\"M\"+x[0]+\",\"+q*u+\"V0H\"+x[1]+\"V\"+q*u)):(n=to,w=\"y\",k=\"x\",S=\"y2\",N=\"x2\",C.attr(\"dy\",\".32em\").style(\"text-anchor\",0>q?\"end\":\"start\"),_.attr(\"d\",\"M\"+q*u+\",\"+x[0]+\"H0V\"+x[1]+\"H\"+q*u)),E.attr(N,q*i),z.attr(k,q*M),A.attr(S,0).attr(N,q*i),L.attr(w,0).attr(k,q*M),s.rangeBand){var 
T=s,R=T.rangeBand()/2;f=s=function(n){return T(n)+R}}else f.rangeBand?f=s:d.call(n,s,f);v.call(n,f,s),y.call(n,s,s)})}var t,e=ao.scale.linear(),r=Vl,i=6,u=6,o=3,a=[10],l=null;return n.scale=function(t){return arguments.length?(e=t,n):e},n.orient=function(t){return arguments.length?(r=t in Xl?t+\"\":Vl,n):r},n.ticks=function(){return arguments.length?(a=co(arguments),n):a},n.tickValues=function(t){return arguments.length?(l=t,n):l},n.tickFormat=function(e){return arguments.length?(t=e,n):t},n.tickSize=function(t){var e=arguments.length;return e?(i=+t,u=+arguments[e-1],n):i},n.innerTickSize=function(t){return arguments.length?(i=+t,n):i},n.outerTickSize=function(t){return arguments.length?(u=+t,n):u},n.tickPadding=function(t){return arguments.length?(o=+t,n):o},n.tickSubdivide=function(){return arguments.length&&n},n};var Vl=\"bottom\",Xl={top:1,right:1,bottom:1,left:1};ao.svg.brush=function(){function n(t){t.each(function(){var t=ao.select(this).style(\"pointer-events\",\"all\").style(\"-webkit-tap-highlight-color\",\"rgba(0,0,0,0)\").on(\"mousedown.brush\",u).on(\"touchstart.brush\",u),o=t.selectAll(\".background\").data([0]);o.enter().append(\"rect\").attr(\"class\",\"background\").style(\"visibility\",\"hidden\").style(\"cursor\",\"crosshair\"),t.selectAll(\".extent\").data([0]).enter().append(\"rect\").attr(\"class\",\"extent\").style(\"cursor\",\"move\");var a=t.selectAll(\".resize\").data(v,m);a.exit().remove(),a.enter().append(\"g\").attr(\"class\",function(n){return\"resize \"+n}).style(\"cursor\",function(n){return $l[n]}).append(\"rect\").attr(\"x\",function(n){return/[ew]$/.test(n)?-3:null}).attr(\"y\",function(n){return/^[ns]/.test(n)?-3:null}).attr(\"width\",6).attr(\"height\",6).style(\"visibility\",\"hidden\"),a.style(\"display\",n.empty()?\"none\":null);var l,s=ao.transition(t),h=ao.transition(o);c&&(l=Zi(c),h.attr(\"x\",l[0]).attr(\"width\",l[1]-l[0]),r(s)),f&&(l=Zi(f),h.attr(\"y\",l[0]).attr(\"height\",l[1]-l[0]),i(s)),e(s)})}function 
e(n){n.selectAll(\".resize\").attr(\"transform\",function(n){return\"translate(\"+s[+/e$/.test(n)]+\",\"+h[+/^s/.test(n)]+\")\"})}function r(n){n.select(\".extent\").attr(\"x\",s[0]),n.selectAll(\".extent,.n>rect,.s>rect\").attr(\"width\",s[1]-s[0])}function i(n){n.select(\".extent\").attr(\"y\",h[0]),n.selectAll(\".extent,.e>rect,.w>rect\").attr(\"height\",h[1]-h[0])}function u(){function u(){32==ao.event.keyCode&&(C||(M=null,L[0]-=s[1],L[1]-=h[1],C=2),S())}function v(){32==ao.event.keyCode&&2==C&&(L[0]+=s[1],L[1]+=h[1],C=0,S())}function d(){var n=ao.mouse(b),t=!1;x&&(n[0]+=x[0],n[1]+=x[1]),C||(ao.event.altKey?(M||(M=[(s[0]+s[1])/2,(h[0]+h[1])/2]),L[0]=s[+(n[0]<M[0])],L[1]=h[+(n[1]<M[1])]):M=null),E&&y(n,c,0)&&(r(k),t=!0),A&&y(n,f,1)&&(i(k),t=!0),t&&(e(k),w({type:\"brush\",mode:C?\"move\":\"resize\"}))}function y(n,t,e){var r,i,u=Zi(t),l=u[0],c=u[1],f=L[e],v=e?h:s,d=v[1]-v[0];return C&&(l-=f,c-=d+f),r=(e?g:p)?Math.max(l,Math.min(c,n[e])):n[e],C?i=(r+=f)+d:(M&&(f=Math.max(l,Math.min(c,2*M[e]-r))),r>f?(i=r,r=f):i=f),v[0]!=r||v[1]!=i?(e?a=null:o=null,v[0]=r,v[1]=i,!0):void 0}function m(){d(),k.style(\"pointer-events\",\"all\").selectAll(\".resize\").style(\"display\",n.empty()?\"none\":null),ao.select(\"body\").style(\"cursor\",null),q.on(\"mousemove.brush\",null).on(\"mouseup.brush\",null).on(\"touchmove.brush\",null).on(\"touchend.brush\",null).on(\"keydown.brush\",null).on(\"keyup.brush\",null),z(),w({type:\"brushend\"})}var M,x,b=this,_=ao.select(ao.event.target),w=l.of(b,arguments),k=ao.select(b),N=_.datum(),E=!/^(n|s)$/.test(N)&&c,A=!/^(e|w)$/.test(N)&&f,C=_.classed(\"extent\"),z=W(b),L=ao.mouse(b),q=ao.select(t(b)).on(\"keydown.brush\",u).on(\"keyup.brush\",v);if(ao.event.changedTouches?q.on(\"touchmove.brush\",d).on(\"touchend.brush\",m):q.on(\"mousemove.brush\",d).on(\"mouseup.brush\",m),k.interrupt().selectAll(\"*\").interrupt(),C)L[0]=s[0]-L[0],L[1]=h[0]-L[1];else if(N){var 
T=+/w$/.test(N),R=+/^n/.test(N);x=[s[1-T]-L[0],h[1-R]-L[1]],L[0]=s[T],L[1]=h[R]}else ao.event.altKey&&(M=L.slice());k.style(\"pointer-events\",\"none\").selectAll(\".resize\").style(\"display\",null),ao.select(\"body\").style(\"cursor\",_.style(\"cursor\")),w({type:\"brushstart\"}),d()}var o,a,l=N(n,\"brushstart\",\"brush\",\"brushend\"),c=null,f=null,s=[0,0],h=[0,0],p=!0,g=!0,v=Bl[0];return n.event=function(n){n.each(function(){var n=l.of(this,arguments),t={x:s,y:h,i:o,j:a},e=this.__chart__||t;this.__chart__=t,Hl?ao.select(this).transition().each(\"start.brush\",function(){o=e.i,a=e.j,s=e.x,h=e.y,n({type:\"brushstart\"})}).tween(\"brush:brush\",function(){var e=xr(s,t.x),r=xr(h,t.y);return o=a=null,function(i){s=t.x=e(i),h=t.y=r(i),n({type:\"brush\",mode:\"resize\"})}}).each(\"end.brush\",function(){o=t.i,a=t.j,n({type:\"brush\",mode:\"resize\"}),n({type:\"brushend\"})}):(n({type:\"brushstart\"}),n({type:\"brush\",mode:\"resize\"}),n({type:\"brushend\"}))})},n.x=function(t){return arguments.length?(c=t,v=Bl[!c<<1|!f],n):c},n.y=function(t){return arguments.length?(f=t,v=Bl[!c<<1|!f],n):f},n.clamp=function(t){return arguments.length?(c&&f?(p=!!t[0],g=!!t[1]):c?p=!!t:f&&(g=!!t),n):c&&f?[p,g]:c?p:f?g:null},n.extent=function(t){var e,r,i,u,l;return arguments.length?(c&&(e=t[0],r=t[1],f&&(e=e[0],r=r[0]),o=[e,r],c.invert&&(e=c(e),r=c(r)),e>r&&(l=e,e=r,r=l),e==s[0]&&r==s[1]||(s=[e,r])),f&&(i=t[0],u=t[1],c&&(i=i[1],u=u[1]),a=[i,u],f.invert&&(i=f(i),u=f(u)),i>u&&(l=i,i=u,u=l),i==h[0]&&u==h[1]||(h=[i,u])),n):(c&&(o?(e=o[0],r=o[1]):(e=s[0],r=s[1],c.invert&&(e=c.invert(e),r=c.invert(r)),e>r&&(l=e,e=r,r=l))),f&&(a?(i=a[0],u=a[1]):(i=h[0],u=h[1],f.invert&&(i=f.invert(i),u=f.invert(u)),i>u&&(l=i,i=u,u=l))),c&&f?[[e,i],[r,u]]:c?[e,r]:f&&[i,u])},n.clear=function(){return n.empty()||(s=[0,0],h=[0,0],o=a=null),n},n.empty=function(){return!!c&&s[0]==s[1]||!!f&&h[0]==h[1]},ao.rebind(n,l,\"on\")};var 
$l={n:\"ns-resize\",e:\"ew-resize\",s:\"ns-resize\",w:\"ew-resize\",nw:\"nwse-resize\",ne:\"nesw-resize\",se:\"nwse-resize\",sw:\"nesw-resize\"},Bl=[[\"n\",\"e\",\"s\",\"w\",\"nw\",\"ne\",\"se\",\"sw\"],[\"e\",\"w\"],[\"n\",\"s\"],[]],Wl=ga.format=xa.timeFormat,Jl=Wl.utc,Gl=Jl(\"%Y-%m-%dT%H:%M:%S.%LZ\");Wl.iso=Date.prototype.toISOString&&+new Date(\"2000-01-01T00:00:00.000Z\")?eo:Gl,eo.parse=function(n){var t=new Date(n);return isNaN(t)?null:t},eo.toString=Gl.toString,ga.second=On(function(n){return new va(1e3*Math.floor(n/1e3))},function(n,t){n.setTime(n.getTime()+1e3*Math.floor(t))},function(n){return n.getSeconds()}),ga.seconds=ga.second.range,ga.seconds.utc=ga.second.utc.range,ga.minute=On(function(n){return new va(6e4*Math.floor(n/6e4))},function(n,t){n.setTime(n.getTime()+6e4*Math.floor(t))},function(n){return n.getMinutes()}),ga.minutes=ga.minute.range,ga.minutes.utc=ga.minute.utc.range,ga.hour=On(function(n){var t=n.getTimezoneOffset()/60;return new va(36e5*(Math.floor(n/36e5-t)+t))},function(n,t){n.setTime(n.getTime()+36e5*Math.floor(t))},function(n){return n.getHours()}),ga.hours=ga.hour.range,ga.hours.utc=ga.hour.utc.range,ga.month=On(function(n){return n=ga.day(n),n.setDate(1),n},function(n,t){n.setMonth(n.getMonth()+t)},function(n){return n.getMonth()}),ga.months=ga.month.range,ga.months.utc=ga.month.utc.range;var Kl=[1e3,5e3,15e3,3e4,6e4,3e5,9e5,18e5,36e5,108e5,216e5,432e5,864e5,1728e5,6048e5,2592e6,7776e6,31536e6],Ql=[[ga.second,1],[ga.second,5],[ga.second,15],[ga.second,30],[ga.minute,1],[ga.minute,5],[ga.minute,15],[ga.minute,30],[ga.hour,1],[ga.hour,3],[ga.hour,6],[ga.hour,12],[ga.day,1],[ga.day,2],[ga.week,1],[ga.month,1],[ga.month,3],[ga.year,1]],nc=Wl.multi([[\".%L\",function(n){return n.getMilliseconds()}],[\":%S\",function(n){return n.getSeconds()}],[\"%I:%M\",function(n){return n.getMinutes()}],[\"%I %p\",function(n){return n.getHours()}],[\"%a %d\",function(n){return n.getDay()&&1!=n.getDate()}],[\"%b %d\",function(n){return 
1!=n.getDate()}],[\"%B\",function(n){return n.getMonth()}],[\"%Y\",zt]]),tc={range:function(n,t,e){return ao.range(Math.ceil(n/e)*e,+t,e).map(io)},floor:m,ceil:m};Ql.year=ga.year,ga.scale=function(){return ro(ao.scale.linear(),Ql,nc)};var ec=Ql.map(function(n){return[n[0].utc,n[1]]}),rc=Jl.multi([[\".%L\",function(n){return n.getUTCMilliseconds()}],[\":%S\",function(n){return n.getUTCSeconds()}],[\"%I:%M\",function(n){return n.getUTCMinutes()}],[\"%I %p\",function(n){return n.getUTCHours()}],[\"%a %d\",function(n){return n.getUTCDay()&&1!=n.getUTCDate()}],[\"%b %d\",function(n){return 1!=n.getUTCDate()}],[\"%B\",function(n){return n.getUTCMonth()}],[\"%Y\",zt]]);ec.year=ga.year.utc,ga.scale.utc=function(){return ro(ao.scale.linear(),ec,rc)},ao.text=An(function(n){return n.responseText}),ao.json=function(n,t){return Cn(n,\"application/json\",uo,t)},ao.html=function(n,t){return Cn(n,\"text/html\",oo,t)},ao.xml=An(function(n){return n.responseXML}),\"function\"==typeof define&&define.amd?(this.d3=ao,define(ao)):\"object\"==typeof module&&module.exports?module.exports=ao:this.d3=ao}();"
  },
  {
    "path": "html/assets/js/lib/jc.js",
    "content": "!function(a){var b;if(\"function\"==typeof define&&define.amd&&(define(a),b=!0),\"object\"==typeof exports&&(module.exports=a(),b=!0),!b){var c=window.Cookies,d=window.Cookies=a();d.noConflict=function(){return window.Cookies=c,d}}}(function(){function a(){for(var a=0,b={};a<arguments.length;a++){var c=arguments[a];for(var d in c)b[d]=c[d]}return b}function b(a){return a.replace(/(%[0-9A-Z]{2})+/g,decodeURIComponent)}function c(d){function e(){}function f(b,c,f){if(\"undefined\"!=typeof document){f=a({path:\"/\"},e.defaults,f),\"number\"==typeof f.expires&&(f.expires=new Date(1*new Date+864e5*f.expires)),f.expires=f.expires?f.expires.toUTCString():\"\";try{var g=JSON.stringify(c);/^[\\{\\[]/.test(g)&&(c=g)}catch(j){}c=d.write?d.write(c,b):encodeURIComponent(c+\"\").replace(/%(23|24|26|2B|3A|3C|3E|3D|2F|3F|40|5B|5D|5E|60|7B|7D|7C)/g,decodeURIComponent),b=encodeURIComponent(b+\"\").replace(/%(23|24|26|2B|5E|60|7C)/g,decodeURIComponent).replace(/[\\(\\)]/g,escape);var h=\"\";for(var i in f)f[i]&&(h+=\"; \"+i,!0!==f[i]&&(h+=\"=\"+f[i].split(\";\")[0]));return document.cookie=b+\"=\"+c+h}}function g(a,c){if(\"undefined\"!=typeof document){for(var e={},f=document.cookie?document.cookie.split(\"; \"):[],g=0;g<f.length;g++){var h=f[g].split(\"=\"),i=h.slice(1).join(\"=\");c||'\"'!==i.charAt(0)||(i=i.slice(1,-1));try{var j=b(h[0]);if(i=(d.read||d)(i,j)||b(i),c)try{i=JSON.parse(i)}catch(k){}if(e[j]=i,a===j)break}catch(k){}}return a?e[a]:e}}return e.set=f,e.get=function(a){return g(a,!1)},e.getJSON=function(a){return g(a,!0)},e.remove=function(b,c){f(b,\"\",a(c,{expires:-1}))},e.defaults={},e.withConverter=c,e}return c(function(){})});"
  },
  {
    "path": "html/assets/js/lib/jotobaChoices.js",
    "content": "document.querySelectorAll(\".choices__item--choice.choices__item--selectable\").forEach(e=>{e.addEventListener(\"click\",t=>{let n=t.target.parentElement.parentElement.parentElement.children[0].children;n[0].children[0].innerHTML=t.target.innerHTML,n[0].children[0].value=e.dataset.value,n[1].children[0].innerHTML=t.target.innerHTML;let r=n[0].dataset.onchange;void 0!==r&&window[r](t.target.innerHTML,e.dataset.value),t.target.parentElement.parentElement.classList.remove(\"is-active\"),t.target.parentElement.parentElement.parentElement.classList.remove(\"is-open\")})}),document.querySelectorAll(\".choices__inner\").forEach(e=>{e.addEventListener(\"click\",()=>{e.parentElement.children[1].classList.toggle(\"is-active\"),e.parentElement.classList.toggle(\"is-open\")})}),document.querySelectorAll(\".choices\").forEach(e=>{e.addEventListener(\"blur\",()=>{e.children[1].classList.remove(\"is-active\"),e.classList.remove(\"is-open\")})});"
  },
  {
    "path": "html/assets/js/locales/collection.js",
    "content": "const locales = {\n    \"en-US\": {\n        \"LANG_JAP\": \"Japanese\",\n        \"LANG_GER\": \"German\",\n        \"LANG_ENG\": \"English\",\n        \"LANG_RUS\": \"Russian\",\n        \"LANG_SPA\": \"Spanish\",\n        \"LANG_SWE\": \"Swedish\",\n        \"LANG_FRE\": \"French\",\n        \"LANG_DUT\": \"Dutch\",\n        \"LANG_HUN\": \"Hungarian\",\n        \"LANG_SLV\": \"Slovenian\",\n        \"SETTINGS_COOKIE_ACCEPT\": \"Thanks for helping to improve Jotoba!\",\n        \"SETTINGS_COOKIE_REJECT\": \"We will no longer collect <b>any</b> data.\",\n        \"UPLOAD_NO_INPUT\": \"You need to enter a URL or upload a file!\",\n        \"RADICAL_API_UNREACHABLE\": \"Could not reach Radical API.\",\n        \"SPEECH_LISTEN_YES\": \"Yes\",\n        \"SPEECH_LISTEN_NO\": \"No\",\n        \"SPEECH_NO_PERMISSION\": \"Need permissions to perform speech recognition!\",\n        \"SPEECH_ABORT\": \"Speech recognition aborted.\",\n        \"SPEECH_NO_VOICE\": \"No voice input received!\",\n        \"SPEECH_NOT_SUPPORTED\": \"Your browser does not support speech recognition!\",\n        \"QOL_FURI_COPIED\": \"furigana copied to clipboard.\",\n        \"QOL_FURI_COPIED_ALL\": \"<b>full</b> furigana copied to clipboard\",\n        \"QOL_KANJI_COPIED\": \"kanji copied to clipboard.\",\n        \"QOL_KANA_COPIED\": \"kana copied to clipboard.\",\n        \"QOL_SENTENCE_COPIED\": \"copied to clipboard.\",\n        \"QOL_AUDIO_COPIED\": \"Audio URL copied to clipboard\",\n        \"QOL_LINK_COPIED\": \"Link URL copied to clipboard\",\n    },\n    \"de-DE\": {\n        \"LANG_JAP\": \"Japanisch\",\n        \"LANG_GER\": \"Deutsch\",\n        \"LANG_ENG\": \"Englisch\",\n        \"LANG_RUS\": \"Russisch\",\n        \"LANG_SPA\": \"Spanisch\",\n        \"LANG_SWE\": \"Schwedisch\",\n        \"LANG_FRE\": \"Französisch\",\n        \"LANG_DUT\": \"Niederländisch\",\n        \"LANG_HUN\": \"Ungarisch\",\n        \"LANG_SLV\": \"Slowenisch\",\n        
\"SETTINGS_COOKIE_ACCEPT\": \"Vielen Dank für die Unterstützung!\",\n        \"SETTINGS_COOKIE_REJECT\": \"Es werden keine Daten mehr gesammelt!\",\n        \"UPLOAD_NO_INPUT\": \"Du musst entweder eine Datei hochladen oder eine URL einfügen, welche auf ein Bild zeigt!\",\n        \"RADICAL_API_UNREACHABLE\": \"Konnte die Radikal-API nicht erreichen.\",\n        \"SPEECH_LISTEN_YES\": \"Ja\",\n        \"SPEECH_LISTEN_NO\": \"Nein\",\n        \"SPEECH_NO_PERMISSION\": \"Jotoba benötigt Berechtigungen für die Spracherkennung!\",\n        \"SPEECH_ABORT\": \"Spracherkennung abgebrochen.\",\n        \"SPEECH_NO_VOICE\": \"Wir konnten Deine Stimme nicht hören!\",\n        \"SPEECH_NOT_SUPPORTED\": \"Dein Browser unterstützt dieses Feature leider nicht!\",\n        \"QOL_FURI_COPIED\": \"Furigana in Zwischenablage kopiert\",\n        \"QOL_FURI_COPIED_ALL\": \"<b>Vollständiges</b> Furigana in Zwischenablage kopiert\",\n        \"QOL_KANJI_COPIED\": \"Kanji in Zwischenablage kopiert.\",\n        \"QOL_KANA_COPIED\": \"Kana in Zwischenablage kopiert\",\n        \"QOL_SENTENCE_COPIED\": \"Text in Zwischenablage kopiert\",\n        \"QOL_AUDIO_COPIED\": \"Audio URL in Zwischenablage kopiert\",\n        \"QOL_LINK_COPIED\": \"Link URL in Zwischenablage kopiert\",\n    },\n    \"hu\": {\n        \"LANG_JAP\": \"Japán\",\n        \"LANG_GER\": \"Német\",\n        \"LANG_ENG\": \"Angol\",\n        \"LANG_RUS\": \"Orosz\",\n        \"LANG_SPA\": \"Spanyol\",\n        \"LANG_SWE\": \"Svéd\",\n        \"LANG_FRE\": \"Francia\",\n        \"LANG_DUT\": \"Holland\",\n        \"LANG_HUN\": \"Magyar\",\n        \"LANG_SLV\": \"Szlovén\",\n        \"SPEECH_LISTEN_YES\": \"Igen\",\n        \"SPEECH_LISTEN_NO\": \"Nem\",\n    },\n};\n\n// Returns the text with the given identifier from the currently selected language\nfunction getText(identifier) {\n    let lang = Cookies.get(\"page_lang\") || \"en-US\";\n\n    return (locales[lang] || {})[identifier] || locales[\"en-US\"][identifier] || 
identifier;\n}"
  },
  {
    "path": "html/assets/js/mobile.js",
    "content": "/**\n * This JS-File contains some Improvements specifically for mobile views\n */\n\n// Mark the currently selected search type (only used for mobile so far)\nmarkCurrentSearchType();\n\n// On Start, check if mobile view is enabled. If yes, activate the btn\nUtil.awaitDocumentReady(prepareMobilePageBtn);\n\n// Variables used in mobiles' easy-use btn\nvar jmpBtn;\nvar kanjiDiv;\nvar jmpBtnPointsTop;\n\n// Marks the current search's type, so it can be displayed in another color\nfunction markCurrentSearchType() {\n    let searchType = $('#search-type').val();\n\n    for (let i = 0; i < 4; i ++) {\n        if (i == searchType) {\n            $('.choices__item[data-value=\"'+i+'\"]').addClass('selected');\n        } else {\n            $('.choices__item[data-value=\"'+i+'\"]').removeClass('selected');\n        }\n    }\n}\n\n// Prepares the easy-use Btn for mobile devices\nfunction prepareMobilePageBtn() {\n    // The Jmp Btn and Kanji elements\n    jmpBtn = $(\"#jmp-btn\");\n    kanjiDiv = document.getElementById(\"secondaryInfo\"); \n\n    // Variables used in the following two functions\n    jmpBtnPointsTop = false;\n\n    if (kanjiDiv !== null) {\n        // Prepare the Kanji jmp and its button\n        var kanjiPos = kanjiDiv.offsetTop; \n        jmpBtn.removeClass(\"hidden\");\n\n        // Window Scroll checks\n        window.onscroll = function() {\n            if (Util.getBrowserWidth() < 600 && (document.body.scrollTop > kanjiPos - 500 || document.documentElement.scrollTop > kanjiPos - 500)) {\n                jmpBtn.css(\"transform\", \"rotate(0deg)\");\n                jmpBtnPointsTop = true;\n            } else {\n                jmpBtn.css(\"transform\", \"rotate(180deg)\");\n                jmpBtnPointsTop = false;\n            }\n        }\n    }\n}\n\n// Jumps to the top or kanji part\nfunction jumpToTop() {\n    if (jmpBtnPointsTop) {\n        (!window.requestAnimationFrame) ? 
window.scrollTo(0, 0) : Util.scrollTo(0, 400);\n    } else {\n        let topOffset = kanjiDiv.offsetTop; \n        (!window.requestAnimationFrame) ? window.scrollTo(0, topOffset) : Util.scrollTo(topOffset, 400);\n    }\n}\n\n// Toggles the options for different input and page jumping on / off\nfunction toggleMobileNav() {\n    $('.mobile-nav').toggleClass('hidden');\n}\n\n"
  },
  {
    "path": "html/assets/js/page/infoPage.js",
    "content": "\n// On load, check if Shortcuts should be shown. They are useless for mobile devices\nif( /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) ) {\n    document.getElementById(\"shortcutInfo\").classList.add(\"hidden\");\n    document.getElementsByClassName(\"help-cat\")[0].classList.remove(\"help-cat\");\n}"
  },
  {
    "path": "html/assets/js/page/kanjiPage.js",
    "content": "/**\n * This JS-File implements the Kanji Animation and compound dropdown features\n */\n\n// Kanji settings\nvar kanjiSettings = [];\nconst Animation = { none: 0, forward: 1, backwards: 2 };\n\n// Default kanji speed (only used on init)\nlet speed = localStorage.getItem(\"kanji_speed\") || 1;\n\n// Initially set the speed tags according to the settings\nUtil.awaitDocumentReady(() => {\n    $(\".speed-tag\").each((i, e) => {\n        e.children[1].innerHTML = (Math.round(Settings.display.kanjiAnimationSpeed.val * 100) + \"%\");\n        e.nextElementSibling.value = Settings.display.kanjiAnimationSpeed.val;\n    });\n});\n\n// Initially prepare svg-settings\n$(\".anim-container\").each((i, e) => {\n    // The Kanji\n    let kanjiLiteral = e.id.split(\"_\")[0];\n\n    // Figure out how many paths there are\n    let paths = getPaths(kanjiLiteral);\n\n    // Specific settings\n    kanjiSettings[kanjiLiteral] = {\n        strokeCount: paths.length,\n        speed: speed,\n        timestamp: 0,\n        index: 0,\n        showNumbers: false,\n        animationDirection: Animation.none,\n        isAutomated: false,\n    }\n\n    // Needs the settings to be loaded first\n    Util.awaitDocumentReady(() => {\n        kanjiSettings[kanjiLiteral].index = Settings.display.showKanjiOnLoad.val ? 
paths.length : 0;\n        kanjiSettings[kanjiLiteral].showNumbers = Settings.display.showKanjiNumbers.val;\n\n        // If the user wants to hide Kanji on load\n        if (!Settings.display.showKanjiOnLoad.val) {\n            $(\"#\" + kanjiLiteral + \"_svg > svg path:not(.bg)\").each((i, e) => {\n                e.classList.add(\"hidden\");\n                e.style.strokeDashoffset = e.getTotalLength();\n            });\n        }\n\n        // If user wants to hide numbers: hide them\n        if (!Settings.display.showKanjiNumbers.val) {\n            $(e).find(\"text\").addClass(\"hidden\");\n        }\n    });\n});\n\n// Adjust svg's draw speed using the slider\n$('.speedSlider:not(.settings)').on('input', function () {\n    let kanjiLiteral = this.dataset.kanji;\n\n    kanjiSettings[kanjiLiteral].speed = this.value;\n\n    let ident = kanjiLiteral + \"_speed\";\n    let speed = Math.round((parseFloat(this.value) * 100));\n\n    $(\"#\" + ident).html(speed + \"%\");\n    sessionStorage.setItem(ident, speed);\n\n    let playBtnState = document.getElementById(kanjiLiteral + \"_play\").dataset.state;\n\n    if (kanjiSettings[kanjiLiteral].animationDirection !== Animation.none && playBtnState === \"pause\") {\n        refreshAnimations(kanjiLiteral);\n    }\n});\n\n// Returns the paths related to the kanji\nfunction getPaths(kanjiLiteral) {\n    let svg = document.getElementById(kanjiLiteral + \"_svg\").firstElementChild;\n    return svg.querySelectorAll(\"path:not(.bg)\");\n}\n\n// Refresh the currently running animation. 
Used for changing the current animation speed\nasync function refreshAnimations(kanjiLiteral) {\n    let paths = getPaths(kanjiLiteral);\n    let startTime = prepareAutoplay(kanjiLiteral);\n\n    // Iterate all strokes that are potentially animating\n    for (let i = 0; i < paths.length; i++) {\n        let len = paths[i].getTotalLength();\n        let currentLen = $(paths[i]).css(\"stroke-dashoffset\");\n\n        // Stroke is currently animating\n        if (len !== currentLen && currentLen !== \"0px\") {\n            // Reset current animation\n            $(paths[i]).css(\"stroke-dashoffset\", $(paths[i]).css(\"stroke-dashoffset\"));\n\n            // Animate and wait if the animations was automated\n            let animationPromise = doAnimationStep(kanjiLiteral, paths[i], kanjiSettings[kanjiLiteral].animationDirection === Animation.forward, false);\n            if (kanjiSettings[kanjiLiteral].isAutomated) {\n                kanjiSettings[kanjiLiteral].index = i + 1;\n                await animationPromise;\n\n                if (startTime < kanjiSettings[kanjiLiteral].timestamp) {\n                    return;\n                }\n            }\n\n            toggleNumbers(kanjiLiteral);\n        }\n    }\n\n    // Conclude potential autoplay\n    if (kanjiSettings[kanjiLiteral].isAutomated) {\n        concludeAutoplay(kanjiLiteral);\n    }\n}\n\n// Prepares the required steps to start auto-playing an animation\nfunction prepareAutoplay(kanjiLiteral) {\n    let startTime = Date.now();\n\n    kanjiSettings[kanjiLiteral].timestamp = startTime;\n    kanjiSettings[kanjiLiteral].isAutomated = true;\n\n    let playBtn = document.getElementById(kanjiLiteral + \"_play\");\n\n    playBtn.dataset.state = \"pause\";\n    playBtn.children[0].classList.add(\"hidden\");\n    playBtn.children[1].classList.remove(\"hidden\");\n\n    return startTime;\n}\n\n// Prepares the last steps to end auto-playing an animation\nfunction concludeAutoplay(kanjiLiteral) {\n    let playBtn = 
document.getElementById(kanjiLiteral + \"_play\");\n\n    kanjiSettings[kanjiLiteral].isAutomated = false;\n\n    playBtn.dataset.state = \"play\";\n    playBtn.children[0].classList.remove(\"hidden\");\n    playBtn.children[1].classList.add(\"hidden\");\n}\n\n// Based on the current state, show or pause the animation\nasync function doOrPauseAnimation(kanjiLiteral) {\n    let playBtn = document.getElementById(kanjiLiteral + \"_play\");\n\n    if (playBtn.dataset.state === \"play\") {\n        if (kanjiSettings[kanjiLiteral].index == kanjiSettings[kanjiLiteral].strokeCount) {\n            await undoAnimation(kanjiLiteral, true);\n        }\n\n        doAnimation(kanjiLiteral);\n        return;\n    }\n\n    pauseAnimation(kanjiLiteral);\n}\n\n// Automatically draws the whole image\nasync function doAnimation(kanjiLiteral) {\n    let startTime = prepareAutoplay(kanjiLiteral);\n\n    let paths = getPaths(kanjiLiteral);\n\n    for (let index = kanjiSettings[kanjiLiteral].index; index < paths.length; index++) {\n        if (startTime < kanjiSettings[kanjiLiteral].timestamp) {\n            return;\n        }\n\n        kanjiSettings[kanjiLiteral].index++;\n        kanjiSettings[kanjiLiteral].animationDirection = Animation.forward;\n\n        await doAnimationStep(kanjiLiteral, paths[index], true);\n\n        if (startTime < kanjiSettings[kanjiLiteral].timestamp) {\n            return;\n        }\n\n        toggleNumbers(kanjiLiteral);\n        kanjiSettings[kanjiLiteral].animationDirection = Animation.none;\n    }\n\n    concludeAutoplay(kanjiLiteral);\n}\n\n// Automatically removes the whole image\nasync function undoAnimation(kanjiLiteral, awaitLast) {\n    let startTime = prepareAutoplay(kanjiLiteral);\n\n    let paths = getPaths(kanjiLiteral);\n\n    for (kanjiSettings[kanjiLiteral].index > -1; kanjiSettings[kanjiLiteral].index--;) {\n        if (startTime < kanjiSettings[kanjiLiteral].timestamp) {\n            return;\n        }\n\n        
kanjiSettings[kanjiLiteral].animationDirection = Animation.backwards;\n\n        let awaitAnimationStep = awaitLast && kanjiSettings[kanjiLiteral].index === 0;\n        await doAnimationStep(kanjiLiteral, paths[kanjiSettings[kanjiLiteral].index], false, !awaitAnimationStep);\n\n        if (startTime < kanjiSettings[kanjiLiteral].timestamp) {\n            return;\n        }\n\n        toggleNumbers(kanjiLiteral);\n        kanjiSettings[kanjiLiteral].animationDirection = Animation.none;\n    }\n\n    kanjiSettings[kanjiLiteral].index = 0;\n    concludeAutoplay(kanjiLiteral);\n}\n\n// Pauses the animation midway\nasync function pauseAnimation(kanjiLiteral) {\n    kanjiSettings[kanjiLiteral].timestamp = Date.now();\n\n    let playBtn = document.getElementById(kanjiLiteral + \"_play\");\n\n    playBtn.dataset.state = \"play\";\n    playBtn.children[0].classList.remove(\"hidden\");\n    playBtn.children[1].classList.add(\"hidden\");\n}\n\n// Draws or removes the given path\nasync function doAnimationStep(kanjiLiteral, path, forward, fastReset) {\n    path.classList.remove(\"hidden\");\n\n    let len = path.getTotalLength();\n    let drawTime = len * 10 * (!fastReset ? (1 / kanjiSettings[kanjiLiteral].speed) : 0.5);\n\n    let transition = \"transition: stroke-dashoffset \" + drawTime + \"ms ease 0s, stroke \" + (forward ? 0 : drawTime) + \"ms ease 0s;\";\n    let dashArray = \"stroke-dasharray: \" + len + \",\" + len + \";\";\n    let strokeDashoffset = \"stroke-dashoffset: \" + (forward ? \"0;\" : (len + \";\"));\n\n    path.style = transition + dashArray + strokeDashoffset + (forward ? \"\" : \"stroke: var(--danger);\");\n\n    return new Promise(resolve => setTimeout(resolve, !fastReset ? 
drawTime : 0));\n}\n\n// Draws or removes the given path based on the button clicked\nasync function doAnimationStep_onClick(kanjiLiteral, direction) {\n    let startTime = Date.now();\n    kanjiSettings[kanjiLiteral].timestamp = startTime;\n    concludeAutoplay(kanjiLiteral);\n\n    if (kanjiSettings[kanjiLiteral].index + direction == -1 || kanjiSettings[kanjiLiteral].index + direction > kanjiSettings[kanjiLiteral].strokeCount) {\n        return;\n    }\n\n    let path = getPaths(kanjiLiteral)[direction > 0 ? kanjiSettings[kanjiLiteral].index : kanjiSettings[kanjiLiteral].index - 1];\n\n    kanjiSettings[kanjiLiteral].index += direction;\n    kanjiSettings[kanjiLiteral].animationDirection = direction > 0 ? Animation.forward : Animation.backwards;\n\n    await doAnimationStep(kanjiLiteral, path, direction > 0);\n\n    if (startTime <= kanjiSettings[kanjiLiteral].timestamp) {\n        kanjiSettings[kanjiLiteral].animationDirection = Animation.none;\n    }\n\n    toggleNumbers(kanjiLiteral);\n}\n\n// Sets the SVG numbers visible / invisible or updates them if the param was not provided\nfunction toggleNumbers(kanjiLiteral, visible) {\n    let svg = document.getElementById(kanjiLiteral + \"_svg\").firstElementChild;\n    let texts = svg.querySelectorAll(\"text\");\n\n    if (visible !== undefined && !Settings.display.showKanjiNumbers.val) {\n        kanjiSettings[kanjiLiteral].showNumbers = visible;\n    }\n\n    if (kanjiSettings[kanjiLiteral].showNumbers) {\n        for (let i = 0; i < texts.length; i++) {\n            if (i < kanjiSettings[kanjiLiteral].index) {\n                texts[i].classList.remove(\"hidden\");\n            } else {\n                texts[i].classList.add(\"hidden\");\n            }\n        }\n    } else {\n        for (let i = 0; i < texts.length; i++) {\n            texts[i].classList.add(\"hidden\");\n        }\n    }\n}\n\n// Toggles compounds visible / hidden\nfunction toggleCompounds(event) {\n    let compoundParent = 
event.target.parentElement.parentElement;\n    compoundParent.children[compoundParent.children.length - 1].classList.toggle(\"hidden\");\n    event.target.parentElement.children[0].classList.toggle(\"closed\");\n}\n\n// Toggle all compounds on keypress\n$(document).on(\"keypress\", (event) => {\n    if ($('input:text').is(\":focus\")) return;\n\n    if (event.key == \"c\") {\n        $(\".compounds-dropdown\").toggleClass(\"closed\");\n        $(\".compounds-parent\").toggleClass(\"hidden\");\n    }\n});\n\n/* -- Kanji decomposition tree -- */\nvar pendingRequests = 0;\nvar lastTreeLiteral = \"\";\n\n// Generates the tree diagram\nasync function generateTreeDiagram(kanjiLiteral) {\n    var width = 1000,\n        height = 1000,\n        i = 0;\n    lastTreeLiteral = kanjiLiteral;\n\n    var tree = d3.layout.tree()\n        .size([height, width]);\n\n    // set visible\n    document.getElementById(\"tree-target\").innerHTML = \"\";\n    document.getElementById(\"backdrop\").classList.remove(\"hidden\");\n\n    // Add the SVG to the body\n    var svg = d3.select(\"#tree-target\").append(\"svg\")\n        .classed(\"svg-content-responsive\", true)\n        .classed(\"svg-container\", true)\n        .attr(\"preserveAspectRatio\", \"xMinYMin meet\")\n        .attr(\"viewBox\", \"0 0 \" + width + \" \" + height)\n        .append(\"g\");\n\n    // Build the tree\n    let treeData = await API.getGraphData(kanjiLiteral);\n    root = treeData.tree;\n\n    // Compute the new tree layout\n    var nodes = tree.nodes(root).reverse(),\n        links = tree.links(nodes);\n\n    // Normalize for fixed-depth\n    nodes.forEach((d) => { d.y = d.depth * 100; });\n\n    // Declare the nodes\n    var node = svg.selectAll(\"g.node\")\n        .data(nodes, (d) => { return d.id || (d.id = ++i); });\n\n    // Declare the links\n    var link = svg.selectAll(\"path.link\")\n        .data(links, (d) => { return d.target.id; });\n\n    // Enter the nodes\n    var nodeEnter = 
node.enter().append(\"g\")\n        .attr(\"class\", \"node\")\n        .attr(\"transform\", (d) => {\n            return \"translate(\" + d.x + \",\" + d.y + \")\";\n        });\n\n    // Circle style, color, fill\n    nodeEnter.append(\"circle\")\n        .attr(\"r\", 25)\n        .style(\"fill\", \"rgba(222,227,231,255)\");\n\n    // Text\n    nodeEnter.append(\"text\")\n        .attr(\"y\", (d) => { // Text offset\n            return d.children || d._children ? 5 : 5;\n        })\n        .attr(\"text-anchor\", \"middle\")\n        .text((d) => { return d.name; })\n        .style(\"fill-opacity\", 1)\n        .attr(\"has_data\", (d) => {return d.literal_available});\n\n    // Straight lines\n    link.enter().insert(\"line\")\n        .attr(\"class\", \"link\")\n        .attr(\"x1\", (d) => { return d.source.x; })\n        .attr(\"y1\", (d) => { return d.source.y; })\n        .attr(\"x2\", (d) => { return d.target.x; })\n        .attr(\"y2\", (d) => { return d.target.y; });\n\n    // Move lines in front of circle to hide the lines (only needed for straight lines)\n    document.querySelectorAll(\"#tree-target .link\").forEach(e => {\n        var node = e;\n        var parent = e.parentNode;\n        parent.removeChild(node);\n        parent.prepend(e);\n    });\n\n    // Figure out how many requests are required\n    const srcUrl = \"/assets/svg/glyphes/\";\n    document.querySelectorAll(\"#tree-target text\").forEach((e) => {\n        getSvgContent(e, srcUrl + e.innerHTML + \".svg\");\n        pendingRequests++;\n    });\n\n    svg = document.querySelector('#tree-target svg');\n\n    // Calculate new Viewbox of SVG containing all children\n    const { xMin, xMax, yMin, yMax } = [...svg.children].reduce((acc, el) => {\n        const { x, y, width, height } = el.getBBox();\n        if (!acc.xMin || x < acc.xMin) acc.xMin = x;\n        if (!acc.xMax || x + width > acc.xMax) acc.xMax = x + width;\n        if (!acc.yMin || y < acc.yMin) acc.yMin = y;\n        if 
(!acc.yMax || y + height > acc.yMax) acc.yMax = y + height;\n        return acc;\n    }, {});\n\n    // Update viewbox\n    const viewbox = `${xMin} ${yMin} ${xMax - xMin} ${yMax - yMin}`;\n    svg.setAttribute('viewBox', viewbox);\n\n    // Set toggler content if available\n    if (treeData.has_big) {\n        let toggler = document.getElementById(\"tree-toggle\"); \n        toggler.classList.remove(\"hidden\");\n        if (Settings.search.showFullGraph.val) {\n            toggler.classList.add(\"detailed\");\n        }\n    }\n}\n\n// Tries to replace the given target with an SVG using the given URL\nfunction getSvgContent(target, url) {\n    $.ajax({ \n        type : \"GET\", \n        url : url, \n\n        // Called upon server reponse\n        success : function(result) { \n            \n            // Check if the result is actually an SVG or rather the 404 page\n            if (typeof result !== \"object\") {\n                return;\n            }\n\n            // Add action btn to the circle if possible\n            if (target.getAttribute(\"has_data\") === \"true\") {\n                target.previousElementSibling.classList.add(\"clickable\");\n                target.previousElementSibling.addEventListener(\"click\", () => {\n                    location.href = JotoTools.createUrl(target.innerHTML, 1);\n                });\n            }\n\n            // Replace text element with SVG\n            target.replaceWith(result.firstElementChild.firstElementChild);\n        }, \n\n        // Handle unexpected request errors\n        error : function(result) { \n            console.log(\"caught error on decomposition tree:\", result);\n        } \n    }); \n}\n\n// Called upon clicking on the toggle checkbox for a decomposition graph: rerenders the graph in the toggled complexity\nfunction onGraphToggleCheckboxClick(event) {\n    if (window.plausible) {\n        plausible(\"toggle\", {props: {name: \"Tree toggle\"}});\n    }\n    \n    
Settings.alterSearch('showFullGraph', !Settings.search.showFullGraph.val);\n    generateTreeDiagram(lastTreeLiteral);\n\n    let toggler = document.getElementById(\"tree-toggle\"); \n    toggler.classList.toggle(\"detailed\");\n}"
  },
  {
    "path": "html/assets/js/page/newsPage.js",
    "content": "// [news] is declared directly in the html\n\nprepareNews();\n\nfunction prepareNews() {\n    let list = document.getElementById(\"news-list\");\n    \n    for (info of news) {\n        list.innerHTML += '<div class=\"news-container\"><div class=\"news-head\"><span></span></div><div class=\"news-date\"></div><div class=\"news-body\"></div></div>';\n        list.lastChild.firstChild.firstChild.innerHTML = info.title;\n        list.lastChild.children[1].innerHTML =  Util.toLocaleDateString(info.creation_time * 1000);\n        list.lastChild.lastChild.innerHTML = Util.decodeHtml(info.html);\n    }\n}"
  },
  {
    "path": "html/assets/js/page/overlay/notifications.js",
    "content": "// On Start -> Try and load the latest data\nrequestShortData();\n\n// Start a query to receive current notifications\nasync function requestShortData() {\n    if (!localStorage) { return; }\n\n    var data = {\"after\": parseInt(localStorage.getItem(\"notification_timestamp\") || 0)};\n    $.ajax({ \n        type : \"POST\", \n        url : \"/api/news/short\", \n        data: JSON.stringify(data),\n        headers: {\n            'Content-Type': 'application/json'\n         },\n        success : function(result) { \n            parseShortNotificationResults(result);\n        }, \n        error : function(result) { \n            console.log(result);\n        } \n    });\n}\n\n// Parses the results of /api/news/short API calls and displays them\nasync function parseShortNotificationResults(results) {\n    \n    // If nothing was received, show a message that there are no new updates\n    if (results.entries.length == 0) {\n        $(\"#no-result\").removeClass(\"hidden\");\n        return;\n    }\n\n    // Else, show the results\n    let notifiContent = document.getElementById(\"notification-content\");\n    for (let result of results.entries) {\n        let creationDateString = Util.toLocaleDateString(result.creation_time * 1000);\n\n        var entryHtml = '<div class=\"notification-entry\" onclick=\"requestLongData(event,'+result.id+');\">'\n                            + '<div class=\"entry-title\">' + result.title + '</div>'\n                            + '<div class=\"date-tag\">' + creationDateString + '</div>'\n                            + '<div class=\"content\">' + result.html + '</div>'\n                        +'</div>';\n\n        notifiContent.innerHTML = entryHtml + notifiContent.innerHTML;\n        document.getElementsByClassName(\"notificationBtn\")[0].classList.add(\"update\");\n    }\n}\n\n// Shows the detailed information of the target element using its ID\nfunction requestLongData(event, id) {\n    if 
(event.target.nodeName === \"IMG\") {\n        return;\n    }\n\n    var data = {\"id\": id};\n    \n    $.ajax({ \n        type : \"POST\", \n        url : \"/api/news/detailed\", \n        data: JSON.stringify(data),\n        headers: {\n            'Content-Type': 'application/json'\n         },\n        success : function(result) { \n            parseDetailedNotificationResults(result);\n        }, \n        error : function(result) { \n            console.log(result);\n        } \n    });\n}\n\n// Parses the results of /api/news/detailed API calls and displays them\nasync function parseDetailedNotificationResults(result) {\n    $(\"#notification-detail-head\").html(result.entry.title);\n    $(\"#notification-detail-body\").html(result.entry.html);\n\n    $(\"#notificationModal\").modal('show');\n}\n\n// Opens the short-informations for notifications\nfunction toggleNotifications(event) {\n    let container = $('#notifications-container');\n    \n    // Check if notification is opened already\n    if (!container.hasClass(\"hidden\")) {\n        closeNotifications();\n        return;\n    }\n\n    // Prevent click event to pass through to the body\n    event.stopPropagation();    \n\n    // Set the timestamp\n    localStorage.setItem(\"notification_timestamp\", Math.floor(Date.now() / 1000));\n    container[0].classList.remove(\"hidden\");\n    \n    // Make clicks outside the element close it \n    $(document).one(\"click\", function() {\n        closeNotifications();\n        container.off(\"click\");\n    });\n    container.click(function(event){\n        event.stopPropagation();\n    });\n}\n\n// Closes the short-informations for notifications\nfunction closeNotifications() {\n    document.getElementById(\"notifications-container\").classList.add(\"hidden\");\n    document.getElementsByClassName(\"notificationBtn\")[0].classList.remove(\"update\");\n}\n\n// Calls a page that displays (more-or-less) all past notifications\nfunction showAllNotifications() {\n  
  Util.loadUrl(JotoTools.getPageUrl(\"news\"));\n}\n"
  },
  {
    "path": "html/assets/js/page/overlay/settings.js",
    "content": "/*\n* This JS-File everything related to the settings overlay\n*/\n\nfunction Settings() { }\n\n// Analytics. Use your own or leave empty\nvar analyticsUrl = '';\nvar analyticsAttributes = null;\n\n// Default \"language\" settings\nSettings.language = {\n    searchLang: { isCookie: true, id: \"default_lang\", dataType: \"string\", val: JotoTools.toJotobaLanguage(Cookies.get(\"default_lang\") || navigator.language || navigator.userLanguage || \"en-US\") },\n    pageLang: { isCookie: true, id: \"page_lang\", dataType: \"string\", val: Cookies.get(\"page_lang\") || \"en-US\" },\n}\n\n// Default \"search\" settings\nSettings.search = {\n    alwaysShowEnglish: { isCookie: true, id: \"show_english\", dataType: \"boolean\", val: true },\n    showEnglishOnTop: { isCookie: true, id: \"show_english_on_top\", dataType: \"boolean\", val: false },\n    showExampleSentences: { isCookie: true, id: \"show_sentences\", dataType: \"boolean\", val: true },\n    showFurigana: { isCookie: true, id: \"sentence_furigana\", dataType: \"boolean\", val: true },\n    focusSearchbar: { isCookie: false, id: \"focus_searchbar\", dataType: \"boolean\", val: false },\n    selectSearchbarContent: { isCookie: false, id: \"select_searchbar_content\", dataType: \"boolean\", val: false },\n    itemsPerPage: { isCookie: true, id: \"items_per_page\", dataType: \"int\", val: 10 },\n    kanjiPerPage: { isCookie: true, id: \"kanji_page_size\", dataType: \"int\", val: 4 },\n    showFullGraph: { isCookie: false, id: \"show_full_graph\", dataType: \"boolean\", val: true },\n}\n\n// Default \"display\" settings\nSettings.display = {\n    theme: { isCookie: false, id: \"theme\", dataType: \"string\", val: \"light\" },\n    kanjiAnimationSpeed: { isCookie: false, id: \"kanji_speed\", dataType: \"float\", val: 1 },\n    showKanjiOnLoad: { isCookie: false, id: \"show_kanji_on_load\", dataType: \"boolean\", val: true },\n    showKanjiNumbers: { isCookie: false, id: \"show_kanji_numbers\", dataType: 
\"boolean\", val: false },\n}\n\n// Default \"other\" settings\nSettings.other = {\n    enableDoubleClickCopy: { isCookie: false, id: \"dbl_click_copy\", dataType: \"boolean\", val: true },\n    trackingAllowed: { isCookie: false, id: \"tracking_allowed\", dataType: \"boolean\", val: true },\n    firstVisit: { isCookie: false, id: \"first_time\", dataType: \"boolean\", val: true }\n}\n\n// Saves a settings-object into localStorage / Cookies\nSettings.saveSettings = function (object) {\n    for (let [key, entry] of Object.entries(object)) {\n        if (entry.isCookie) {\n            Cookies.set(entry.id, entry.val, { path: '/', expires: 365 });\n        } else {\n            localStorage.setItem(entry.id, entry.val);\n        }\n    }\n}\n\n// Loads a settings-object from localStorage / Cookies\nSettings.loadSettings = function (object) {\n    for (let [key, entry] of Object.entries(object)) {\n        let data = \"\";\n\n        // Try to get the data\n        if (entry.isCookie) {\n            data = Cookies.get(entry.id, entry.val);\n        } else {\n            data = localStorage.getItem(entry.id);\n        }\n\n        // Not found => ignore\n        if (!data) {\n            continue;\n        }\n\n        // Found => parse and overwrite\n        switch (entry.dataType) {\n            case \"boolean\":\n                object[key].val = Util.toBoolean(data);\n                break;\n            case \"int\":\n                object[key].val = parseInt(data);\n                break;\n            case \"float\":\n                object[key].val = parseFloat(data);\n                break;\n            default:\n                object[key].val = data;\n        }\n    }\n}\n\n// Alters a \"language\" setting and reloads if needed\nSettings.alterLanguage = function (key, value, reloadPage) {\n    Settings.language[key].val = value;\n    Settings.saveSettings(Settings.language);\n\n    if (reloadPage) {\n        location.reload();\n    }\n}\n\n// Used for the 
Choices-Hook on function calls\nalterLanguage_search = function (html, value) {\n    let reloadPage = window.location.href.includes(\"/search\");\n    Settings.alterLanguage(\"searchLang\", value, reloadPage);\n}\n\n// Used for the Choices-Hook on function calls\nalterLanguage_page = function (html, value) {\n    Settings.alterLanguage(\"pageLang\", value, true);\n}\n\n// Alters a \"search\" setting and reloads if needed\nSettings.alterSearch = function (key, value, updateSub) {\n    Settings.search[key].val = value;\n    Settings.saveSettings(Settings.search);\n\n    if (updateSub) {\n        OverlaySettings.updateSubEntries();\n    }\n}\n\n// Alters a \"display\" setting and reloads if needed\nSettings.alterDisplay = function (key, value) {\n    Settings.display[key].val = value;\n    Settings.saveSettings(Settings.display);\n}\n\n// Alters a \"other\" setting and reloads if needed\nSettings.alterOther = function (key, value) {\n    Settings.other[key].val = value;\n    Settings.saveSettings(Settings.other);\n}\n\n// Opens the Settings Overlay and accepts cookie usage\nSettings.trackingAccepted = function (manuallyCalled) {\n    if (manuallyCalled)\n        Util.showMessage(\"success\", getText(\"SETTINGS_COOKIE_ACCEPT\"));\n\n    Settings.alterOther(\"trackingAllowed\", true);\n    loadAnalytics();\n    Util.setMdlCheckboxState(\"tracking_settings\", true);\n}\n\n// Revokes the right to store user Cookies\nSettings.trackingDeclined = function (manuallyCalled) {\n    if (manuallyCalled)\n        Util.showMessage(\"success\", getText(\"SETTINGS_COOKIE_REJECT\"));\n\n    Settings.alterOther(\"trackingAllowed\", false);\n    Util.setMdlCheckboxState(\"tracking_settings\", false);\n}\n\n// Special handling for tracking_allowed\nSettings.onTrackingAcceptChange = function (allowed) {\n    if (allowed) {\n        Settings.trackingAccepted(true);\n    } else {\n        Settings.trackingDeclined(true);\n    }\n}\n\n// Prepare the settings overlay's data initially\nasync 
function prepareSettingsOverlay() {\n\n    // Prepare the Settings Overlay\n    OverlaySettings.updateDropdowns();\n    OverlaySettings.updateCheckboxes();\n    OverlaySettings.updateSubEntries();\n    OverlaySettings.updateSliders();\n    OverlaySettings.updateInputs();\n};\n\n// Load Settings on initial load\nUtil.awaitDocumentInteractive(() => {\n    Settings.loadSettings(Settings.search);\n    Settings.loadSettings(Settings.display);\n    Settings.loadSettings(Settings.other);\n});\n\nUtil.awaitDocumentReady(() => {\n    Settings.loadSettings(Settings.language);\n    prepareSettingsOverlay();\n\n    // Add the info-icon on initial page load if needed\n    if (Settings.other.firstVisit.val) {\n        $(\".infoBtn\").addClass(\"new\");\n    }\n\n    // Load analytics if allowed -> At this points any external source with high prio has already been loaded in and should have overwritten the analytics vars\n    if (Settings.other.trackingAllowed.val && analyticsUrl.length > 0) {\n        loadAnalytics();\n    }\n});\n\nfunction loadAnalytics() {\n    Util.awaitDocumentReady(() => {\n        Util.loadScript(analyticsUrl, true, analyticsAttributes, () => {\n            // Prepare any css-based events after the script is ready\n            let buttons = document.querySelectorAll(\".p\");\n\n            for (var i = 0; i < buttons.length; i++) {\n                buttons[i].addEventListener('click', handleEvent);\n            }\n\n            function handleEvent(event) {\n                if (window.plausible) {\n                    let attribute =  event.target.getAttribute('data-p');\n                    if (!attribute) return;\n\n                    let eventData = attribute.split(/,(.+)/);\n                    let events = [JSON.parse(eventData[0]), JSON.parse(eventData[1] || '{}')];\n                    plausible(...events);\n                }\n            }\n        });\n    });\n}"
  },
  {
    "path": "html/assets/js/page/overlay/settings_overlay.js",
    "content": "/** This JS file is used for the connection between the settings \"backend\" and \"frontend\" */\n\nfunction OverlaySettings() {}\n\n\n// Toggles a single element visible / hidden\nvar toggleSubEntry = function(id, show) {\n    if (show) {\n        $(id).removeClass(\"hidden\");\n    } else {\n        $(id).addClass(\"hidden\");\n    }\n}\n\n// Sets a slider to the given value\nvar setSliderEntry = function (sliderId, textId, value) {\n    $(sliderId).val(Settings.display.kanjiAnimationSpeed.val);\n    $(textId).html(Math.round(Settings.display.kanjiAnimationSpeed.val * 100) + \"%\");\n}\n\n// Sets a specific input's value\nvar setInput = function (id, value) {\n    let kanjiInput = $(id);\n    kanjiInput.val(value);\n    \n    if (value) {\n        kanjiInput.parent().addClass(\"is-dirty\");\n    }\n}\n\n// Updates all dropdowns\nOverlaySettings.updateDropdowns = function() {\n    // \"Language\" page\n    document.querySelectorAll(\"#search-lang-select > .choices__item--choice\").forEach((e) => {\n        if (e.dataset.value == Settings.language.searchLang.val) {\n            let choicesInner = e.parentElement.parentElement.parentElement.children[0].children;\n\n            choicesInner[0].children[0].innerHTML = e.innerHTML;\n            choicesInner[1].children[0].innerHTML = e.innerHTML;\n        }\n    });\n    document.querySelectorAll(\"#page-lang-select > .choices__item--choice\").forEach((e) => {\n        if (e.dataset.value == Settings.language.pageLang.val) {\n            let choicesInner = e.parentElement.parentElement.parentElement.children[0].children;\n\n            choicesInner[0].children[0].innerHTML = e.innerHTML;\n            choicesInner[1].children[0].innerHTML = e.innerHTML;\n        }\n    });\n}\n\n// Updates all checkboxes\nOverlaySettings.updateCheckboxes = function() {\n    // \"Search\" page\n    Util.setMdlCheckboxState(\"show_eng_settings\", Settings.search.alwaysShowEnglish.val);\n    
Util.setMdlCheckboxState(\"show_eng_on_top_settings\", Settings.search.showEnglishOnTop.val);\n    Util.setMdlCheckboxState(\"show_example_sentences_settings\", Settings.search.showExampleSentences.val);\n    Util.setMdlCheckboxState(\"show_sentence_furigana_settings\", Settings.search.showFurigana.val);\n    Util.setMdlCheckboxState(\"focus_search_bar_settings\", Settings.search.focusSearchbar.val);\n    Util.setMdlCheckboxState(\"select_searchbar_content_settings\", Settings.search.selectSearchbarContent.val);\n   \n    // \"Display\" page\n    Util.setMdlCheckboxState(\"use_dark_mode_settings\", Settings.display.theme.val === \"dark\");\n    Util.setMdlCheckboxState(\"show_kanji_on_load_settings\", Settings.display.showKanjiOnLoad.val);\n    Util.setMdlCheckboxState(\"show_kanji_numbers_settings\", Settings.display.showKanjiNumbers.val);\n    \n    // \"Other\" page\n    Util.setMdlCheckboxState(\"dbl_click_copy_settings\", Settings.other.enableDoubleClickCopy.val);\n    Util.setMdlCheckboxState(\"tracking_settings\", Settings.other.trackingAllowed.val);\n}\n\n// Updates all Sub entries\nOverlaySettings.updateSubEntries = function() {\n    // \"Search\" page\n    toggleSubEntry(\"#eng_on_top_parent\", Settings.search.alwaysShowEnglish.val);\n    toggleSubEntry(\"#select_searchbar_content_parent\", Settings.search.focusSearchbar.val);\n}\n\n// Updates all sliders\nOverlaySettings.updateSliders = function() {\n    // \"Display\" page\n    setSliderEntry(\"#show_anim_speed_settings\", \"#show_anim_speed_settings_slider\", Settings.display.kanjiAnimationSpeed.val);\n}\n\n// Updates all inputs\nOverlaySettings.updateInputs = function() {\n    setInput(\"#items_per_page_input\", Settings.search.itemsPerPage.val);\n    setInput(\"#kanji_per_page_input\", Settings.search.kanjiPerPage.val);\n}"
  },
  {
    "path": "html/assets/js/page/sentencePage.js",
    "content": "// Toggles the given translation visible / invisible\nfunction toggleTranslation(element) {\n    let parent = $(element.parentElement);\n\n    parent.find(\".sentence-translation\").toggle(\"hidden\");\n    parent.find(\".lang-separator\").toggle(\"hidden\");\n    parent.find(\".sentence-toggle\").toggleClass(\"hidden\");\n}"
  },
  {
    "path": "html/assets/js/page/wordPage.js",
    "content": "// Object reference for sentence reader\nconst sr = document.getElementById(\"sr\");\n\n// Enable sentence-example expander\n$(\".expander\").on(\"click\", (event) => {\n    event.target.classList.toggle(\"on\");\n    event.target.parentElement.children[0].classList.toggle(\"collapsed\");\n});\n\n// On first load and on every page resize: check where the expander-triangle is needed & whether sentence reader should be centered\nhideUnusedExpanders();\ncenterSentenceReaderIfNeeded();\nvar screenWidth = $(window).width();\n\n$(window).resize(() => {\n    // Mobile scrolling sends resize events because of the (dis-)appearing url input. Simple fix: ignore height changes.\n    if ($(window).width() == screenWidth) {\n        return;\n    }\n\n    screenWidth = $(window).width();\n    hideUnusedExpanders();\n    centerSentenceReaderIfNeeded();\n});\n\n// If the reader is overflown, remove the center to avoid weird style errors\nfunction centerSentenceReaderIfNeeded() {\n    if (sr === undefined || sr === null)\n        return;\n        \n    if (Util.checkOverflow(sr)) {\n        sr.parentElement.classList.add(\"no-center\");\n    } else {\n        sr.parentElement.classList.remove(\"no-center\");\n    }\n}\n\n// Scrolls the sentence reader onto the selected element\nUtil.awaitDocumentReady(scrollSentenceReaderIntoView);\nfunction scrollSentenceReaderIntoView() {\n    let selected = $(\".sentence-part.selected\")[0];\n    if (selected !== undefined) {\n        $(\".search-annotation\").scrollLeft(selected.offsetLeft - $(\".search-annotation\")[0].offsetLeft);\n        $(\".search-annotation\").scrollTop(selected.offsetTop - $(\".search-annotation\")[0].offsetTop);\n    }\n}\n\n// Check if the expander-triangle should be hidden\nfunction hideUnusedExpanders() {\n    $(\".expander\").each((i,e) => {\n        if (e.parentElement.children[0].scrollHeight < 40) {\n            e.classList.add(\"hidden\");\n        } else {\n            
e.classList.remove(\"hidden\");\n        }\n    });\n}"
  },
  {
    "path": "html/assets/js/qol.js",
    "content": "/**\n * This JS-File contains some Quality of Life improvements for the website\n */\n\nvar shiftPressed = false;\n\n// Prevent random dragging of <a> elements\n$('a').mousedown((event) => {\n    event.preventDefault();\n});\n\n$(document).on('keyup keydown keypress', function (e) { shiftPressed = e.shiftKey });\n\n// Key Events for easy usability\n$(document).on(\"keypress\", (event) => {\n    if ($('input:text').is(\":focus\")) return;\n\n    switch (event.key) {\n        case '/': // Focus search bar\n            event.preventDefault();\n            $('#search').focus();\n            $('#search').select();\n            if (window.plausible)\n                plausible(\"shortcut\", {props: {key: \"/\"}});\n            break\n        case 'w': // Focus search bar\n            changeSearchType(null, \"0\");\n            if (window.plausible && Util.isIndexPage())\n                plausible(\"shortcut\", {props: {key: \"w\"}});\n            break;\n        case 'k': // Change to Word Tab\n            changeSearchType(null, \"1\");\n            if (window.plausible && !Util.isIndexPage())\n                plausible(\"shortcut\", {props: {key: \"k\"}});\n            break;\n        case 's': // Change to Sentence Tab\n            changeSearchType(null, \"2\");\n            if (window.plausible && !Util.isIndexPage())\n                plausible(\"shortcut\", {props: {key: \"s\"}});\n            break;\n        case 'n': // Change to Names Tab\n            changeSearchType(null, \"3\");\n            if (window.plausible && !Util.isIndexPage()) {\n                plausible(\"shortcut\", {props: {key: \"n\"}});\n            }\n            break;\n        case 'N': // Open index in new tab\n            window.open(location.origin, \"_blank\");\n            break;\n        case 'p': // Play first Audio on page\n            $(\".audioBtn\").first().trigger(\"click\");\n            if (window.plausible && !Util.isIndexPage())\n                
plausible(\"shortcut\", {props: {key: \"p\"}});\n            break;\n        case \"Enter\": // Do a search while rad-picker is opened\n            if (!$(\".overlay.radical\").hasClass(\"hidden\")) {\n                $(\".btn-search\").click();\n            }\n            break;\n        default:\n            if (event.key > 0 && event.key < 10) {\n                let kanji = $('.kanji-preview.large.black')[event.key - 1]\n                if (kanji !== undefined) {\n                    kanji.click();\n                }\n            }\n    }\n});\n\n// Copies Furigana to clipboard on click\n$('.furigana-preview').on(\"click\", (event) => {\n    // Check if element should not be copied\n    if (!shouldCopyFurigana(event))\n        return;\n\n    // Copy and show message\n    preventDefaultHighlight(event, 100, true, false);\n    JotoTools.copyTextAndEcho($(event.target).html().trim(), \"QOL_FURI_COPIED\");\n});\n\n// Copies full Furigana to clipboard on dblclick\n$('.furigana-preview').on(\"dblclick\", (event) => {\n    // Check if element should not be copied\n    if (!shouldCopyFurigana(event))\n        return;\n\n    // Find all furigana\n    let parent = $(event.target.parentElement.parentElement);\n    let furi = \"\";\n    parent.find('.furigana-preview, .inline-kana-preview').each((i, element) => {\n        furi += element.innerHTML.trim();\n    });\n\n    // Copy and show the correct message\n    preventDefaultHighlight(event, 100, false);\n    Util.copyToClipboard(furi);\n    $('.msg-message.msg-success.msg-visible').last().remove();\n    $('.msg-message.msg-success.msg-visible').last().html(getText(\"QOL_FURI_COPIED_ALL\"));\n});\n\n// Copies translations to clipboard on double click\n$('.kanji-preview').on(\"dblclick\", (event) => {\n    // Check if element should not be copied\n    if (!shouldCopyKanji())\n        return;\n\n    // Copy\n    preventDefaultHighlight(event, 500, false);\n    
copyTranslationAndShowMessage(event.target.parentElement.parentElement);\n});\n\n// Prevent double click highlight\ndocument.querySelectorAll(\".furigana-kanji-container\").forEach(container => {\n    container.addEventListener('mousedown', function (event) {\n        if (event.detail > 1) {\n            event.preventDefault();\n        }\n    }, false);\n});\n\n// Copies translations to clipboard on double click\n$('.inline-kana-preview').on(\"dblclick\", (event) => {\n    // Check if element should not be copied\n    if (!shouldCopyKanji())\n        return;\n\n    // Copy\n    preventDefaultHighlight(event, 500, false);\n    copyTranslationAndShowMessage(event.target.parentElement);\n});\n\n// <rub>-tag Fix for standard double click \ndocument.querySelectorAll(\".furigana-kanji-container\").forEach(container => {\n    container.addEventListener(\"dblclick\", () => {\n        // Dont do anything if auto-copy is turned on\n        if (shouldCopyKanji()) {\n            return;\n        }\n\n        // Get and clear the selection\n        let selection = window.getSelection();\n        selection.removeAllRanges();\n\n        // Select all non-furigana children #1 Firefox exclusive: Multiple selection ranges\n        if (navigator.userAgent.search(\"Firefox\") > -1) {\n            container.childNodes.forEach((child) => {\n                var range = document.createRange();\n                range.setStartBefore(child);\n\n                if (child.tagName === \"RUBY\") {\n                    range.setEndAfter(child.children[0]);\n                } else {\n                    range.setEndAfter(child);\n                }\n\n                selection.addRange(range);\n            });\n\n            // Select all non-furigana children #2\n        } else {\n            var range = document.createRange();\n            range.setStartBefore(container);\n            let lastChild = container.lastChild;\n\n            if (lastChild.tagName === \"RUBY\") {\n                
range.setEndAfter(lastChild.children[0]);\n            } else {\n                range.setEndAfter(lastChild);\n            }\n\n            selection.addRange(range);\n        }\n    });\n\n});\n\n// Check conditions for copying Furigana \nfunction shouldCopyFurigana(event) {\n    // Prevent copying if the text was just a placeholder\n    if (event.target.innerHTML == \"&nbsp;\")\n        return false;\n\n    // Prevent if furigana is part of the sentence reader\n    if ($(event.target).parents().toArray().includes($(\"#sr\")[0])) {\n        return false;\n    }\n\n    // Prevent if user has removed the feature\n    return Settings.other.enableDoubleClickCopy.val;\n}\n\n// Check conditions for copying Kanji \nfunction shouldCopyKanji() {\n    // Prevent if user has removed the feature\n    return Settings.other.enableDoubleClickCopy.val;\n}\n\n// Prevents the default User highlighting\nfunction preventDefaultHighlight(event, timeoutDurationMs, disableClick, disableDoubleClick) {\n    startEventTimeout(event.target, timeoutDurationMs, disableClick, disableDoubleClick);\n    event.preventDefault();\n    Util.deleteSelection();\n}\n\n// Disbaled onclick events for a short period of time\nfunction startEventTimeout(targetElement, durationMs, disableClick = true, disableDoubleClick = true) {\n    // Disbale events for single clicks\n    if (disableClick) {\n        let eventFunc = $._data(targetElement, \"events\").click[0].handler;\n        $._data(targetElement, \"events\").click[0].handler = () => { };\n        setTimeout(() => {\n            $._data(targetElement, \"events\").click[0].handler = eventFunc;\n        }, durationMs);\n    }\n\n    // Disable events for double clicks\n    if (disableDoubleClick) {\n        let eventFuncDbl = $._data(targetElement, \"events\").dblclick[0].handler;\n        $._data(targetElement, \"events\").dblclick[0].handler = () => { };\n\n        setTimeout(() => {\n            $._data(targetElement, \"events\").dblclick[0].handler = 
eventFuncDbl;\n        }, durationMs);\n    }\n}\n\n// Used by kanji/kana copy to combine all parts, starts from the flex (parent)\nfunction copyTranslationAndShowMessage(textParent) {\n    let fullContent = \"\";\n    let onlyKanji = true;\n    let onlyKana = true;\n\n    // Find all childs that are of interest\n    $(textParent).find('.kanji-preview, .inline-kana-preview').each((i, element) => {\n        let txt = element.innerHTML.trim();\n        fullContent += txt\n\n        for (char of txt) {\n            let isKanji = char.match(kanjiRegEx);\n            if (isKanji) {\n                onlyKana = false;\n            } else {\n                onlyKanji = false;\n            }\n        }\n    });\n\n    // Copy and visual feedback\n    JotoTools.copyTextAndEcho(fullContent, onlyKanji ? getText(\"QOL_KANJI_COPIED\") : (onlyKana ? getText(\"QOL_KANA_COPIED\") : getText(\"QOL_SENTENCE_COPIED\")))\n}\n\n// Changes the search type in the upper row depending on the users input\nfunction changeSearchType(html, newType) {\n    var search_value = $('#search').val();\n    if (search_value.length > 0) {\n        Util.loadUrl(JotoTools.createUrl(search_value, newType));\n    }\n}\n\n// Hides the backdrop if clicked directly on it\nfunction onBackdropClick(event) {\n    if (event.target.id === \"backdrop\") {\n        event.target.classList.add(\"hidden\");\n    }\n}\n\n// Focus Search Bar on load if the user wants it to (or on index page)\nUtil.awaitDocumentReady(() => {\n    let is_index = Util.isIndexPage();\n\n    if (Settings.search.focusSearchbar.val && !is_index) {\n        preventNextApiCall = true;\n    }\n\n    if (Settings.search.focusSearchbar.val || is_index) {\n        let s = $('#search');\n        s.focus();\n        Util.setCaretPosition(\"search\", -1);\n        if (Settings.search.selectSearchbarContent.val) {\n            s[0].setSelectionRange(0, s[0].value.length);\n        }\n    }\n});\n\n// Wait for the Document to load 
completely\nUtil.awaitDocumentReady(() => {\n\n    // Iterate all audio Btns on the page (if any) and enable their audio feature\n    $('.audioBtn').each((e, i) => {\n        let audioParent = $(i);\n\n        audioParent.click((e) => {\n            let audio = $(e.target).children()[0];\n            audio.play();\n        });\n    });\n\n    // Allow right-click on \"Play audio\" buttons to copy the proper asset-url\n    $(\".audioBtn\").contextmenu((event) => {\n        event.preventDefault();\n        var url = window.location.origin + $(event.target).attr('data');\n        JotoTools.copyTextAndEcho(url, \"QOL_AUDIO_COPIED\");\n    });\n\n    // Disables the dropdown's animation until the first onclick event\n    $(\".input-field.first-wrap\").one(\"click\", (event) => {\n        $('.choices__list.choices__list--dropdown.index').addClass('animate');\n    })\n\n    // Install the serviceWorker for PWA\n    if ('serviceWorker' in navigator) {\n        navigator.serviceWorker.register('/service-worker.js', {\n            scope: \".\"\n        })\n            .catch(function (error) {\n                console.log('Service worker registration failed, error:', error);\n            });\n    }\n\n    // Change URL to contain the language code\n    if (Util.isInPath(\"search\")) {\n        let currentParams = new URLSearchParams(document.location.search);\n\n        let txt = document.getElementById(\"search\").value;\n        let index = currentParams.get(\"i\") || undefined;\n        let type = currentParams.get(\"t\") || $('#search-type').val();\n        let lang = currentParams.get(\"l\") || Settings.language.searchLang.val;\n        let page = currentParams.get(\"p\") || $(\".pagination-circle.active\").html();\n\n        history.replaceState({}, 'Jotoba', JotoTools.createUrl(txt, type, page || 1, lang, index));\n    }\n});\n"
  },
  {
    "path": "html/assets/js/search/api.js",
    "content": "function API() {};\n\n// Used to store old Requests so they can be cancelled when no longer needed\nAPI.lastRequest = undefined;\n// Numbers > -1 mean that no API call will be made when input.length is above the value\nAPI.suggestionStop = -1;\n\n/** \n *  Calls the API to get input suggestions\n *  @param radicalArray {[]} containing radicals that need to be contained in searched kanji\n*/\nAPI.getSuggestionApiData = function(radicalArray, successFn, errorFn) {\n    // Check if API call should be prevented\n    if (preventNextApiCall) {\n        preventNextApiCall = false;\n        return;\n    }\n    // Prevent if a request failed and the input is >= the text it failed against\n    if (API.suggestionStop > -1 && input.value.length > API.suggestionStop) {\n        return;\n    }\n    else {\n        API.suggestionStop = -1;\n    }\n    \n    // Create the JSON\n    let lang = Cookies.get(\"default_lang\");\n    let type = JotoTools.getCurrentSearchType();\n    let txt = input.value;\n    \n    if (txt.length == 0) {\n        return;\n    }\n\n    let inputJSON = {\n        \"input\": txt,\n        \"search_type\": type,\n        \"lang\": lang === undefined ? 
\"en-US\" : lang,\n        \"radicals\": radicalArray || []\n    }\n\n    // Abort any requests sent earlier\n    if (API.lastRequest !== undefined) {\n        API.lastRequest.abort();\n    }\n\n    // Send Request to backend\n    API.lastRequest = $.ajax({ \n        type : \"POST\", \n        url : \"/api/suggestion\", \n        data: JSON.stringify(inputJSON),\n        headers: {\n            'Content-Type': 'application/json'\n        },\n        success : function(result) { \n            successFn(result);\n        }, \n        error : function(result) { \n            if (result.statusText !== \"abort\") {\n                errorFn(result);\n            }\n        } \n    }); \n}\n\n/**\n * Emulates the API behaviour for suggestions; returning Hashtag values instead\n * @param currentText {string} a single word without spaces, representing the #-value\n * @param callback {function} function to call after collecting suggestions\n */\nAPI.getHashtagData = function(currentText, callback) {\n    let suggestions = [];\n    for (let i = 0; i < hashtags.length; i++) {\n        if (hashtags[i].toLowerCase().includes(currentText.toLowerCase())) {\n            suggestions.push({\"primary\": hashtags[i]});\n\n            if (suggestions.length == 10) {\n                break;\n            }\n        }\n    }\n\n    let resultJSON =  {\n        \"suggestions\": suggestions,\n        \"suggestion_type\": \"hashtag\"\n    }\n\n    callback(resultJSON);\n}\n\n/**\n * Returns the kanji decomposition tree's data of the given literal\n * \n * @param {string} targetLiteral literal to search for\n * @returns the API result\n */\nAPI.getGraphData = async function(targetLiteral) {\n    // Generate input\n    let inputJSON = {\n        \"literal\": targetLiteral,\n        \"full\": Settings.search.showFullGraph.val\n    };\n\n    // Get the data result from the server\n    let result = await $.ajax({ \n        type : \"POST\", \n        url : \"/api/kanji/decompgraph\", \n        
data: JSON.stringify(inputJSON),\n        headers: {\n            'Content-Type': 'application/json'\n        }\n    }); \n\n    return result;\n}"
  },
  {
    "path": "html/assets/js/search/eventHandler.js",
    "content": "/*\n*   Made to Handle search related events. Loads after search.js!\n*/\n\n// Key Events focussing on the search\n$(document).on(\"keydown\", (event) => {\n    if (!$('#search').is(\":focus\")) return;\n\n    // Switch the key code for potential changes\n    switch (event.key) {\n        case \"ArrowUp\": // Use suggestion above current\n            event.preventDefault();\n            Suggestions.overlay.changeSuggestionIndex(-1);\n            break;\n        case \"ArrowDown\": // Use suggestion beneath current\n        case \"Tab\":\n            event.preventDefault();\n            var direction = 1;\n            if (event.key == \"Tab\" && shiftPressed) {\n              direction = -1;\n            }\n            Suggestions.overlay.changeSuggestionIndex(direction);\n            break;\n        case \"Enter\": // Start the search\n            if (currentSuggestionIndex > 0) {\n                event.preventDefault();\n                Suggestions.overlay.activateSelection();\n            } else {\n                $('#searchBtn').click();\n            }\n            break;\n    }\n});\n\n// Adding listeners\nUtil.awaitDocumentReady(() => {\n\n    // Also show shadow text if user clicked before focus event could be caught\n    if ($(input).is(\":focus\")) {\n        Suggestions.updateSuggestions();\n    }\n\n    // Event whenever the user types into the search bar\n    document.getElementById(\"search\").addEventListener(\"input\", e => {\n        Suggestions.updateSuggestions();\n        toggleSearchIcon(200);\n    });\n\n    // Check if input was focussed / not focussed to show / hide overlay\n    document.getElementById(\"search\").addEventListener(\"focus\", e => {\n        Suggestions.updateSuggestions();\n    });\n\n    // Event whenever the user types into the search bar\n    document.querySelector(\"#kanji-search\").addEventListener(\"input\", e => {\n        getRadicalSearchResults();\n    });\n\n    // When clicking anything but the 
search bar or dropdown (used to hide overlays)\n    document.addEventListener(\"click\", e => {\n        if (!Util.isChildOf(searchRow, e.target)) {\n            sContainer.parentElement.classList.add(\"hidden\");\n        }\n    });\n\n    // Check on resize if shadow text would overflow the search bar and show / hide it\n    window.addEventListener(\"resize\", e => {\n        setShadowText();\n    });\n});\n\n\n// Scroll sentence-reader to display selected index\nUtil.awaitDocumentReady(() => {\n    let sentencePart = $('.sentence-part.selected');\n\n    if (sentencePart.length > 0) {\n        $('#sr')[0].scrollTop = (sentencePart.offset().top);\n    }\n});\n\n// Initialize Pagination Buttons\nUtil.awaitDocumentReady(() => {\n    $('.pagination-item:not(.disabled) > button').on(\"click\", (e) => {\n        var searchValue = JotoTools.getCurrentSearch();\n        var searchType = JotoTools.getCurrentSearchType();\n        var targetPage = $(e.target.parentNode).attr(\"target-page\");\n        Util.loadUrl(JotoTools.createUrl(searchValue, searchType, targetPage));\n    });\n});\n"
  },
  {
    "path": "html/assets/js/search/overlay/imageSearch.js",
    "content": "/**\n *  This file handles everything related to image-search requests\n */ \n\n// Quick image search for STRG + V\ndocument.onpaste = (evt) => {\n    let dT = evt.clipboardData || window.clipboardData;\n    let file = dT.files[0];\n\n    if (file !== undefined && file.name.includes(\".png\")) {\n        disableUploadUrlInput(file.name);\n        openImageCropOverlay(file);\n    }\n};\n\n// Shows / Hides the image search overlay\nfunction toggleImageSearchOverlay() {\n    let overlay = $('.overlay.image');\n    overlay.toggleClass('hidden');\n\n    // Reset on close\n    if (urlInputDisabled) {\n        document.getElementById(\"imgUploadFile\").value = null;\n        resetUploadUrlInput();\n    }\n    \n    closeAllSubSearchbarOverlays(\"image\");\n}\n\n// Clicks on the upload SVG should trigger the underlying function\nfunction imgUploadAltClick() {\n    document.getElementById(\"imgUploadFile\").click();    \n}\n\n// Blocks the URL input upon file selection\nfunction imgSearchFileSelected() {\n    let fileInput = document.getElementById(\"imgUploadFile\").files[0];\n    if (fileInput !== undefined) {\n        disableUploadUrlInput(fileInput.name);\n        openImageCropOverlay();\n    } else {\n        resetUploadUrlInput();\n    }\n}\n\n// Toggles the URL input active / disabled\nvar urlInputDisabled = false;\nvar originalMsg, cropTarget;\n\nUtil.awaitDocumentReady(() => {\n    originalMsg = document.getElementById(\"imgUploadUrl\").placeholder;\n});\n\nfunction resetUploadUrlInput() {\n    let urlInput = document.getElementById(\"imgUploadUrl\")\n    urlInput.classList.remove(\"disabled\");\n    urlInput.disabled = false;\n    urlInputDisabled = false;\n    urlInput.placeholder = originalMsg; \n\n    document.getElementById(\"imgUploadFile\").value = null;\n\n    if (cropTarget !== null) {\n        cropTarget.croppie(\"destroy\");\n    }\n    toggleCroppingModal();\n}\n\nfunction disableUploadUrlInput(newMessage) {\n    let urlInput = 
document.getElementById(\"imgUploadUrl\")\n    urlInput.classList.add('disabled');\n    urlInput.disabled = true;\n    urlInputDisabled = true;\n\n    urlInput.value = null;\n    urlInput.placeholder = newMessage; \n}\n\n// Opens the Image Cropping Overlay\nfunction openImageCropOverlay(pastedFile) {\n    var selectedFiles = document.getElementById(\"imgUploadFile\").files;\n    var inputUrl = document.getElementById(\"imgUploadUrl\").value;\n\n    if (selectedFiles.length > 0 || pastedFile !== undefined) {\n        let reader = new FileReader();\n        reader.onload = function(e) {\n            initCroppie(e.target.result);\n        }\n        reader.readAsDataURL(selectedFiles[0] || pastedFile);\n        toggleCroppingModal();\n    }\n    else if (inputUrl.length > 0) {  \n        Util.checkUrlIsImage(inputUrl, () => {\n            initCroppie(inputUrl);\n        });\n        toggleCroppingModal();\n    } else {\n        Util.showMessage(\"error\", getText(\"UPLOAD_NO_INPUT\"));\n    }\n}\n\n// Receives the image from Croppie, sends it to the server and starts the search\nfunction uploadCroppedImage(dataUrl) {\n    cropTarget.croppie('result', {\n        type: 'canvas',\n        size: 'viewport'\n    }).then(function (resp) {\n        // Generate a file from the Base64 String\n        let generatedFile = Util.convertDataURLtoFile(resp);\n\n        // Block Screen until Server responded\n        $(\"#loading-screen\").toggleClass(\"show\", true);\n\n        // Send the Request and handle it\n        Util.sendFilePostRequest(generatedFile, \"/api/img_scan\", function(responseText) {\n            let response = JSON.parse(responseText);\n            if (response.code !== undefined) { // JSON doesnt have a code when the text is given\n                Util.showMessage(\"error\", response.message);\n                $(\"#loading-screen\").toggleClass(\"show\", false);\n            } else {\n                if (response.text.length == 1 && 
response.text.match(kanjiRegEx)) {\n                    Util.loadUrl(JotoTools.createUrl(response.text, 1));\n                } else {\n                    Util.loadUrl(JotoTools.createUrl(response.text));\n                }\n            }\n        });\n    });\n    \n    resetUploadUrlInput();\n}\n\n// Loads the Image Cropper\nfunction initCroppie(inputUrl) {\n    cropTarget = $('#croppingTarget').croppie({\n    showZoomer: false,\n    enableResize: true,\n    enableOrientation: true,\n    mouseWheelZoom: 'ctrl'\n    });\n    cropTarget.croppie('bind', {\n        url: inputUrl,\n    });\n\n    cropTarget.croppie('result', 'html').then(function(html) { });\n}\n\n// Custom Modal Toggle function for the custom Modal\nvar modalIsVisible = false;\nfunction toggleCroppingModal() {\n    if (modalIsVisible) {\n        $(\".modal-backdrop\").remove();\n        $(\"#imageCroppingModal\").css(\"display\", \"none\");\n    } else {\n        $(\"body\").append('<div class=\"modal-backdrop fade show\"></div>');\n        $(\"#imageCroppingModal\").css(\"display\", \"block\");\n    }\n\n    modalIsVisible = !modalIsVisible;\n    $(\"#imageCroppingModal\").modal();\n    $(\"#imageCroppingModal\").toggleClass(\"show\");\n}"
  },
  {
    "path": "html/assets/js/search/overlay/radicalSearch.js",
    "content": "/**\n *  Used to handle the radical search\n*/\nconst radicals = [\n    [\"一\", \"｜\", \"丶\", \"ノ\", \"乙\", \"亅\"],\n    [\"二\", \"亠\", \"人\", \"⺅\", \"𠆢\", \"儿\", \"入\", \"ハ\", \"丷\", \"冂\", \"冖\", \"冫\", \"几\", \"凵\", \"刀\", \"⺉\", \"力\", \"勹\", \"匕\", \"匚\", \"十\", \"卜\", \"卩\", \"厂\", \"厶\", \"又\", \"マ\", \"九\", \"ユ\", \"乃\", \"𠂉\"],\n    [\"⻌\", \"口\", \"囗\", \"土\", \"士\", \"夂\", \"夕\", \"大\", \"女\", \"子\", \"宀\", \"寸\", \"小\", \"⺌\", \"尢\", \"尸\", \"屮\", \"山\", \"川\", \"巛\", \"工\", \"已\", \"巾\", \"干\", \"幺\", \"广\", \"廴\", \"廾\", \"弋\", \"弓\", \"ヨ\", \"彑\", \"彡\", \"彳\", \"⺖\", \"⺘\", \"⺡\", \"⺨\", \"⺾\", \"⻏\", \"⻖\", \"也\", \"亡\", \"及\", \"久\"],\n    [\"⺹\", \"心\", \"戈\", \"戸\", \"手\", \"支\", \"攵\", \"文\", \"斗\", \"斤\", \"方\", \"无\", \"日\", \"曰\", \"月\", \"木\", \"欠\", \"止\", \"歹\", \"殳\", \"比\", \"毛\", \"氏\", \"气\", \"水\", \"火\", \"⺣\", \"爪\", \"父\", \"爻\", \"爿\", \"片\", \"牛\", \"犬\", \"⺭\", \"王\", \"元\", \"井\", \"勿\", \"尤\", \"五\", \"屯\", \"巴\", \"毋\"],\n    [\"玄\", \"瓦\", \"甘\", \"生\", \"用\", \"田\", \"疋\", \"疒\", \"癶\", \"白\", \"皮\", \"皿\", \"目\", \"矛\", \"矢\", \"石\", \"示\", \"禸\", \"禾\", \"穴\", \"立\", \"⻂\", \"世\", \"巨\", \"冊\", \"母\", \"⺲\", \"牙\"],\n    [\"瓜\", \"竹\", \"米\", \"糸\", \"缶\", \"羊\", \"羽\", \"而\", \"耒\", \"耳\", \"聿\", \"肉\", \"自\", \"至\", \"臼\", \"舌\", \"舟\", \"艮\", \"色\", \"虍\", \"虫\", \"血\", \"行\", \"衣\", \"西\"],\n    [\"臣\", \"見\", \"角\", \"言\", \"谷\", \"豆\", \"豕\", \"豸\", \"貝\", \"赤\", \"走\", \"足\", \"身\", \"車\", \"辛\", \"辰\", \"酉\", \"釆\", \"里\", \"舛\", \"麦\"],\n    [\"金\", \"長\", \"門\", \"隶\", \"隹\", \"雨\", \"青\", \"非\", \"奄\", \"岡\", \"免\", \"斉\"],\n    [\"面\", \"革\", \"韭\", \"音\", \"頁\", \"風\", \"飛\", \"食\", \"首\", \"香\", \"品\"],\n    [\"馬\", \"骨\", \"高\", \"髟\", \"鬥\", \"鬯\", \"鬲\", \"鬼\", \"竜\", \"韋\"],\n    [\"魚\", \"鳥\", \"鹵\", \"鹿\", \"麻\", \"亀\", \"啇\", \"黄\", \"黒\"],\n    [\"黍\", \"黹\", \"無\", \"歯\"],\n    [\"黽\", \"鼎\", \"鼓\", \"鼠\"],\n    [\"鼻\", \"齊\"],\n    [\"龠\"]\n];\n\nvar radicalMask = [\n    [0, 0, 0, 
0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0, 0, 0, 0, 0, 0],\n    [0, 0, 0, 0],\n    [0, 0, 0, 0],\n    [0, 0],\n    [0]\n];\n\nvar baseRadResult;\nvar currentSearchInput;\nvar lastRadicalSearchResult;\n\nUtil.awaitDocumentReady(() => {\n    baseRadResult = $('.rad-results')[0].innerHTML;\n    loadRadicals(0);\n\n    // Used to re-focus searchbar upon using Radical Btns\n    $(\"#kanji-search\").focus(e => {\n        currentSearchInput = $(\"#kanji-search\");\n    });\n    $(\"#search\").focus(e => {\n        currentSearchInput = $(\"#search\");\n    });\n});\n\n// Opens | Closes the Radical overlay\nfunction toggleRadicalOverlay() {\n    closeAllSubSearchbarOverlays(\"radical\");\n\n    let overlay = $('.overlay.radical');\n    overlay.toggleClass('hidden');\n    sContainer.parentElement.classList.add(\"hidden\");\n\n    // Reset on close\n    if (overlay.hasClass(\"hidden\")) {\n        resetRadPicker()\n        rContainer.classList.add(\"hidden\");\n        Suggestions.overlay.show();\n    } else {\n        $('.rad-results').html(baseRadResult);\n        $('.rad-results').removeClass(\"hidden\");\n        Suggestions.updateSuggestions();\n        scrollSearchIntoView();\n        $('#kanji-search').focus();\n    }\n}\n\n// Called by reset btn. 
Deselects all\nfunction resetRadPicker() {\n    $('.rad-btn.selected').each((i, e) => {\n        $(e).removeClass(\"selected\");\n    });\n\n    $('.rad-btn.disabled').each((i, e) => {\n        $(e).removeClass(\"disabled\");\n    });\n\n    iterateMaskAsync((i, j) => {\n        radicalMask[i][j] = 0;\n    });\n\n    $('.rad-results').html(baseRadResult);\n    resetAllTabs();\n\n    currentSearchInput.focus();\n}\n\n// Adds the selected Kanji to the search bar\nfunction handleKanjiSelect(event) {\n    // Insert Kanji in search bar\n    $('#search').val($('#search').val() + event.target.innerHTML);\n\n    // Update search bar\n    Suggestions.updateSuggestions();\n    toggleSearchIcon(200);\n\n    // Focus the last search bar\n    currentSearchInput.focus();\n}\n\n// Toggles Radicals on Input and loads the results\nfunction handleRadicalSelect(event) {\n    let target = $(event.target);\n\n    // Dont do anything if disabled\n    if (target.hasClass(\"disabled\")) {\n        return;\n    }\n\n    // Update Radical Map\n    if (target.hasClass(\"selected\")) {\n        radicalMask[target.attr(\"index\")][target.attr(\"position\")] = 0;\n    } else {\n        radicalMask[target.attr(\"index\")][target.attr(\"position\")] = 1;\n    }\n        \n    // Make results visible again if they were hidden\n    $('.rad-results').removeClass(\"hidden\");\n\n    // Toggle the \"selected\" class\n    target.toggleClass('selected');\n\n    // Get possible Kanji / Radicals from selection\n    getRadicalInfo();\n    \n    // Focus the last search bar\n    currentSearchInput.focus();\n\n    // Update search bar\n    Suggestions.updateSuggestions(getSelectedRadicalArray());\n}\n\n// Opens the Radical Page at the given index\nlet lastRadicalPage;\nfunction openRadicalPage(index) {\n    // Handle special pages\n    if (index == -1) {\n        if (lastRadicalPage !== undefined) {\n            index = lastRadicalPage;\n        } \n        else {\n            openRadicalPage(0);\n           
 return;\n        }\n    }\n\n    // Iterate buttons and update whether to hightlight them or not\n    $(\".rad-page-toggle > span\").each((i, e) => {\n        if (i == index) {\n            e.classList.add(\"selected\");\n            lastRadicalPage = index;\n        } else\n            e.classList.remove(\"selected\");\n    });\n    \n    // Load Radicals of new page\n    loadRadicals(index);\n\n     // Focus the last search bar\n     currentSearchInput.focus();\n}\n\n// Clears the shown Radical list\nfunction clearRadicals() {\n    // Clear Radicals\n    $(\".rad-btn.picker:not(.num)\").each((i, e) => {\n        if (e.classList.contains(\"selected\")) {\n            radicalMask[e.getAttribute(\"index\")][e.getAttribute(\"position\")] = 1;\n        }\n    });\n    $(\".rad-picker\").html(\"\");\n}\n\n// Loads the Radicals of the specific tab\nfunction loadRadicals(tabIndex) {\n    // Clear Radicals\n    clearRadicals();\n\n    // Add Radicals\n    if (tabIndex == 0) {\n        addRadicals(0);\n        addRadicals(1);\n    }\n    else if (tabIndex == 9) {\n        for (let i = 10; i < radicals.length; i++) {\n            addRadicals(i);\n        }\n    }\n    else if (tabIndex == 10) {\n        loadRadicalSearchResults(lastRadicalSearchResult);\n    }\n    else {\n        addRadicals(tabIndex+1);\n    }\n}\n\n// Loads the given Array into the Select Radicals Tab\nfunction addRadicals(arrayIndex) {\n    let html = $(\".rad-picker\").html();\n    html += '<span class=\"rad-btn picker num\">'+(arrayIndex+1)+'</span>';\n\n    for (let i = 0; i < radicals[arrayIndex].length; i++) {\n        html += '<span class=\"rad-btn picker'+(radicalMask[arrayIndex][i] == 1 ? \" selected\" : \"\")+(radicalMask[arrayIndex][i] == -1 ? 
\" disabled\" : \"\")+'\" index='+arrayIndex+' position='+i+' onClick=\"handleRadicalSelect(event)\">'+radicals[arrayIndex][i]+'</span>';\n    }\n\n    $(\".rad-picker\").html(html);\n}\n\n// Appends radicals contained in an array\nfunction addRadicalsFromArray(index, array) {\n    let html = $(\".rad-picker\").html();\n    html += '<span class=\"rad-btn picker num\">'+index+'</span>';\n    index -= 1;\n\n    for (let a = 0; a < array.length; a++) {\n        for (let j = 0; j < radicals[index].length; j++) {\n            if (radicals[index][j] == array[a]) {\n                html += '<span class=\"rad-btn picker'+(radicalMask[index][j] == 1 ? \" selected\" : \"\")+(radicalMask[index][j] == -1 ? \" disabled\" : \"\")+'\" index='+index+' position='+j+' onClick=\"handleRadicalSelect(event)\">'+radicals[index][j]+'</span>';\n            }\n        }\n    }\n\n    $(\".rad-picker\").html(html);\n}\n\n// Loads Kanji / Radical result from API into frontend\nfunction loadRadicalResults(info) {\n    var rrHtml = \"\";\n\n    // Get and Iterate Kanji Keys\n    let kanjiKeys = Object.keys(info.kanji)\n\n    // Iterate all and add\n    for (let i = 0; i < kanjiKeys.length; i++) {\n\n        // Get the data\n        let key = kanjiKeys[i];\n        let possibleKanji = info.kanji[key];\n\n        // Create the stroke-count btn\n        rrHtml += '<span class=\"rad-btn result num noselect\">' + key + '</span>';\n\n        let kanjiBtns = \"\";\n\n        // Create the btn for each entry\n        for (let j = 0; j < possibleKanji.length; j++) {\n            kanjiBtns += '<span class=\"rad-btn result noselect\" onClick=\"handleKanjiSelect(event)\">' + possibleKanji[j] + '</span>';\n        }\n\n        rrHtml += kanjiBtns;\n    }\n\n    $('.rad-results').html(rrHtml);\n\n    // Only activate possible radicals\n    let currentRadicals = $('.rad-btn.picker:not(.num)').toArray();\n    for (let i = 0; i < currentRadicals.length; i++) {\n        let rad = $(currentRadicals[i]);\n        
if (info.possible_radicals.includes(rad.html()) || rad.hasClass(\"selected\")) {\n            rad.removeClass(\"disabled\");\n        } else {\n            rad.addClass(\"disabled\");\n        }\n    }\n\n    // Apply changes to mask\n    iterateMaskAsync((i, j) => {\n        if (!info.possible_radicals.includes(radicals[i][j])) {\n            radicalMask[i][j] = -1;\n        } else if (radicalMask[i][j] == -1) {\n            radicalMask[i][j] = 0;\n        }\n    });\n\n}\n\n// Calls the given function on every iteration of the array. Passes i (outer) and j (inner) as params.\nfunction iterateMaskAsync(functionToCall, startIndex, endIndex) {\n    if (startIndex == undefined) {\n        let middle = Math.floor(radicals.length / 2); \n        iterateMaskAsync(functionToCall, middle, radicals.length);\n        startIndex = 0;\n        endIndex = middle;\n    }\n\n    for (let i = startIndex; i < endIndex; i++) {\n        for (let j = 0; j < radicals[i].length; j++) {\n           functionToCall(i, j);\n        }\n    }\n\n    updateTabVisuals();\n}\n\n// Checks whether the Page-Tabs have to be colored in a specific way (None possible, element selected...)\nasync function updateTabVisuals() {\n    for (let i = 0; i < 10; i++) {\n        let tabStatus = -1;\n\n        // First Tab\n        if (i == 0) {\n            tabStatus = checkRadicalsInTab(0);\n            let tabStatus2 = checkRadicalsInTab(1);\n            if (tabStatus2 == 0 && tabStatus == -1)\n                tabStatus = 0;\n            else if (tabStatus2 == 1)\n                tabStatus = 1;\n        }\n        // Last Tab\n        else if (i == 9) {\n            for (let j = 10; j < radicals.length; j++) {\n                tabStatus = checkRadicalsInTab(j);\n                $(\"#r-t\"+j).toggleClass(\"disabled\", tabStatus == -1);\n                $(\"#r-t\"+j).toggleClass(\"highlighted\", tabStatus == 1);\n            }\n            break;\n        }\n        // Any other Tab\n        else {\n            
tabStatus = checkRadicalsInTab(i+1);\n        }\n\n        $(\"#r-t\"+i).toggleClass(\"disabled\", tabStatus == -1);\n        $(\"#r-t\"+i).toggleClass(\"highlighted\", tabStatus == 1);\n    }\n}\n\n// Called by updateTabVisuals. Checks for tabDisabled (-1) | normal (0) | highlighted (1)\nfunction checkRadicalsInTab(arrayIndex) {\n    let status = -1;\n\n    for (let i = 0; i < radicals[arrayIndex].length; i++) {\n        if (radicalMask[arrayIndex][i] == 0) {\n            status = 0;\n        } else if (radicalMask[arrayIndex][i] == 1) {\n            status = 1; \n            break;\n        }\n    }\n\n    return status;\n}\n\n// Resets all Radical-Tabs by removing class-modifiers\nfunction resetAllTabs() {\n    for (let i = 0; i < 10; i++) {\n        $(\"#r-t\"+i).removeClass(\"disabled\");\n        $(\"#r-t\"+i).removeClass(\"highlighted\");\n    }\n}\n\n// Resets all Radical-Tabs by removing class-modifiers including the selected tab\nfunction closeAllTabs() {\n    for (let i = 0; i < 10; i++) {\n        $(\"#r-t\"+i).removeClass(\"disabled\");\n        $(\"#r-t\"+i).removeClass(\"selected\");\n    }\n}\n\n// Returns an array only containing selected radicals\nfunction getSelectedRadicalArray() {\n    let arr = [];\n\n    // Populate radicals within JSON with all selected radicals\n    for (let i = 0; i < radicalMask.length; i++) {\n        for (let j = 0; j < radicalMask[i].length; j++) {\n            if (radicalMask[i][j] == 1) {\n                arr.push(radicals[i][j]);\n            }\n        }\n    }\n\n    return arr;\n}\n\n// Calls the API to get all kanji and radicals that are still possible\nfunction getRadicalInfo() {\n    // Create the JSON\n    let radicalJSON = {\n        \"radicals\": getSelectedRadicalArray()\n    }\n\n    // No Radicals selected, Reset\n    if (radicalJSON.radicals.length == 0) {\n        $('.rad-btn.disabled').each((i, e) => {\n            $(e).removeClass(\"disabled\");\n        });\n        iterateMaskAsync((i, j) => {\n    
        if (radicalMask[i][j] == -1) {\n                radicalMask[i][j] = 0;\n            }\n        });\n        resetAllTabs();\n\n        return;\n    }\n\n    // Send Request to backend\n    $.ajax({\n        type: \"POST\",\n        url: \"/api/kanji/by_radical\",\n        data: JSON.stringify(radicalJSON),\n        headers: {\n            'Content-Type': 'application/json'\n        },\n        success: function (result) {\n            // Load the results into frontend\n            loadRadicalResults(result);\n        },\n        error: function (result) {\n            // Print Error\n            Util.showMessage(\"error\", getText(\"RADICAL_API_UNREACHABLE\"))\n        }\n    });\n}\n\n// Calls the API to get input suggestions\nvar lastRadRequest;\nfunction getRadicalSearchResults() {\n\n    // Get value for the input\n    let query = $(\"#kanji-search\").val();\n    if (query.length == 0) {\n        return;\n    }\n\n    // Create the JSON\n    let inputJSON = {\n        \"query\": query\n    }\n\n    // Abort any requests sent earlier\n    if (lastRadRequest !== undefined) {\n        lastRadRequest.abort();\n    }\n\n    // Send Request to backend\n    lastRadRequest = $.ajax({ \n        type : \"POST\", \n        url : \"/api/radical/search\", \n        data: JSON.stringify(inputJSON),\n        headers: {\n            'Content-Type': 'application/json'\n        },\n        success : function(result) { \n            console.log(result);\n            // Load the results into frontend\n            loadRadicalSearchResults(result);\n            lastRadicalSearchResult = result;\n        }, \n        error : function(result) { \n            $(\"#r-tc\").removeClass(\"show\");\n            $(\"#r-tc\").removeClass(\"selected\");\n        } \n    }); \n}\n\n// Visualizes the results of getRadicalSearchResults\nfunction loadRadicalSearchResults(results) {\n    let firstFound = false;\n\n    for (let i = 1; i <= 15; i++) {\n        if (results.radicals[i] !== 
undefined) {\n            if (!firstFound) {\n                firstFound = true;\n                \n                clearRadicals();\n                closeAllTabs();\n\n                $(\"#r-tc\").addClass(\"show\");\n                $(\"#r-tc\").addClass(\"selected\");\n            }\n\n            addRadicalsFromArray(i, results.radicals[i]);\n        }\n    }\n\n    if (!firstFound) {\n        $(\"#r-tc\").removeClass(\"show\")\n        openRadicalPage(-1);\n    }\n}\n"
  },
  {
    "path": "html/assets/js/search/overlay/speechSearch.js",
    "content": "/**\n * This JS-File implements the Speech to Text functionality for text input\n */\n\nvar SpeechRecognition, recognition;\n\ntry {\n    SpeechRecognition = SpeechRecognition || webkitSpeechRecognition;\n    recognition = new SpeechRecognition();\n\n    recognitionSetup();\n} catch (e) {}\n\n// Handles the initial setup of the recognition lib \nfunction recognitionSetup() {\n    recognition.lang = 'en-US';\n    recognition.continuous = false;\n    recognition.interimResults = false;\n    recognition.maxAlternatives = 1;\n    \n    // On recognition start\n    recognition.onstart = function() {\n        $('#currentlyListening').html(getText(\"SPEECH_LISTEN_YES\"));\n        $('.voiceSvg').toggleClass(\"active\");\n    };\n    \n    // On recognition error\n    recognition.onerror  = function(event) { \n        console.log(event.error);\n        switch(event.error) {\n            case \"not-allowed\":\n                Util.showMessage(\"error\", getText(\"SPEECH_NO_PERMISSION\"));\n                break;\n            case \"aborted\":\n                Util.showMessage(\"info\", getText(\"SPEECH_ABORT\"));\n                break;\n            case \"no-speech\":\n                Util.showMessage(\"info\", getText(\"SPEECH_NO_VOICE\"));\n                break;\n            default:\n                Util.showMessage(\"error\", getText(\"SPEECH_NOT_SUPPORTED\"));\n        }\n        $('#currentlyListening').html(getText(\"SPEECH_LISTEN_NO\"));\n        $('.voiceSvg').toggleClass(\"active\");\n    }\n    \n    // On speech end\n    recognition.onspeechend = function() {\n        recognition.stop();\n        $('#currentlyListening').html(getText(\"SPEECH_LISTEN_NO\"));\n        $('.voiceSvg').toggleClass(\"active\");\n    }\n    \n    // On recognition result\n    recognition.onresult = function(event) {\n        let transcript = event.results[0][0].transcript;\n        $('#search').val(transcript);\n    };\n}\n\n// Toggles the overlay on and off\nfunction 
toggleSpeakOverlay() {\n    if (recognition == undefined) {\n        Util.showMessage(\"error\", getText(\"SPEECH_NOT_SUPPORTED\"));\n        return;\n    }\n\n    closeAllSubSearchbarOverlays(\"speech\");\n\n    let overlay = $('.overlay.speech');\n    overlay.toggleClass('hidden');\n\n    if (overlay.hasClass(\"hidden\")) {\n        recognition.abort();\n        recognition.stop();\n    }\n}\n\n// Activate the given language for speech recognition TODO save in cookie\nfunction setRecognitionLang(lang) {\n    recognition.abort();\n\n    switch(lang) {\n        case \"jap\":\n            recognition.lang = \"ja\";\n            $('#currentSpeechLang').html(getText(\"LANG_JAP\"));\n            break\n        case \"ger\":\n            recognition.lang = \"de-DE\";\n            $('#currentSpeechLang').html(getText(\"LANG_GER\"));\n            break\n        case \"eng\":\n            recognition.lang = \"en-US\";\n            $('#currentSpeechLang').html(getText(\"LANG_ENG\"));\n            break\n        case \"rus\":\n            recognition.lang = \"ru\";\n            $('#currentSpeechLang').html(getText(\"LANG_RUS\"));\n            break\n        case \"spa\":\n            recognition.lang = \"es-ES\";\n            $('#currentSpeechLang').html(getText(\"LANG_SPA\"));\n            break\n        case \"swe\":\n            recognition.lang = \"sv-SE\";\n            $('#currentSpeechLang').html(getText(\"LANG_SWE\"));\n            break\n        case \"fre\":\n            recognition.lang = \"fr-FR\";\n            $('#currentSpeechLang').html(getText(\"LANG_FRE\"));\n            break\n        case \"dut\":\n            recognition.lang = \"nl-NL\";\n            $('#currentSpeechLang').html(getText(\"LANG_DUT\"));\n            break\n        case \"hun\":\n            recognition.lang = \"hu\";\n            $('#currentSpeechLang').html(getText(\"LANG_HUN\"));\n            break\n        case \"slv\":\n            recognition.lang = \"sl-SI\";\n            
$('#currentSpeechLang').html(getText(\"LANG_SLV\"));\n            break\n    }\n\n    setTimeout(function(){ recognition.start(); }, 400);\n}"
  },
  {
    "path": "html/assets/js/search/overlay/suggestionOverlay.js",
    "content": "/*\n*   Handles functions related to the suggestion Overlay. Load before search.js!\n*/\n\nSuggestions.overlay = function () {}; \n\n// Shows the suggestions overlay\nSuggestions.overlay.show = function() {\n    if (availableSuggestions > 0 && input.value.length > 0) {\n        sContainer.parentElement.classList.remove(\"hidden\");\n        if (typeof scrollSearchIntoView === \"function\") {\n            scrollSearchIntoView();\n        }\n    } else {\n        sContainer.parentElement.classList.add(\"hidden\");\n    } \n}\n\n// Searches for the currently selected suggestion\nSuggestions.overlay.activateSelection = function() {\n    $(\"#suggestion-container > .search-suggestion\")[currentSuggestionIndex-1].click();\n}\n\n// Selects the suggestion at the index above (-1) or beneath (1)\nSuggestions.overlay.changeSuggestionIndex = function(direction) {\n\n    // Remove highlight from last suggestion\n    if (currentSuggestionIndex != 0) { \n        $(\"#suggestion-container > .search-suggestion\")[currentSuggestionIndex-1].classList.remove(\"selected\");\n    }\n    \n    // Calculate new suggestion index\n    currentSuggestionIndex = Math.positiveMod(currentSuggestionIndex + direction, availableSuggestions + 1);\n    \n    // Set new highlight\n    if (currentSuggestionIndex != 0) { \n        \n        // Get current suggestion\n        let suggestion = $(\"#suggestion-container > .search-suggestion\")[currentSuggestionIndex-1];\n        let s_children = suggestion.children;\n        \n        // Add Furigana. If Kanji are used, select the secondary suggestion. 
If user types kanji, show him kanji instead\n        if (s_children[1].innerHTML.length > 0 && input.value.match(kanjiRegEx) === null) {\n            currentSuggestion = s_children[1].innerHTML.substring(1, s_children[1].innerHTML.length - 1);\n        } else {\n            currentSuggestion = s_children[0].innerHTML;\n        }\n\n        // Mark the suggestion's row\n        suggestion.classList.add(\"selected\");\n    }\n    else {\n        currentSuggestion = \"\";\n    }\n\n    // Update shadow text\n    setShadowText();\n}\n\n"
  },
  {
    "path": "html/assets/js/search/search.js",
    "content": "/**\n * This JS-File contains functions handling the website search (e.g. Search suggestions)\n */\n\n// Prepare Search / Voice Icon when loading the page\nUtil.awaitDocumentReady(() => {\n    toggleSearchIcon(0);\n});\n\n// Shows the Voice / Search Icon when possible\nfunction toggleSearchIcon(duration) {\n    if (document.getElementById(\"search\").value.length == 0) {\n        $('#searchBtn.search-embedded-btn').hide(duration);\n        $('#voiceBtn.search-embedded-btn').show(duration);\n    } else {\n        $('#searchBtn.search-embedded-btn').show(duration);\n        $('#voiceBtn.search-embedded-btn').hide(duration);\n    }\n}\n\n// Resets the value of the search input\nfunction emptySearchInput() {\n    $('#search').val(\"\");\n    $('#search').focus();\n    toggleSearchIcon(200);\n}\n\n// Returns the substring of what the user already typed for the current suggestion\n// If target is not empty, the substring of target will be searched instead\nfunction getCurrentSubstring(target) {\n    let currentSubstr = \"\";\n    let foundSubstr = false;\n\n    if (target === undefined) {\n        target = currentSuggestion;\n    }\n\n  for (let i = target.length; i > 0; i--) {\n        currentSubstr = target.substring(0, i).toLowerCase();\n        let index = input.value.toLowerCase().lastIndexOf(currentSubstr)\n\n        if (index == -1) {\n            continue;\n        }\n\n        if (index + currentSubstr.length === input.value.length) {\n            foundSubstr = true;\n            break;\n        }\n    }\n\n    return foundSubstr ? 
currentSubstr : \"\";\n}\n\n// Interrupts the form's submit and makes the user visit the correct page\nfunction onSearchStart() {\n    var search_value = $('#search').val();\n    var search_type = $('#search-type').val();\n\n    if (window.plausible) {\n        plausible('search', {props: {query: search_value, origin: location.pathname, language: Settings.language.searchLang.val}});\n    }\n\n    if (search_value.length == 0) {\n        Util.loadUrl(JotoTools.createUrl());\n    } else {\n        Util.loadUrl(JotoTools.createUrl(search_value, search_type));\n    }\n\n    return false;\n}\n\n// When opening an overlay, scroll it into view\nfunction scrollSearchIntoView() {\n    if (document.location.origin+\"/\" === document.location.href) {\n        var top = $('#search').offset().top;\n        Util.scrollTo(top, 500);\n    }\n}\n\n// Closes all overlays connected to the search bar\nfunction closeAllSubSearchbarOverlays(overlayToIgnore) {\n    if (overlayToIgnore !== \"speech\")\n        $('.overlay.speech').addClass('hidden');\n    if (overlayToIgnore !== \"radical\") \n        $('.overlay.radical').addClass('hidden');\n    if (overlayToIgnore !== \"image\")\n        $('.overlay.image').addClass('hidden');\n}\n\n// Opens the Help Page\nfunction openHelpPage() {\n    document.getElementsByClassName(\"infoBtn\")[0].classList.remove(\"new\");\n    Settings.alterOther(\"firstVisit\", false, );\n\n    Util.loadUrl(\"/help\");\n}\n\nfunction onHomeClick(event) {\n    event.preventDefault();\n\n    switch (event.which) {\n        case 1:\n            location.href = location.origin;\n            break;\n        case 2:\n            window.open(location.origin, \"_blank\");\n            break;\n        default:\n            break;\n    }\n}"
  },
  {
    "path": "html/assets/js/search/shared.js",
    "content": "/**\n * This JS-File contains variables shared between files to improve performance\n */\n\nconst kanjiRegEx = '([一-龯|々|𥝱|𩺊])';\nconst hashtags = [\n  \"#adverb\", \"#auxilary\", \"#conjunction\", \"#noun\", \"#prefix\", \"#suffix\", \"#particle\", \"#sfx\",\n  \"#verb\", \"#adjective\", \"#counter\", \"#expression\", \"#interjection\", \"#pronoun\", \"#numeric\", \"#transitive\", \"#intransitive\",\n  \"#unclassified\", \"#word\", \"#sentence\", \"#name\", \"#kanji\", \"#abbreviation\",\"#katakana\", \"#N5\", \"#N4\", \"#N3\", \"#N2\", \"#N1\", \"#JLPT5\", \"#JLPT4\", \"#JLPT3\", \"#JLPT2\", \"#JLPT1\", \"#hidden\", \"#Irregular-Ichidan\",\n  \"#Abbreviation\", \"#Archaism\", \"#ChildrensLanguage\", \"#Colloquialism\", \"#Dated\", \"#Derogatory\", \"#Familiarlanguage\",\n  \"#Femaleterm\", \"#Honorific\", \"#Humblelanguage\", \"#Idomatic\", \"#Legend\", \"#Formal\", \"#MangaSlang\", \"#Maleterm\", \"#InternetSlang\",\n  \"#Obsolete\", \"#Obscure\", \"#Onomatopoeic\", \"#PersonName\", \"#Placename\", \"#Poeticalterm\", \"#PoliteLanguage\", \"#Proverb\", \"#Quotation\", \"#Rare\", \"#Religion\", \"#Sensitive\",\n  \"#Slang\", \"#UsuallyKana\", \"#Vulgar\", \"#Artwork\", \"#Yojijukugo\",\n];\n\nvar currentSuggestion = \"\";\nvar currentSuggestionIndex = 0; // 0 => nothing\nvar availableSuggestions = 0;\nvar preventNextApiCall = false;\n\nvar input, searchRow, shadowText, sContainer, rContainer;\n\nUtil.awaitDocumentInteractive(() => {\n  input = document.getElementById(\"search\");\n  searchRow = document.getElementById(\"search-row\");\n  shadowText = document.getElementById(\"shadow-text\");\n  sContainer = document.getElementById(\"suggestion-container\");\n  rContainer = document.getElementById(\"suggestion-container-rad\");\n});\n"
  },
  {
    "path": "html/assets/js/search/suggestions.js",
    "content": "function Suggestions() {};\n\n/**\n * Updates the suggestions help and respects selected radicals if given some\n * \n * @param radicalArray {[]} containing radicals that need to be contained in searched kanji\n */\nSuggestions.updateSuggestions = function(radicalArray) {\n\n    // Tooltips for # - searches\n    let lastWord = Util.getLastWordOfString(input.value);\n    if (lastWord.includes(\"#\")) {\n        API.getHashtagData(lastWord, loadSuggestionApiData);\n        \n    // Tooltips for everything else\n    } else if (input.value.length > 0) {\n        API.getSuggestionApiData(radicalArray, loadSuggestionApiData, removeSuggestions);\n\n    // Remove suggestions if the input is empty\n    } else {\n        removeSuggestions();\n    }\n\n    // Set shadow text\n    setShadowText();    \n}\n\n// Sets the shadow's text whenever possible\nfunction setShadowText() {\n    // If input is overflown, dont show text\n    if (Util.checkOverflow(shadowText) && shadowText.innerHTML != \"\") {\n        shadowText.innerHTML = \"\";\n        return\n    }\n\n    // Make invisible temporarily\n    shadowText.style.opacity = 0;\n\n    // Check how much of suggestion is typed already\n    let currentSubstr = getCurrentSubstring();\n\n    // Add missing suggestion to shadow text\n    if (currentSubstr.length > 0) {\n        shadowText.innerHTML = input.value + currentSuggestion.substring(currentSubstr.length);\n    } else {\n        shadowText.innerHTML = \"\";\n    }   \n\n    // If it would overflow with new text, don't show\n    if (Util.checkOverflow(shadowText)) {\n        shadowText.innerHTML = \"\";\n    }\n\n    // Make visible again\n    shadowText.style.opacity = 0.4;\n}\n\n// Called only by [getSuggestionApiData]. 
Loads data called from the API into the frontend\nfunction loadSuggestionApiData(result) {\n\n    // Remove current suggestions\n    removeSuggestions();\n\n    // Return if no suggestions were found\n    if (result.suggestions.length == 0) {\n\n        // Prevent future requests if no result was found and input was > 8 chars\n        if (input.value >= 8) { \n            API.suggestionStop = input.value.length;\n        }\n\n        // Return\n        return;\n    } else {\n        // Show Suggestions Containers\n        if ($(\".overlay.radical\").hasClass(\"hidden\")) {\n            sContainer.parentElement.classList.remove(\"hidden\");\n        } else {\n            rContainer.classList.remove(\"hidden\");\n        }\n    }\n\n    // Set suggestion type\n    currentSuggestionType = result.suggestion_type;\n\n    // Set the amount of possible suggestions\n    availableSuggestions = result.suggestions.length;\n    if (availableSuggestions > 10) {\n        availableSuggestions = 10;\n    }\n\n    // Add suggestions\n    for (let i = 0; i < availableSuggestions; i++) {\n\n        // Result variables\n        let primaryResult = \"\";\n        let secondaryResult = \"\";\n\n        // Only one result\n        if (result.suggestions[i].secondary === undefined) {\n            primaryResult = result.suggestions[i].primary;\n        }\n        // Two results, kanji needs to be in the first position here\n        else {\n            primaryResult = result.suggestions[i].secondary;\n            secondaryResult = \"(\" + result.suggestions[i].primary + \")\";\n        }\n\n        // Get target page\n        var currentPage = JotoTools.getCurrentSearchType();\n\n        // Generate the /search/\n        let searchValue = \"\";\n\n        switch (currentSuggestionType) {\n            case \"kanji_reading\":\n                searchValue = encodeURIComponent(primaryResult) + \" \" + encodeURIComponent(result.suggestions[i].primary);\n                break;\n            case 
\"hashtag\":\n                let s = input.value.split(\" \");\n                searchValue = encodeURIComponent(s.slice(0, s.length-1).join(\" \")) + \" \" + encodeURIComponent(primaryResult);\n                break;\n            default:\n                searchValue = encodeURIComponent(primaryResult);\n        }\n\n        // Add to Page\n        sContainer.innerHTML += \n            ` <a onclick='${window.plausible ? `plausible(\"suggestions\", {props: {index: \"#${i}\", container: \"search\"}})` : \"\"}' href=\"/search/${searchValue}?t=${currentPage}\" class=\"search-suggestion\"> ` +\n            '   <span class=\"primary-suggestion\">'+primaryResult+'</span> ' +\n            '   <span class=\"secondary-suggestion\">'+secondaryResult+'</span> ' +\n            ' </a> ';    \n\n        rContainer.innerHTML += \n            ` <a onclick='${window.plausible ? `plausible(\"suggestions\", {props: {index: \"#${i}\", container: \"radicals\"}});` : \"\"}' href=\"/search/${searchValue}?t=${currentPage}\" class=\"search-suggestion\"> ` +\n            '   <span class=\"primary-suggestion\">'+primaryResult+'</span> ' +\n            ' </a> ';      \n    }\n}\n\n// Removes all current suggestions including shadowText\nfunction removeSuggestions() {\n    sContainer.innerHTML = \"\";\n    rContainer.innerHTML = \"\";\n    shadowText.innerHTML = \"\";\n    currentSuggestion = \"\";\n    currentSuggestionIndex = 0;\n    availableSuggestions = 0;\n    sContainer.parentElement.classList.add(\"hidden\");\n    rContainer.classList.add(\"hidden\");\n}"
  },
  {
    "path": "html/assets/js/tools/jotoTools.js",
    "content": "/*\n*   Collection-File like utils.js but that are made specifically for Jotoba\n*/\n\n// The JotoTools \"parent\"\nfunction JotoTools () {};\n\n// Creates a Jotoba-Search URL using the given parameters\nJotoTools.createUrl = function(searchText, searchType, targetPage, languageCode, sentenceIndex) {\n    let url = window.location.origin;\n\tlet hasQ = false;\n\n    if (searchText !== undefined) {\n        url += \"/search/\" + encodeURIComponent(searchText);\n    } \n\n    if (searchType !== undefined) {\n        url += \"?t=\" + searchType;\n\t\thasQ = true;\n    }\n\n    if (targetPage !== undefined) {\n        url += (!hasQ ? \"?p=\" : \"&p=\") + targetPage;\n\t\thasQ = true;\n    }\n\n    if (languageCode !== undefined) {\n        url += (!hasQ ? \"?l=\" :\"&l=\") + languageCode;\n\t\thasQ = true;\n    } else {\n        url = Util.addPageParameterIfNotNull(url, \"l\", !hasQ);\n    }\n\n    if (sentenceIndex !== undefined) {\n        url += (!hasQ ? \"?i=\" :\"&i=\") + sentenceIndex;\n\t\thasQ = true;\n    } else {\n        url = Util.addPageParameterIfNotNull(url, \"i\", !hasQ);\n    }\n\n    return url;\n}\n\n// Takes a link path starting with / and appends it to the Joto-URL (https://jotoba.de {/path})\nJotoTools.pathToUrl = function(path) {\n    return window.location.origin + path;\n}\n\n// Creates a Jotoba URL for the given page\nJotoTools.getPageUrl = function(pageName) {\n    let url = window.location.origin;\n    url += \"/\" + pageName;\n\n    return url;\n}\n\n// Returns the currently searched string\nJotoTools.getCurrentSearch = function() {\n    return document.location.pathname.split(\"/\")[2];\n}\n\n// Returns the value of the current Search [Words, Sentence...]\nJotoTools.getCurrentSearchType = function() {\n    return $('#search-type').val();\n}\n\n// Parses a language code into the Joto needs\nJotoTools.toJotobaLanguage = function(code) {\n    code = code.toLowerCase().substr(0, 2);\n    switch (code) {\n        case \"en\":\n  
          code = \"en-US\";\n            break;\n        case \"sv\":\n            code = \"sv-SE\";\n            break;\n        case \"ru\":\n            code = \"ru\";\n            break;\n        case \"hu\":\n            code = \"hu\";\n            break;\n        default:\n            code += \"-\"+code.toUpperCase();\n            if (!JotoTools.isSupportedSearchLang(code))\n                code = \"en-US\";\n    }\n    return code;\n}\n\n// Checks if a given language code is supported as a search lang\nJotoTools.isSupportedSearchLang = function(code) {\n    switch (code) {\n        case \"en-US\":\n        case \"de-DE\":\n        case \"es-ES\":\n        case \"fr-FR\":\n        case \"nl-NL\":\n        case \"sv-SE\":\n        case \"ru\":\n        case \"hu\":\n        case \"sl-SI\":\n            return true;\n        default:\n            return false;\n    }\n}\n\n// Copies the given text and echoes the given Message\nJotoTools.copyTextAndEcho = function(text, messageID) {\n    Util.copyToClipboard(text);\n    Util.showMessage(\"success\", getText(messageID));\n}"
  },
  {
    "path": "html/assets/js/tools/ripple.js",
    "content": "!function(a,b,c){a.ripple=function(d,e){var f=this,g=f.log=function(){f.defaults.debug&&console&&console.log&&console.log.apply(console,arguments)};f.selector=d,f.defaults={debug:!1,on:\"mousedown\",opacity:.4,color:\"auto\",multi:!1,duration:.7,rate:function(a){return a},easing:\"linear\"},f.defaults=a.extend({},f.defaults,e);var h=function(b){var d,e,h=a(this);if(h.addClass(\"has-ripple\"),e=a.extend({},f.defaults,h.data()),e.multi||!e.multi&&0===h.find(\".ripple-a\").length){if(d=a(\"<span></span>\").addClass(\"ripple-a\"),d.appendTo(h),g(\"Create: Ripple\"),!d.height()&&!d.width()){var i=c.max(h.outerWidth(),h.outerHeight());d.css({height:i,width:i}),g(\"Set: Ripple size\")}if(e.rate&&\"function\"==typeof e.rate){var j=c.round(d.width()/e.duration),k=e.rate(j),l=d.width()/k;e.duration.toFixed(2)!==l.toFixed(2)&&(g(\"Update: Ripple Duration\",{from:e.duration,to:l}),e.duration=l)}var m=\"auto\"==e.color?h.css(\"color\"):e.color,n={animationDuration:e.duration.toString()+\"s\",animationTimingFunction:e.easing,background:m,opacity:e.opacity};g(\"Set: Ripple CSS\",n),d.css(n)}e.multi||(g(\"Set: Ripple Element\"),d=h.find(\".ripple-a\")),g(\"Destroy: Ripple Animation\"),d.removeClass(\"ripple-animate\");var o=b.pageX-h.offset().left-d.width()/2,p=b.pageY-h.offset().top-d.height()/2;e.multi&&(g(\"Set: Ripple animationend event\"),d.one(\"animationend webkitAnimationEnd oanimationend MSAnimationEnd\",function(){g(\"Note: Ripple animation ended\"),g(\"Destroy: Ripple\"),a(this).remove()})),g(\"Set: Ripple location\"),g(\"Set: Ripple animation\"),d.css({top:p+\"px\",left:o+\"px\"}).addClass(\"ripple-animate\")};a(b).on(f.defaults.on,f.selector,h)}}(jQuery,document,Math);$.ripple.version = \"1.2.1\";\n\n$.ripple(\".ripple\", {\n\tdebug: false, // Turn Ripple.js logging on/off\n\ton: 'mousedown', // The event to trigger a ripple effect\n\n\topacity: 0.4, // The opacity of the ripple\n\tcolor: \"auto\", // Set the background color. 
If set to \"auto\", it will use the text color\n\tmulti: false, // Allow multiple ripples per element\n\n\tduration: 0.7, // The duration of the ripple\n\n\t// Filter function for modifying the speed of the ripple\n\trate: function(pxPerSecond) {\n        return pxPerSecond;\n    },\n\n\teasing: 'linear' // The CSS3 easing function of the ripple\n});"
  },
  {
    "path": "html/assets/js/tools/service-worker.js",
    "content": "// Currently unused.\n\nself.addEventListener('install', event => {\n});\n\nself.addEventListener('activate', event => {\n});\n\nself.addEventListener('fetch', event => {\n}); "
  },
  {
    "path": "html/assets/js/tools/theme.js",
    "content": "const themeEvent = new Event(\"theme-changed\");\n\n// Sets the color Theme to the given Value by passing a class to the :root element\nconst setTheme = (theme) => {\n  document.documentElement.className = theme;\n  localStorage.setItem('theme', theme);\n\n  Util.setMdlCheckboxState(\"use_dark_mode_settings\", theme === \"dark\")\n  document.dispatchEvent(themeEvent);\n}\n\n// Updates theme when changed by another tab (or console)\nwindow.addEventListener(\"storage\", () => {\n  let targetTheme = localStorage.getItem(\"theme\");\n  if (targetTheme) {\n    setTheme(targetTheme);\n  }\n})\n\n// Set theme from localStorage (if set)\nif (localStorage.getItem('theme')) {\n  setTheme(localStorage.getItem('theme'));\n}\n\n// Else, set based on prefered color scheme\nelse {\n  Util.awaitDocumentReady(() => {\n    window.matchMedia(\"(prefers-color-scheme: dark)\").matches ? setTheme(\"dark\") : setTheme(\"light\");\n  });\n}\n\n// listen for prefers-color-scheme changes\nwindow.matchMedia(\"(prefers-color-scheme: dark)\").addEventListener(\n  \"change\",\n  e => setTheme(e.matches ? \"dark\" : \"light\")\n);"
  },
  {
    "path": "html/assets/js/tools/utils.js",
    "content": "/**\n * This JS-File contains some functions that are commonly used\n */\n\n// Constants\nconst dateSettings = { year: 'numeric', month: 'short', day: 'numeric' };\n\n// The util \"parent\"\nfunction Util () {};\n\n// Runs callback fn when document is done loading DOM-Elements\nUtil.awaitDocumentInteractive = function(callback) {\n  let readyWait = window.setInterval(() => {\n      if (document.readyState == \"interactive\" || document.readyState == \"complete\") {\n          callback();\n          window.clearInterval(readyWait);\n      }\n  }, 10);\n}\n\n// Runs callback fn when document is done loading\nUtil.awaitDocumentReady = function(callback) {\n    let readyWait = window.setInterval(() => {\n        if (document.readyState == \"complete\") {\n            callback();\n            window.clearInterval(readyWait);\n        }\n    }, 10);\n}\n\n// Loads a script dynamically\nUtil.loadScript = function(url, async, attributes, callback) {\n    // Called without url? Return.\n    if (url.length == 0) {\n        return;\n    }\n\n    // Create the element\n    var s = document.createElement('script');\n    s.setAttribute('src', url);\n    s.onload = callback;\n    if (async) {\n        s.async = true;\n    }\n    \n    // Add specific attributes\n    for (let i = 0; i < attributes.length; i++) {\n        s.setAttribute(attributes[i][0], attributes[i][1]);\n    }\n\n    // Append and load\n    document.head.appendChild(s);\n}\n\n// Checks if a given element is overflown\nUtil.checkOverflow = function(el) {\n  var curOverflow = el.style.overflow;\n\n  if (!curOverflow || curOverflow === \"visible\")\n     el.style.overflow = \"hidden\";\n\n  var isOverflowing = el.clientWidth < el.scrollWidth || el.clientHeight < el.scrollHeight;\n\n  el.style.overflow = curOverflow;\n\n  return isOverflowing;\n}\n\n// Re-Encodes a decoded HTML\nUtil.decodeHtml = function(html) {\n  var doc = new DOMParser().parseFromString(html, \"text/html\");\n  return 
doc.documentElement.textContent;\n}\n\n// Changes the state of an MDL checkbox\nUtil.setMdlCheckboxState = function(id, state) {\n  if (state === undefined) {\n    return;\n  }\n\n  let element = $('label[for='+id+']');\n\n  // Only attempt to apply change if element exists.\n  if (element[0]){\n    if(state) {\n      element[0].MaterialCheckbox.check();\n    } else {\n      element[0].MaterialCheckbox.uncheck();\n    }\n  }\n}\n\n// Parses the given Unix time to a date of the given language\nUtil.toLocaleDateString = function(unixTime) {\n  return new Date(unixTime).toLocaleDateString(\"de-DE\", dateSettings);\n}   \n\n// Returns whether the current page is index or not\nUtil.isIndexPage = function() {\n  return window.location.origin+\"/\" == document.location.href;\n}\n\n// Returns whether the current page is listed under {index}/{path}\nUtil.isInPath = function(path) {\n  return document.location.href.startsWith(window.location.origin+\"/\"+path);\n}"
  },
  {
    "path": "html/assets/js/tools/utils2.js",
    "content": "/**\n * This JS-File contains some functions that are commonly used\n * This file is supposed to be loaded asynchronously. Jotoba needs some things directly so they are located in a different file.\n */\n\n// Displays the given message of type \"succes\", \"error\" or \"info\"\nUtil.showMessage = function(type, message) {\n    switch (type) {\n        case \"success\":\n            alertify.success(message);\n            break;\n        case \"error\":\n            alertify.error(message);\n            break;\n        case \"info\":\n            alertify.warning(message);\n    }\n}\n\n// Copies the given string to clipboard\nUtil.copyToClipboard = function(text) {\n    const el = document.createElement('textarea');\n    el.value = text;\n    el.setAttribute('readonly', '');\n    el.style.position = 'absolute';\n    el.style.left = '-9999px';\n    document.body.appendChild(el);\n    el.select();\n    document.execCommand('copy');\n    document.body.removeChild(el);\n}\n\n// Convert a single 0-F to 0-15\nUtil.hex2num_single = function(hex) {\n    if (hex < 10)\n        return hex;\n    switch(hex.toUpperCase()) {\n        case \"A\":\n            return 10;\n        case \"B\":\n            return 11;\n        case \"C\":\n            return 12;\n        case \"D\":\n            return 13;\n        case \"E\":\n            return 14;\n        case \"F\":\n            return 15;\n    }\n}\n\n// Convert a single 0-15 to 0-F\nUtil.num2hex_single = function(num) {\n    if (num < 10)\n        return num;\n    switch(num) {\n        case 10:\n            return \"A\";\n        case 11:\n            return \"B\";\n        case 12:\n            return \"C\";\n        case 13:\n            return \"D\";\n        case 14:\n            return \"E\";\n        case 15:\n            return \"F\";\n    }\n}\n\n// Returns the browsers true width\nUtil.getBrowserWidth = function() {\n    return Math.max(\n      document.body.scrollWidth,\n      
document.documentElement.scrollWidth,\n      document.body.offsetWidth,\n      document.documentElement.offsetWidth,\n      document.documentElement.clientWidth\n    );\n}\n  \n\n// Removes any current drag selection (not supported on IE)\nUtil.deleteSelection = function() {\n    if (window.getSelection) {\n        var selection = window.getSelection();\n        selection.empty();\n    }\n}\n\n// Scrolls to the destination in x miliseconds\nUtil.scrollTo = function (final, duration) {\n    var start = window.scrollY || document.documentElement.scrollTop,\n        currentTime = null;\n        \n    var animateScroll = function(timestamp) {\n        if (!currentTime) {\n            currentTime = timestamp;  \n        }      \n\n        let progress = timestamp - currentTime;\n\n        if(progress > duration) {\n            progress = duration;\n        }\n\n        let val = Math.easeInOutQuad(progress, start, final-start, duration);\n        window.scrollTo(0, val);\n\n        if(progress < duration) {\n            window.requestAnimationFrame(animateScroll);\n        }\n    };\n  \n    window.requestAnimationFrame(animateScroll);\n};\n  \n// Checks if child is contained in parent\nUtil.isChildOf = function (parent, child) {\n    var node = child.parentNode;\n    while (node != null) {\n        if (node == parent) {\n            return true;\n        }\n        node = node.parentNode;\n    }\n    return false;\n}\n\n\n// Splits the input by \" \" and returns the last result\nUtil.getLastWordOfString = function(s) {\n    let inputSplit = s.split(\" \");\n    return inputSplit[inputSplit.length-1];\n}\n\n// Converts a Base64 Url to a JS File\nUtil.convertDataURLtoFile = function(dataUrl, fileName) {\n    var arr = dataUrl.split(','),\n            mime = arr[0].match(/:(.*?);/)[1],\n            bstr = atob(arr[1]), \n            n = bstr.length, \n            u8arr = new Uint8Array(n);\n            \n    while(n--){\n        u8arr[n] = bstr.charCodeAt(n);\n    }\n     
   \n    return new File([u8arr], fileName, {type:mime});\n}\n\n// Sends a file to the given API endpoint; callback => function\nUtil.sendFilePostRequest = function(file, api, callback) {\n    var formData = new FormData();\n    formData.append(file.name, file);\n\n    var xhr = new XMLHttpRequest();\n    xhr.onreadystatechange = function() {\n        if (xhr.readyState == XMLHttpRequest.DONE) {\n            callback(xhr.responseText); \n        }\n    }\n\n    xhr.open(\"POST\", api);\n    xhr.send(formData);\n}\n\n// Checks if a given URL contains an image and call the corresponding callback function\nUtil.checkUrlIsImage = function(url, successCallback, errorCallback) {\n    var image = new Image();\n    image.onload = function() {\n      if (this.width > 0) {\n        successCallback();\n      }\n    }\n    image.onerror = function() {\n        errorCallback();\n    }\n    image.src = url;\n}\n\n// Used for animation curves\nMath.easeInOutQuad = function (t, b, c, d) {\n    t /= d/2;\n    if (t < 1) return c/2*t*t + b;\n    t--;\n    return -c/2 * (t*(t-2) - 1) + b;\n};\n\n// Returns the modulo of n and m but always makes them positive (-6, 4) = 2\nMath.positiveMod = function(n, m) {\n    return ((n % m) + m) % m;\n}\n\n// Opens the given URL in the current tab\nUtil.loadUrl = function(url) {\n    window.location = url;\n}\n\n// Tries to open URL in a new tab and keep focussed on current. 
Doesnt work in all browsers\nUtil.loadUrlInNewTab = function(url) {\n    window.open(url, '_blank').blur();\n    window.focus();\n}\n\n// Tries to find the given parameter in the url and returns its value\nUtil.getPageParameter = function(paramName) {\n    var url_string = window.location.href;\n    var url = new URL(url_string);\n    var p = url.searchParams.get(paramName);\n    return p;\n}\n\n// Adds a parameter to the given URL if location.href has a value set for it\nUtil.addPageParameterIfNotNull = function(url, parameter, useQuestionmark) {\n    let current = Util.getPageParameter(parameter);\n    if (current !== null) {\n        url += `${useQuestionmark ? \"?\" : \"&\"}${parameter}=${current}`;\n    }\n\n    return url;\n}\n\n// Sets a text field's cursor to the given position. -1 -> last position\nUtil.setCaretPosition = function(elemId, caretPos) {\n    var elem = document.getElementById(elemId);\n    if (caretPos == -1) {\n        caretPos = elem.value.length;\n    }\n    \n    if(elem != null) {\n        if(elem.createTextRange !== undefined) {\n            var range = elem.createTextRange();\n            range.move('character', caretPos);\n            range.select();\n        }\n        else {\n            if(elem.selectionStart !== undefined) {\n                elem.setSelectionRange(caretPos, caretPos);\n            }\n            else\n                elem.focus();\n        }\n    }\n}\n\n// Check if the current browsers doesn't want the user to be tracked\nUtil.checkTrackingAllowed = function() {\n    try {\n        if (window.doNotTrack || navigator.doNotTrack || navigator.msDoNotTrack || 'msTrackingProtectionEnabled' in window.external) {\n            if (window.doNotTrack == \"1\" || navigator.doNotTrack == \"yes\" || navigator.doNotTrack == \"1\" || navigator.msDoNotTrack == \"1\") {\n                return false;\n            } else {\n                return true;\n            }\n        } else {\n            return true;\n        }\n    } 
catch (e) {\n        return true;\n    }\n}\n\n// MDL doesn't show the scroll-arrows on start. This should help.\nUtil.mdlScrollFix = function(){\n    $(\".mdl-layout__tab-bar-right-button\").addClass(\"is-active\");\n}\n\n// Deletes all cookies whose names are within the given array\nUtil.deleteSelectedCookies = function(cookieArray) {\n    var allCookies = document.cookie.split(';');\n                \n    for (var i = 0; i < allCookies.length; i++) {\n        if (cookieArray.includes(allCookies[i])) {\n            document.cookie = allCookies[i] + \"=;expires=\"+ new Date(0).toUTCString()+\";path=/;\";\n        } else {\n            document.cookie = allCookies[i];\n        }\n    }\n}\n\n// Deletes all stored cookies\nUtil.deleteAllCookies = function() {\n    var allCookies = document.cookie.split(';');\n                \n    for (var i = 0; i < allCookies.length; i++) {\n        document.cookie = allCookies[i] + \"=;expires=\"+ new Date(0).toUTCString()+\";path=/;\";\n    }\n}\n\n// Parses the given value into a boolean\nUtil.toBoolean = function(value, defaultValue) {\n    switch (value) {\n        case 0:\n        case \"0\":\n        case \"false\":\n        case false:\n            return false;\n        case 1:\n        case \"1\":\n        case \"true\":\n        case true:\n            return true;\n        default:\n            if (defaultValue)\n                return defaultValue;\n            return false;\n    }\n}"
  },
  {
    "path": "html/assets/settings/manifest.json",
    "content": "{\n    \"name\": \"Jotoba\",\n    \"short_name\": \"Jotoba\",\n    \"start_url\": \"/\",\n    \"display\": \"standalone\",\n    \"background_color\": \"#fff\",\n    \"description\": \"Jotoba is a powerful and free Japanese dictionary.\",\n    \"icons\": [\n    {\n      \"src\": \"/assets/jotokun/JotoHead.png\",\n      \"sizes\": \"512x512\",\n      \"type\": \"image/png\"\n    }, {\n      \"src\": \"/assets/jotokun/favicon.png\",\n      \"sizes\": \"32x32\",\n      \"type\": \"image/png\"\n    }]\n  }\n  "
  },
  {
    "path": "html/assets/settings/opensearch.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<OpenSearchDescription xmlns=\"http://a9.com/-/spec/opensearch/1.1/\"\n        xmlns:suggestions=\"http://www.opensearch.org/specifications/opensearch/extensions/suggestions/1.1\">\n    <ShortName>Jotoba</ShortName>\n    <Description>Japanese dictionary search</Description>\n    <Image width=\"16\" height=\"16\" type=\"image/png\">https://jotoba.de/assets/jotokun/favicon.png</Image>\n    <Url template=\"https://jotoba.de/api/os-suggestions?q={searchTerms}\" rel=\"suggestions\" type=\"application/x-suggestions+json\"/>\n    <Url type=\"text/html\" template=\"https://jotoba.de/search?s={searchTerms}\" rel=\"results\"/>\n    <Url type=\"application/opensearchdescription+xml\" rel=\"self\" template=\"https://jotoba.de/assets/settings/opensearch.xml\" />\n</OpenSearchDescription>\n"
  },
  {
    "path": "jotoba_bin/Cargo.toml",
    "content": "[package]\nname = \"jotoba\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\nlicense = \"GPLv3\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nnews = { path = \"../lib/news\" }\ntypes = { path = \"../lib/types\" }\nsentence_reader = { path = \"../lib/sentence_reader\" }\nsearch = { path = \"../lib/search\" }\napi = { path = \"../lib/api\" }\nfrontend = { path = \"../lib/frontend\" }\nconfig = { path = \"../lib/config\" }\nerror = { path =\"../lib/error\"}\nlocalization = { path = \"../lib/localization\" }\nresources = { path = \"../lib/resources\" }\nindexes = { path = \"../lib/indexes\", features = [\"parallel\"] }\nactix-files = \"0.6.2\"\nactix-web = \"4.3.1\"\nargparse = \"0.2.2\"\nenv_logger = \"0.10.0\"\nlog = \"0.4.19\"\nsentry = { version = \"0.31.5\", optional = true }\nrayon = \"1.7.0\"\nsnmalloc-rs = \"0.3.4\"\n#ngindex = { path = \"../../ngindex\" }\nngindex = { git = \"https://github.com/JojiiOfficial/ngindex\"}\nactix-web-httpauth = \"*\"\n\n[features]\ndefault = [\"img_scan\"]\n\nsentry_error = [\"sentry\", \"frontend/sentry_error\"]\nimg_scan = [\"api/img_scan\"]\n\n[dev-dependencies]\ncriterion = \"0.5.1\"\njapanese = { path = \"../lib/japanese\" }\n\n[[bench]]\nname = \"my_benchmark\"\nharness = false\n\n[[bench]]\nname = \"resources\"\nharness = false\n"
  },
  {
    "path": "jotoba_bin/benches/my_benchmark.rs",
    "content": "use criterion::{criterion_group, criterion_main, Criterion};\nuse search::{\n    executor::SearchExecutor,\n    query::{parser::QueryParser, Query, UserSettings},\n    word,\n};\nuse types::jotoba::{language::Language, search::SearchTarget};\n\n#[global_allocator]\nstatic ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc;\n\nfn get_query(inp: &str, query_type: SearchTarget) -> Query {\n    let mut settings = UserSettings::default();\n    settings.user_lang = Language::German;\n    settings.show_english = true;\n    QueryParser::new(inp.to_string(), query_type, settings)\n        .parse()\n        .unwrap()\n}\n\nfn load() {\n    rayon::scope(move |s| {\n        s.spawn(move |_| {\n            resources::load(\"../resources/storage_data\").unwrap();\n        });\n        s.spawn(move |_| {\n            indexes::storage::load(\"../resources/indexes\").unwrap();\n        });\n        s.spawn(|_| {\n            // load ja nl parser since its lazy\n            sentence_reader::load_parser(\"../resources/unidic-mecab\")\n        });\n    });\n}\n\nfn criterion_benchmark(c: &mut Criterion) {\n    load();\n\n    c.bench_function(\"search word: kanji\", |b| {\n        let query = get_query(\"kanji\", SearchTarget::Words);\n        b.iter(|| search(&query))\n    });\n\n    c.bench_function(\"search word: jp\", |b| {\n        let query = get_query(\"おはよう\", SearchTarget::Words);\n        b.iter(|| search(&query))\n    });\n\n    c.bench_function(\"search kanji reading\", |b| {\n        let query = get_query(\"事 ジ\", SearchTarget::Words);\n        b.iter(|| search(&query))\n    });\n}\n\n#[inline]\nfn search(query: &Query) {\n    let _res = SearchExecutor::new(word::Search::new(&query)).run();\n}\n\ncriterion_group!(benches, criterion_benchmark);\ncriterion_main!(benches);\n"
  },
  {
    "path": "jotoba_bin/benches/resources.rs",
    "content": "use criterion::{black_box, criterion_group, criterion_main, Criterion};\n\n#[global_allocator]\nstatic ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc;\n\nfn load() {\n    resources::load(\"../resources/storage_data\").unwrap();\n}\n\nfn criterion_benchmark(c: &mut Criterion) {\n    load();\n\n    c.bench_function(\"Get Kanji\", |b| {\n        b.iter(|| {\n            //let  = resources::get().words();\n            resources::get().kanji().by_literal(black_box('跡'));\n        })\n    });\n\n    let tests: Vec<&'static [char]> = vec![&['囗'], &['一'], &['囗', '一'], &['口'], &['口', '一']];\n    c.bench_function(\"Find by radicals\", |b| {\n        b.iter(|| {\n            for i in &tests {\n                api::app::radical::kanji::find_kanji(black_box(i));\n            }\n        })\n    });\n\n    c.bench_function(\"Find by radicals light\", |b| {\n        b.iter(|| {\n            api::app::radical::kanji::find_kanji(black_box(&['首']));\n        })\n    });\n}\n\ncriterion_group!(benches, criterion_benchmark);\ncriterion_main!(benches);\n"
  },
  {
    "path": "jotoba_bin/src/check.rs",
    "content": "use crate::webserver::prepare_data;\nuse config::Config;\nuse ngindex::index_framework::traits::{backend::Backend, storage::IndexStorage};\nuse types::jotoba::language::Language;\n\n/// Checks resources and returns `true` if required features are available\npub fn resources() -> bool {\n    let res = resources::get();\n    if res.check() {\n        return true;\n    }\n\n    log::error!(\n        \"Missing required features: {:?}\",\n        res.missing_but_required()\n    );\n\n    false\n}\n\n/// Checks integrity of all resources. Jotoba (should) work perfectly\n/// if this function does not fail (ignoring all the bugs and ugly code)\npub fn check() {\n    let res = check_all();\n\n    if res {\n        println!(\"Success\");\n    } else {\n        println!(\"Failed\");\n    }\n}\n\nfn check_all() -> bool {\n    println!(\"Loading data\");\n    let config = Config::new(None).expect(\"Config invalid\");\n    prepare_data(&config);\n\n    println!(\"Testing resources\");\n    let res = resources();\n\n    println!(\"Testing indexes\");\n    let ind = indexes();\n\n    res && ind\n}\n\nfn indexes() -> bool {\n    words() && names() && sentences() && regex()\n}\n\nfn sentences() -> bool {\n    let sentence_retrieve = resources::get().sentences();\n\n    let fg_index = indexes::get().sentence().foreign();\n\n    for language in Language::iter_word() {\n        for id in fg_index.storage().iter().map(|i| *i.document()) {\n            if sentence_retrieve.by_id(id).is_none() {\n                println!(\"Sentence index ({language:?}) don't not match\");\n                return false;\n            }\n        }\n    }\n\n    let jp_index = indexes::get().sentence().native();\n    for id in jp_index.storage().iter().map(|i| *i.document()) {\n        if sentence_retrieve.by_id(id).is_none() {\n            println!(\"Sentence index (Japanese) don't not match\");\n            return false;\n        }\n    }\n\n    true\n}\n\nfn names() -> bool {\n    let 
name_retrieve = resources::get().names();\n\n    let transcr_index = indexes::get().name().foreign();\n    for i in transcr_index.storage().iter().map(|i| *i.item()) {\n        if name_retrieve.by_sequence(i).is_none() {\n            println!(\"Foreign name index does not match resources\");\n            return false;\n        }\n    }\n\n    let jp_index = indexes::get().name().native();\n    for i in jp_index.storage().iter().map(|i| *i.item()) {\n        if name_retrieve.by_sequence(i).is_none() {\n            println!(\"Japanese name index does not match resources\");\n            return false;\n        }\n    }\n\n    true\n}\n\nfn words() -> bool {\n    let word_retrieve = resources::get().words();\n\n    for language in Language::iter_word() {\n        let w_index = indexes::get()\n            .word()\n            .foreign(language)\n            .expect(&format!(\"Missing index {:?}\", language));\n\n        for doc_vec in w_index.storage().iter() {\n            let seq_id = *doc_vec.document();\n            if word_retrieve.by_sequence(seq_id).is_none() {\n                println!(\"Word and Index don't match\");\n                return false;\n            }\n        }\n    }\n\n    let jp_index = indexes::get().word().native();\n    for vec in jp_index.storage().iter() {\n        if word_retrieve.by_sequence(*vec.item()).is_none() {\n            println!(\"Word and (Japanese) Index don't match\");\n            return false;\n        }\n    }\n\n    true\n}\n\nfn regex() -> bool {\n    let w_retrieve = resources::get().words();\n\n    let regex_index = indexes::get().word().regex();\n    for (_, words) in regex_index.iter() {\n        if words.iter().any(|i| w_retrieve.by_sequence(*i).is_none()) {\n            println!(\"Regex index invalid\");\n            return false;\n        }\n    }\n\n    true\n}\n"
  },
  {
    "path": "jotoba_bin/src/cli.rs",
    "content": "use std::process::exit;\n\nuse argparse::{ArgumentParser, Print, StoreTrue};\n\n/// Command line arguments\n#[derive(Default)]\npub struct Options {\n    /// Start the server\n    pub start: bool,\n    pub debug: bool,\n    pub check_resources: bool,\n}\n\n// Parse CLI args\npub fn parse() -> Options {\n    let mut options = Options::default();\n    {\n        let mut ap = ArgumentParser::new();\n        ap.set_description(\"A multilang japanese dictionary\");\n\n        ap.add_option(\n            &[\"-V\", \"--version\"],\n            Print(env!(\"CARGO_PKG_VERSION\").to_string()),\n            \"Show version\",\n        );\n\n        ap.refer(&mut options.start)\n            .add_option(&[\"--start\", \"-s\"], StoreTrue, \"Start the server\");\n\n        ap.refer(&mut options.debug)\n            .add_option(&[\"--debug\", \"-d\"], StoreTrue, \"Run in debug mode\");\n\n        ap.refer(&mut options.check_resources).add_option(\n            &[\"--check\", \"-c\"],\n            StoreTrue,\n            \"Check resources\",\n        );\n\n        ap.parse_args_or_exit();\n    }\n\n    if options.check_resources && options.start {\n        println!(\"Can't use start and check_resources at once\");\n        exit(1);\n    }\n\n    options\n}\n"
  },
  {
    "path": "jotoba_bin/src/main.rs",
    "content": "#![allow(irrefutable_let_patterns)]\n\n// Benchmarks say this is up to 50% faster\n#[global_allocator]\nstatic ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc;\n\nmod check;\nmod cli;\nmod webserver;\n\n#[actix_web::main]\npub async fn main() {\n    let options = cli::parse();\n\n    // Check resources on --check/-c\n    if options.check_resources {\n        check::check();\n        return;\n    }\n\n    // Start the webserver on --stat/-s\n    if options.start {\n        webserver::start(options).await.expect(\"webserver failed\");\n        return;\n    }\n\n    // User didn't read the docs\n    println!(\"Nothing to do. Use `-s` to start the dictionary\");\n}\n"
  },
  {
    "path": "jotoba_bin/src/webserver.rs",
    "content": "use actix_files::NamedFile;\nuse actix_web_httpauth::{extractors::bearer::BearerAuth, middleware::HttpAuthentication};\nuse error::api_error::RestError;\nuse indexes::storage::suggestions;\nuse localization::TranslationDict;\n\nuse actix_web::{\n    dev::ServiceRequest,\n    http::{\n        header::{ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_ORIGIN, CACHE_CONTROL},\n        StatusCode,\n    },\n    middleware::{self, Compat, Compress},\n    web::{self as actixweb, Data},\n    App, Error, HttpRequest, HttpResponse, HttpResponseBuilder, HttpServer,\n};\nuse config::Config;\nuse log::{debug, warn};\nuse std::{path::Path, sync::Arc, thread, time::Instant};\n\nuse crate::{check, cli::Options};\n\n/// How long frontend assets are going to be cached by the clients. Currently 1 week\nconst ASSET_CACHE_MAX_AGE: u64 = 604800;\n\n/// Start the webserver\npub(super) async fn start(options: Options) -> std::io::Result<()> {\n    if options.debug {\n        println!(\"DEBUG MODE ENABLED\");\n        rayon::ThreadPoolBuilder::new()\n            .num_threads(1)\n            .build_global()\n            .unwrap();\n    }\n\n    setup_logger();\n\n    let start = Instant::now();\n\n    let config = Config::new(None).expect(\"config failed\");\n    if options.debug {\n        println!(\"{config:#?}\");\n    }\n\n    prepare_data(&config);\n\n    let locale_dict_arc = load_translations(&config);\n\n    #[cfg(feature = \"sentry_error\")]\n    setup_sentry(&config);\n\n    let address = config.server.listen_address.clone();\n\n    if !check() {\n        return Ok(());\n    }\n\n    debug!(\"Resource loading took {:?}\", start.elapsed());\n    debug_info();\n\n    HttpServer::new(move || {\n        let app = App::new()\n            // Data\n            .app_data(Data::new(config.clone()))\n            .app_data(Data::new(locale_dict_arc.clone()))\n            // Middlewares\n            .wrap(middleware::Logger::default())\n            .service(\n                
actixweb::resource(\"/\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(frontend::index::index)),\n            )\n            .service(actixweb::resource(\"/robots.txt\").route(actixweb::get().to(robotstxt)))\n            .service(\n                actixweb::resource(\"/ready\").route(actixweb::get().to(frontend::liveness::ready)),\n            )\n            .service(\n                actixweb::resource(\"/healthy\")\n                    .route(actixweb::get().to(frontend::liveness::healthy)),\n            )\n            .service(\n                actixweb::resource(\"/docs.html\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(docs)),\n            )\n            .service(\n                actixweb::resource(\"/sitemap.xml\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(sitemap)),\n            )\n            .service(\n                actixweb::resource(\"/privacy\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(privacy)),\n            )\n            .service(\n                actixweb::resource(\"/service-worker.js\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(service_worker)),\n            )\n            .service(\n                actixweb::resource(\"/search/{query}\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(frontend::search_ep::search_ep)),\n            )\n            .service(\n                actixweb::resource(\"/search\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(frontend::search_ep::search_ep_no_js)),\n            )\n            
.service(\n                actixweb::resource(\"/direct/{type}/{id}\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(frontend::direct::direct_ep)),\n            )\n            .service(\n                actixweb::resource(\"/about\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(frontend::about::about)),\n            )\n            .service(\n                actixweb::resource(\"/news\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(frontend::news_ep::news)),\n            )\n            .service(\n                actixweb::resource(\"/help\")\n                    .wrap(Compat::new(middleware::Compress::default()))\n                    .route(actixweb::get().to(frontend::help_page::help)),\n            )\n            .default_service(actix_web::Route::new().to(frontend::web_error::not_found))\n            // API\n            .service(\n                actixweb::scope(\"/api\")\n                    .wrap(\n                        middleware::DefaultHeaders::new()\n                            .add((ACCESS_CONTROL_ALLOW_ORIGIN, \"*\"))\n                            .add((ACCESS_CONTROL_ALLOW_HEADERS, \"Content-Type\")),\n                    )\n                    .wrap(Compat::new(Compress::default()))\n                    .route(\"/\", actixweb::get().to(docs))\n                    .default_service(actix_web::Route::new().to(docs))\n                    .service(\n                        actixweb::scope(\"app\")\n                            .route(\n                                \"k_comps\",\n                                actixweb::post().to(api::app::search::kanji::reading_compounds),\n                            )\n                            .route(\n                                \"kanji\",\n                                
actixweb::post().to(api::app::search::kanji::search),\n                            )\n                            .route(\n                                \"names\",\n                                actixweb::post().to(api::app::search::names::search),\n                            )\n                            .route(\n                                \"sentences\",\n                                actixweb::post().to(api::app::search::sentences::search),\n                            )\n                            .route(\n                                \"words\",\n                                actixweb::post().to(api::app::search::words::search),\n                            )\n                            .service(\n                                actixweb::scope(\"details\")\n                                    .route(\n                                        \"word\",\n                                        actixweb::post().to(api::app::details::word::details),\n                                    )\n                                    .route(\n                                        \"sentence\",\n                                        actixweb::post()\n                                            .to(api::app::details::sentences::details_ep),\n                                    ),\n                            ),\n                    )\n                    .service(\n                        actixweb::scope(\"search\")\n                            .route(\"words\", actixweb::post().to(api::search::word::word_search))\n                            .route(\n                                \"kanji\",\n                                actixweb::post().to(api::search::kanji::kanji_search),\n                            )\n                            .route(\"names\", actixweb::post().to(api::search::name::name_search))\n                            .route(\n                                \"sentences\",\n                                
actixweb::post().to(api::search::sentence::sentence_search),\n                            ),\n                    )\n                    .service(\n                        actixweb::scope(\"internal\")\n                            .wrap(HttpAuthentication::bearer(internal_validator))\n                            .service(actixweb::scope(\"info\").route(\n                                \"words\",\n                                actixweb::post().to(api::internal::info::words::word_info),\n                            )),\n                    )\n                    .service(\n                        actixweb::scope(\"kanji\")\n                            .route(\n                                \"by_radical\",\n                                actixweb::post().to(api::app::radical::kanji_by_radicals),\n                            )\n                            .route(\n                                \"decompgraph\",\n                                actixweb::post().to(api::app::kanji::ids_tree::decomp_graph),\n                            ),\n                    )\n                    .route(\n                        \"/radical/search\",\n                        actixweb::post().to(api::app::radical::search::search_radical),\n                    )\n                    .route(\n                        \"/suggestion\",\n                        actixweb::post().to(api::app::completions::suggestion_ep),\n                    )\n                    .route(\n                        \"/os-suggestions\",\n                        actixweb::get().to(api::app::completions::opensearch::suggestion_ep),\n                    )\n                    .route(\"/img_scan\", actixweb::post().to(api::app::img::scan_ep))\n                    .route(\n                        \"/news/short\",\n                        actixweb::post().to(api::app::news::short::news),\n                    )\n                    .route(\n                        \"/news/detailed\",\n                        
actixweb::post().to(api::app::news::detailed::news),\n                    ),\n            )\n            // Static files\n            .service(\n                actixweb::scope(\"/audio\")\n                    .wrap(\n                        middleware::DefaultHeaders::new()\n                            .add((CACHE_CONTROL, format!(\"max-age={}\", ASSET_CACHE_MAX_AGE))),\n                    )\n                    .service(\n                        actix_files::Files::new(\"\", config.server.get_audio_files())\n                            .show_files_listing(),\n                    ),\n            )\n            .service(\n                actixweb::scope(\"/assets\")\n                    .wrap(\n                        middleware::DefaultHeaders::new()\n                            .add((CACHE_CONTROL, format!(\"max-age={}\", ASSET_CACHE_MAX_AGE)))\n                            .add((ACCESS_CONTROL_ALLOW_ORIGIN, \"*\"))\n                            .add((ACCESS_CONTROL_ALLOW_HEADERS, \"Content-Type\")),\n                    )\n                    .wrap(Compat::new(Compress::default()))\n                    .service(\n                        actix_files::Files::new(\"\", config.server.get_html_files())\n                            .show_files_listing(),\n                    ),\n            )\n            .service(\n                actixweb::scope(\"/variable_assets/{oma}/assets\")\n                    .wrap(\n                        middleware::DefaultHeaders::new()\n                            .add((CACHE_CONTROL, format!(\"max-age={}\", ASSET_CACHE_MAX_AGE))),\n                    )\n                    .wrap(Compat::new(Compress::default()))\n                    .service(\n                        actix_files::Files::new(\"\", config.server.get_html_files())\n                            .show_files_listing(),\n                    ),\n            );\n\n        //#[cfg(feature = \"sentry_error\")]\n        //let app = app.wrap(sentry_actix::Sentry::new());\n\n        
app\n    })\n    .bind(&address)?\n    .run()\n    .await\n}\n\nasync fn service_worker(config: Data<Config>, _req: HttpRequest) -> actix_web::Result<NamedFile> {\n    serve_html_file(config, \"js/tools/service-worker.js\").await\n}\n\nasync fn privacy(config: Data<Config>, _req: HttpRequest) -> actix_web::Result<NamedFile> {\n    serve_html_file(config, \"privacypolicy.html\").await\n}\n\nasync fn sitemap(config: Data<Config>, _req: HttpRequest) -> actix_web::Result<NamedFile> {\n    serve_html_file(config, \"sitemap.xml\").await\n}\n\nasync fn serve_html_file(config: Data<Config>, file: &str) -> actix_web::Result<NamedFile> {\n    let htmlpath = Path::new(config.server.get_html_files());\n    let path = htmlpath.join(file);\n    Ok(NamedFile::open(path)?)\n}\n\nasync fn robotstxt(_req: HttpRequest) -> HttpResponse {\n    HttpResponseBuilder::new(StatusCode::OK).body(\n        r#\"User-Agent: *\nAllow: /\nSitemap: https://jotoba.com/sitemap.xml\"#,\n    )\n}\n\nasync fn docs(config: Data<Config>, _req: HttpRequest) -> actix_web::Result<NamedFile> {\n    let htmlpath = Path::new(config.server.get_html_files());\n    let filepath = Path::new(\"docs.html\");\n    let path = htmlpath.join(filepath);\n    Ok(NamedFile::open(path)?)\n}\n\npub(crate) fn prepare_data(ccf: &Config) {\n    let cf = ccf.clone();\n    thread::spawn(move || {\n        suggestions::load(cf.get_suggestion_sources()).expect(\"Failed to load suggestions\");\n        log::debug!(\"Suggestions loaded\");\n    });\n\n    rayon::scope(move |s| {\n        let cf = ccf.clone();\n        s.spawn(move |_| {\n            log::debug!(\"Loading Resources\");\n            load_resources(&cf.get_storage_data_path());\n        });\n\n        let cf = ccf.clone();\n        s.spawn(move |_| {\n            log::debug!(\"Loading Indexes\");\n            load_indexes(&cf);\n        });\n\n        let cf = ccf.clone();\n        s.spawn(move |_| {\n            log::debug!(\"Loading tokenizer\");\n            
load_tokenizer(&cf);\n        });\n\n        let cf = ccf.clone();\n        s.spawn(move |_| clean_img_scan_dir(&cf));\n\n        let cf = ccf.clone();\n        s.spawn(move |_| {\n            log::debug!(\"Loading News\");\n            if let Err(err) = news::News::init(cf.server.get_news_folder()) {\n                warn!(\"Failed to load news: {}\", err);\n            }\n        });\n    });\n}\n\nfn setup_logger() {\n    env_logger::init_from_env(env_logger::Env::new().default_filter_or(\"debug\"));\n}\n\npub fn load_tokenizer(config: &Config) {\n    sentence_reader::load_parser(&config.get_unidic_dict());\n}\n\n/// Clears uploaded images which haven't been cleared yet\nfn clean_img_scan_dir(config: &Config) {\n    let path = config.get_img_scan_upload_path();\n    let path = Path::new(&path);\n    if !path.exists() || !path.is_dir() {\n        return;\n    }\n    std::fs::remove_dir_all(&path).expect(\"Failed to clear img scan director\");\n}\n\nfn debug_info() {\n    log::debug!(\"All features: {:?}\", resources::Feature::all());\n    log::debug!(\"Supported: {:?}\", resources::get().get_features());\n    log::debug!(\"Not supported: {:?}\", resources::get().missing_features());\n}\n\npub fn load_resources(src: &str) {\n    let start = Instant::now();\n    resources::load(src).expect(\"Failed to load resource storage\");\n    debug!(\"Resources took: {:?}\", start.elapsed());\n}\n\nfn load_translations(config: &Config) -> Arc<TranslationDict> {\n    let locale_dict = TranslationDict::new(\n        config.server.get_locale_path(),\n        localization::language::Language::English,\n    )\n    .expect(\"Failed to load localization files\");\n\n    Arc::new(locale_dict)\n}\n\npub fn load_indexes(config: &Config) {\n    indexes::storage::load(config.get_indexes_source()).expect(\"Failed to load index files\");\n}\n\nfn check() -> bool {\n    if !check::resources() {\n        log::error!(\"Not all required data found! 
Exiting\");\n        return false;\n    }\n\n    if !indexes::get().check() {\n        log::error!(\"Not all indexes are available!\");\n        return false;\n    }\n\n    /*\n    if !indexes::get_suggestions().check() {\n        log::error!(\"Not all suggestion indexes are available!\");\n        //return false;\n    }\n    */\n\n    true\n}\n\n#[cfg(feature = \"sentry_error\")]\nfn setup_sentry(config: &Config) {\n    if let Some(ref sentry_config) = config.sentry {\n        use std::mem::ManuallyDrop;\n\n        // We want to run sentry all the time so don't drop here\n        let _guard = ManuallyDrop::new(sentry::init((\n            sentry_config.dsn.as_str(),\n            sentry::ClientOptions {\n                release: sentry::release_name!(),\n                ..Default::default()\n            },\n        )));\n\n        std::env::set_var(\"RUST_BACKTRACE\", \"1\");\n    }\n}\n\nasync fn internal_validator(\n    req: ServiceRequest,\n    credentials: BearerAuth,\n) -> Result<ServiceRequest, (Error, ServiceRequest)> {\n    let config = req.app_data::<Data<Config>>().unwrap();\n\n    let key = &config.server.internal_api_key;\n    if key.is_empty() || key != credentials.token() {\n        let err: Error = RestError::Unauthorized.into();\n        return Err((err, req));\n    }\n\n    Ok(req)\n}\n"
  },
  {
    "path": "lib/api/Cargo.toml",
    "content": "[package]\nname = \"api\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\n\n[dependencies]\njapanese = { path = \"../japanese\" }\nengine = { path = \"../engine\" }\nnews = { path = \"../news\" }\nerror = { path = \"../error\", features = [\"web_error\"] }\nsentence_reader = { path = \"../sentence_reader\" }\nsearch = { path = \"../search\" }\nutils = { path = \"../utils\" }\nconfig = { path = \"../config\" }\nresources = { path = \"../resources\" }\nindexes = { path = \"../indexes\" }\ntypes = { path = \"../types\", default-features = false }\nactix-web = \"4.3.1\"\nactix-multipart = \"0.6.0\"\nitertools = \"0.11.0\"\nonce_cell = { version = \"1.18.0\", default-features = false }\nserde = \"1.0.171\"\nlog = \"0.4.19\"\nwana_kana = { git = \"https://github.com/WeDontPanic/wana_kana_rust\" }\n#jpeudex = { path = \"../../../jpeudex\"}\njpeudex = { git = \"https://github.com/JojiiOfficial/jpeudex\" }\nbincode = \"1.3.3\"\nregex = { version = \"1.7.1\", features = [\"std\"], default-features = false }\n#autocompletion = { path = \"../../../AutoCompletionFramework\" }\nautocompletion = { git = \"https://github.com/WeDontPanic/AutoCompletionFramework\" }\nintmap = { git = \"https://github.com/JojiiOfficial/rust-intmap\" }\n#priority_container = \"0.1.1\"\npriority_container = { git = \"https://github.com/JojiiOfficial/PrioContainer/\" }\nfutures = { version = \"0.3.28\", optional = true }\nleptess = { version = \"0.14.0\", optional = true }\norder_struct = { git = \"https://github.com/JojiiOfficial/OrderStruct\" }\n#ids_parser = { path = \"../../../ids_parser\" }\nids_parser = { git = \"https://github.com/JojiiOfficial/IDS-Parser\" }\n#index_framework = { path = \"../../../index_framework\" }\nindex_framework = { git = \"https://github.com/WeDontPanic/index_framework\" }\nserde_json = \"1.0.100\"\njp_utils = { git = \"https://github.com/JojiiOfficial/jp_utils\"}\n\n[features]\ndefault = []\nimg_scan = [\"leptess\", 
\"futures\"]\n"
  },
  {
    "path": "lib/api/src/app/completions/kanji/meaning.rs",
    "content": "use super::super::{convert_results, words::foreign::try_romaji, Response};\nuse autocompletion::suggest::{\n    extension::{ngram::NGramExtension, similar_terms::SimilarTermsExtension},\n    query::SuggestionQuery,\n    task::SuggestionTask,\n};\nuse search::query::Query;\n\n/// Returns kanji meaning suggestions\npub fn suggestions(query: &Query) -> Option<Response> {\n    let index = indexes::get_suggestions().kanji_meanings();\n\n    let mut suggestion_task = SuggestionTask::new(30);\n\n    let mut def_query = SuggestionQuery::new(index, &query.query_str);\n    let mut ng_ext = NGramExtension::new(index);\n    ng_ext.options.weights.freq_weight = 0.5;\n    ng_ext.options.weights.total_weight = 0.7;\n    def_query.add_extension(ng_ext);\n\n    suggestion_task.add_query(def_query);\n\n    if let Some(hira_query) = try_romaji(&query.query_str) {\n        let jp_index = indexes::get_suggestions().jp_words();\n        let mut rom_sug_query = SuggestionQuery::new(jp_index, hira_query);\n        rom_sug_query.weights.total_weight = 0.5;\n\n        let mut similar_terms = SimilarTermsExtension::new(jp_index, 4);\n        similar_terms.options.weights.total_weight = 0.2;\n        rom_sug_query.add_extension(similar_terms);\n\n        suggestion_task.add_query(rom_sug_query);\n    }\n\n    let suggestions = convert_results(suggestion_task.search());\n    Some(Response::new(suggestions))\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/kanji/mod.rs",
    "content": "pub mod meaning;\npub mod reading;\n\nuse search::query::{Query, QueryLang};\nuse types::api::app::completions::Response;\nuse wana_kana::to_romaji::to_romaji;\n\n/// Returns kanji suggestions\npub(crate) fn suggestions(query: Query) -> Option<Response> {\n    match query.q_lang {\n        QueryLang::Foreign => meaning::suggestions(&query),\n        QueryLang::Japanese => japanese_suggestions(&query),\n        /*\n        QueryLang::Korean => todo!(),\n        QueryLang::Undetected => todo!(),\n        */\n        _ => None,\n    }\n}\n\nfn japanese_suggestions(query: &Query) -> Option<Response> {\n    let romaji = to_romaji(query.query_str.as_str());\n    let mut suggestions = super::words::native::suggestions(&query, &romaji, &[])?;\n\n    // romev entries without kanji\n    suggestions.retain(|i| i.secondary.is_some());\n\n    Some(Response {\n        suggestions,\n        ..Default::default()\n    })\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/kanji/reading.rs",
    "content": "use engine::Engine;\nuse index_framework::traits::{\n    backend::Backend, dictionary::IndexDictionary, postings::IndexPostings,\n};\nuse japanese::ToKanaExt;\nuse order_struct::order_nh::OrderVal;\nuse priority_container::PrioContainerMax;\nuse search::engine::words::native::k_reading;\nuse types::{\n    api::app::completions::{Response, SuggestionType, WordPair},\n    jotoba::kanji,\n};\n\n/// Gets suggestions for kanji reading search eg: \"痛 いた.い\"\npub fn suggestions(kanji_reading: kanji::reading::ReadingSearch) -> Option<Response> {\n    let kanji_storage = resources::get().kanji();\n\n    let query_reading = kanji_reading\n        .reading\n        .replace(\"。\", \"\")\n        .replace(\".\", \"\")\n        .to_hiragana();\n\n    let kanji = kanji_storage.by_literal(kanji_reading.literal)?;\n\n    let mut queue = PrioContainerMax::new(30);\n\n    let iter = kanji\n        .kunyomi\n        .iter()\n        .chain(kanji.onyomi.iter())\n        .map(|i| WordPair::with_secondary(i.clone(), kanji.literal.to_string()))\n        .map(|wp| {\n            let score = score(kanji.literal, &wp.primary, &query_reading);\n            OrderVal::new(wp, score)\n        });\n    queue.extend(iter);\n\n    if queue.is_empty() {\n        return None;\n    }\n\n    let mut vec: Vec<_> = queue.into_iter().map(|i| i.0.into_inner()).collect();\n    vec.reverse();\n\n    Some(Response::with_type(vec, SuggestionType::KanjiReading))\n}\n\nfn score(literal: char, reading: &str, query: &str) -> usize {\n    let mut score = 0;\n\n    // Show written prefixes on top\n    if query.len() > 0 && starts_with(reading, query) {\n        score += 1000000;\n    }\n\n    // Show readings with more results first\n    let index = k_reading::Engine::get_index(None);\n    let score_qurey = format!(\"{}{}\", literal, reading);\n    if let Some(term_id) = index.dict().get_id(&score_qurey) {\n        let posting = index.postings(0).unwrap().get_posting(term_id);\n        score += 
(posting.len() as f32).log(1.01).floor() as usize;\n    }\n\n    score\n}\n\n#[inline]\nfn starts_with(word: &str, reading: &str) -> bool {\n    word.replace(\".\", \"\").to_hiragana().starts_with(reading)\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/mod.rs",
    "content": "mod kanji;\nmod names;\npub mod opensearch;\nmod request;\nmod words;\n\nuse actix_web::web::Json;\nuse jp_utils::JapaneseExt;\nuse search::query::{Form, Query};\nuse types::{\n    api::app::completions::{Request, Response, SuggestionType, WordPair},\n    jotoba::{kanji::reading::ReadingSearch, search::SearchTarget},\n};\nuse words::hashtag;\n\npub async fn suggestion_ep(payload: Json<Request>) -> Result<Json<Response>, actix_web::Error> {\n    Ok(Json(suggestion_ep_inner(payload.into_inner())?))\n}\n\n/// Get search suggestions endpoint\npub(crate) fn suggestion_ep_inner(payload: Request) -> Result<Response, actix_web::Error> {\n    request::validate(&payload)?;\n\n    if payload.hashtag {\n        let suggestions = hashtag::suggestions(&payload.input, payload.search_target);\n        if let Some(res) = suggestions {\n            return Ok(Response::with_type(res, SuggestionType::Hashtag));\n        }\n        return Ok(Response::default());\n    }\n\n    // Adjust payload and parse to query\n    let (query, radicals) = request::get_query(request::adjust(payload))?;\n\n    // Eg. 
when tags get parsed, the query becomes empty\n    if query.query_str.trim().is_empty() {\n        return Ok(Response::default());\n    }\n\n    Ok(get_suggestions(query, radicals))\n}\n\n/// Returns best matching suggestions for the given query\nfn get_suggestions(query: Query, radicals: Vec<char>) -> Response {\n    let res = match query.target {\n        SearchTarget::Kanji => kanji::suggestions(query),\n        SearchTarget::Names => names::suggestions(query),\n        SearchTarget::Words | SearchTarget::Sentences => {\n            if let Some(kanji_reading) = as_kanji_reading(&query) {\n                kanji::reading::suggestions(kanji_reading)\n            } else {\n                words::suggestions(query, &radicals)\n            }\n        }\n    };\n\n    res.unwrap_or_default()\n}\n\n/// Returns Some(KanjiReading) if query is or 'could be' a kanji reading query.\n/// \"Could be\" means that a kanji-reading search is being types. This the case\n/// if a single kanji and a space is written in the current query\nfn as_kanji_reading(query: &Query) -> Option<ReadingSearch> {\n    match &query.form {\n        Form::KanjiReading(r) => Some(r.clone()),\n        _ => {\n            let mut query_str = query.raw_query.chars();\n            let first = query_str.next()?;\n            let second = query_str.next()?;\n\n            if first.is_kanji() && second == ' ' {\n                Some(ReadingSearch {\n                    reading: String::new(),\n                    literal: first,\n                })\n            } else {\n                None\n            }\n        }\n    }\n}\n\n/// Converts engine output to a set of `WordPair`\n#[inline]\npub(crate) fn convert_results(engine_output: Vec<autocompletion::index::Output>) -> Vec<WordPair> {\n    engine_output\n        .into_iter()\n        .map(|i| WordPair {\n            primary: i.primary,\n            secondary: i.secondary,\n        })\n        .collect()\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/names/mod.rs",
    "content": "use super::{convert_results, Response};\nuse autocompletion::suggest::{\n    extension::ngram::NGramExtension, query::SuggestionQuery, task::SuggestionTask,\n};\nuse japanese::ToKanaExt;\nuse search::query::{Query, QueryLang};\nuse wana_kana::to_katakana::to_katakana;\n\n/// Returns name suggestions\npub(crate) fn suggestions(query: Query) -> Option<Response> {\n    match query.q_lang {\n        QueryLang::Japanese => native_suggestions(&query),\n        QueryLang::Foreign => transcription_suggestions(&query),\n        _ => None,\n    }\n}\n\n/// Returns trascripted name suggestions\npub fn transcription_suggestions(query: &Query) -> Option<Response> {\n    let query_str = &query.query_str;\n    let index = indexes::get_suggestions().names_foreign();\n\n    let mut task = SuggestionTask::new(30);\n\n    let mut def_query = SuggestionQuery::new(index, query_str);\n    let ng_ext = NGramExtension::new(index);\n    def_query.add_extension(ng_ext);\n\n    task.add_query(def_query);\n\n    if let Some(romaji_query) = super::words::foreign::try_romaji(query_str) {\n        let jp_index = indexes::get_suggestions().names_native();\n        task.add_query(SuggestionQuery::new(jp_index, romaji_query.clone()));\n\n        let katakana = to_katakana(romaji_query.as_str());\n        if katakana != romaji_query {\n            task.add_query(SuggestionQuery::new(index, katakana));\n        }\n    }\n\n    let suggestions = convert_results(task.search());\n    Some(Response::new(suggestions))\n}\n\n/// Returns native name suggestions\npub fn native_suggestions(query: &Query) -> Option<Response> {\n    let query_str = &query.query_str;\n\n    let index = indexes::get_suggestions().names_native();\n    let mut task = SuggestionTask::new(30);\n\n    let mut def_query = SuggestionQuery::new(index, query_str);\n    let ng_ext = NGramExtension::new(index);\n    def_query.add_extension(ng_ext);\n\n    task.add_query(def_query);\n\n    let katakana = 
to_katakana(query_str.as_str());\n    if &katakana != query_str {\n        task.add_query(SuggestionQuery::new(index, katakana));\n    }\n\n    let hiragana = query_str.to_hiragana();\n    if &hiragana != query_str {\n        task.add_query(SuggestionQuery::new(index, hiragana));\n    }\n\n    let suggestions = convert_results(task.search());\n    Some(Response {\n        suggestions,\n        ..Default::default()\n    })\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/opensearch/mod.rs",
    "content": "mod parse;\n\nuse actix_web::web;\nuse serde::Deserialize;\nuse types::{api::app::completions::Request, jotoba::search::SearchTarget};\n\n#[derive(Deserialize)]\npub struct EPQuery {\n    q: String,\n}\n\npub async fn suggestion_ep(query: web::Query<EPQuery>) -> Result<String, actix_web::Error> {\n    let raw_query = query.into_inner().q;\n    let parsed = parse::parse(raw_query.clone());\n\n    let s_target = parsed.search_target().unwrap_or(SearchTarget::Words);\n    let query = make_request(parsed.query.clone(), s_target);\n\n    let suggestions = get_suggestions(query)?;\n\n    Ok(gen_output(suggestions, raw_query))\n}\n\nfn get_suggestions(query: Request) -> Result<Vec<String>, actix_web::Error> {\n    let s_target = query.search_target;\n    let res = super::suggestion_ep_inner(query)?\n        .suggestions\n        .iter()\n        .map(|i| {\n            let mut s = i.secondary_preferred().to_string();\n            if s_target != SearchTarget::Words {\n                s.push_str(&format!(\" #{s_target:?}\"));\n            }\n            s\n        })\n        .collect::<Vec<_>>();\n    Ok(res)\n}\n\nfn gen_output(suggestions: Vec<String>, raw_query: String) -> String {\n    let mut data =\n        serde_json::to_string(&[suggestions, vec![], vec![]]).unwrap_or_else(|_| \"\".to_string());\n    if data.len() > 2 {\n        data = data[1..(data.len() - 1)].to_string();\n    }\n    format!(\"[\\\"{raw_query}\\\",{data}]\")\n}\n\nfn make_request(inp: String, search_target: SearchTarget) -> Request {\n    Request {\n        input: inp,\n        lang: \"en\".to_string(),\n        search_target,\n        radicals: vec![],\n        hashtag: false,\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/opensearch/parse.rs",
    "content": "use search::query::{parser::QueryParser, Tag, UserSettings};\nuse types::jotoba::search::SearchTarget;\n\npub(crate) fn parse(inp: String) -> Parsed {\n    let query = QueryParser::new(inp.clone(), SearchTarget::Words, UserSettings::default()).parse();\n\n    if query.is_none() {\n        return Parsed::new(inp.to_string());\n    }\n\n    let query = query.unwrap();\n    let tags = query.tags;\n    Parsed::with_tags(query.query_str, tags)\n}\n\npub(crate) struct Parsed {\n    pub query: String,\n    pub tags: Vec<Tag>,\n}\n\nimpl Parsed {\n    #[inline]\n    fn new(query: String) -> Self {\n        Self {\n            query,\n            tags: vec![],\n        }\n    }\n\n    #[inline]\n    fn with_tags(query: String, tags: Vec<Tag>) -> Self {\n        Self { query, tags }\n    }\n\n    #[inline]\n    pub fn search_target(&self) -> Option<SearchTarget> {\n        self.tags\n            .iter()\n            .find(|i| i.is_search_type())\n            .map(|i| i.as_search_type().unwrap())\n            .copied()\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/request.rs",
    "content": "use std::str::FromStr;\n\nuse error::api_error::RestError;\nuse jp_utils::JapaneseExt;\nuse search::query::{self, parser::QueryParser, Query, QueryLang, UserSettings};\nuse types::{api::app::completions::Request, jotoba::language::Language};\nuse utils::real_string_len;\n\n/// Adjust the query and returns a newly allocated one\npub(crate) fn adjust(request: Request) -> Request {\n    let mut query_str = request.input.to_string();\n    let query_len = real_string_len(&request.input);\n\n    // Some inputs place the roman letter of the japanese text while typing with romanized input.\n    // If input is japanese but last character is a romanized letter, strip it off\n\n    let lang = query::parser::lang::parse(&query_str);\n\n    if lang == QueryLang::Japanese && query_str.ends_with(\"ｎ\") {\n        query_str = query_str.replace(\"ｎ\", \"ん\");\n    }\n\n    let last_chars = query_str.chars().rev().take(2).collect::<Vec<_>>();\n    if lang == QueryLang::Japanese\n        && !last_chars.iter().any(|i| !i.is_japanese())\n        && query_len > 1\n        && !last_chars.is_empty()\n    {\n        let len: usize = last_chars\n            .into_iter()\n            .filter(|i| i.is_roman_letter())\n            .map(|i| i.len_utf8())\n            .sum();\n        query_str = query_str[..query_str.len() - len].to_string();\n    }\n\n    Request {\n        input: query_str.to_owned(),\n        ..request\n    }\n}\n\n/// Returns a `Query` based on the `Request`\npub(crate) fn get_query(request: Request) -> Result<(Query, Vec<char>), RestError> {\n    let query_str = request.input.trim_start().to_string();\n\n    let search_type = request.search_target;\n\n    let settings = UserSettings {\n        user_lang: get_language(&request),\n        ..UserSettings::default()\n    };\n\n    // Build and parse the query\n    let query = QueryParser::new(query_str, search_type, settings)\n        .parse()\n        .ok_or(RestError::BadRequest)?;\n\n    Ok((query, 
request.radicals))\n}\n\n/// Returns the user configured language of the [`Request`]\n#[inline]\npub(crate) fn get_language(request: &Request) -> Language {\n    Language::from_str(&request.lang).unwrap_or_default()\n}\n\n/// Validates the API request payload\npub(crate) fn validate(payload: &Request) -> Result<(), RestError> {\n    let query_len = real_string_len(&payload.input.trim());\n    if (query_len < 1 && !payload.hashtag) || query_len > 37 {\n        return Err(RestError::BadRequest.into());\n    }\n    Ok(())\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/words/foreign.rs",
    "content": "use autocompletion::suggest::{\n    extension::{\n        kanji_align::KanjiAlignExtension, ngram::NGramExtension,\n        similar_terms::SimilarTermsExtension,\n    },\n    query::SuggestionQuery,\n    task::SuggestionTask,\n};\nuse japanese::{\n    guessing::{could_be_romaji, is_romaji_repl},\n    to_hira_fmt,\n};\nuse types::jotoba::language::Language;\nuse utils::real_string_len;\n\nuse super::super::*;\n\n/// Returns suggestions based on non japanese input\npub fn suggestions(query: &Query, query_str: &str) -> Option<Vec<WordPair>> {\n    let query_lower = autocompletion::index::basic::basic_format(query_str.trim());\n    let mut task = SuggestionTask::new(30);\n\n    let lang = query.settings.language();\n\n    // Default search query\n    task.add_query(new_suggestion_query(&query_lower, lang)?);\n\n    // Add results for english\n    if query.settings.show_english() {\n        let mut en_sugg_query = new_suggestion_query(&query_lower, Language::English)?;\n        en_sugg_query.weights.total_weight = 0.75;\n        en_sugg_query.weights.freq_weight = 0.15;\n        task.add_query(en_sugg_query);\n    }\n\n    // Romaji result\n    //if let Some(hira_query) = try_romaji(query_str.trim()) {\n    let hira_query =\n        try_romaji(query_str.trim()).unwrap_or_else(|| japanese::to_hira_fmt(query_str));\n    //let hira_query = query_str.to_hiragana();\n    println!(\"hira query: {hira_query}\");\n    let jp_engine = indexes::get_suggestions().jp_words();\n    let mut rom_query = SuggestionQuery::new(jp_engine, hira_query.clone());\n    if could_be_romaji(query_str) {\n        rom_query.weights.total_weight = 0.99;\n    } else {\n        rom_query.weights.total_weight = 0.5;\n    }\n    /*\n    query.weights.freq_weight = 0.1;\n    query.weights.str_weight = 1.9;\n    */\n\n    let mut k_r_align = KanjiAlignExtension::new(jp_engine);\n    k_r_align.options.weights.freq_weight = 1.0;\n    k_r_align.options.threshold = 5;\n    
rom_query.add_extension(k_r_align);\n\n    let mut similar_terms = SimilarTermsExtension::new(jp_engine, 14);\n    similar_terms.options.threshold = 10;\n    similar_terms.options.weights.total_weight = 0.75;\n    similar_terms.options.weights.freq_weight = 0.2;\n    similar_terms.options.weights.str_weight = 1.8;\n    similar_terms.options.min_query_len = 4;\n    rom_query.add_extension(similar_terms);\n\n    let mut ng_ext = NGramExtension::with_sim_threshold(jp_engine, 0.4);\n    ng_ext.options.threshold = 5;\n    ng_ext.options.weights.total_weight = 0.25;\n    ng_ext.options.weights.freq_weight = 0.02;\n    ng_ext.query_weigth = 0.15;\n    ng_ext.options.limit = 100;\n    ng_ext.query_weigth = 0.05;\n    ng_ext.options.min_query_len = 5;\n    ng_ext.cust_query = Some(hira_query.clone());\n    rom_query.add_extension(ng_ext);\n\n    task.set_rel_mod(|i, rel| {\n        let out = i.to_output();\n        let kana = &out.primary;\n        if japanese::romaji_prefix(query_str.trim(), &kana) {\n            return rel + 1000;\n        }\n        rel\n    });\n\n    task.add_query(rom_query);\n    //}\n\n    Some(convert_results(task.search()))\n}\n\nfn new_suggestion_query(query: &str, lang: Language) -> Option<SuggestionQuery> {\n    let engine = indexes::get_suggestions().foreign_words(lang)?;\n\n    let mut suggestion_query = SuggestionQuery::new(engine, &query);\n    suggestion_query.weights.str_weight = 1.5;\n    suggestion_query.weights.freq_weight = 0.5;\n\n    let mut ng_ex = NGramExtension::with_sim_threshold(engine, 0.5);\n    ng_ex.options.weights.total_weight = 0.7;\n    ng_ex.options.weights.freq_weight = 0.05;\n    ng_ex.query_weigth = 0.05;\n    ng_ex.options.min_query_len = 5;\n    ng_ex.options.limit = 100;\n    ng_ex.options.threshold = 5;\n    suggestion_query.add_extension(ng_ex);\n\n    Some(suggestion_query)\n}\n\n/// Returns Some(String) if `query_str` could be (part of) romaji search input and None if not\npub(crate) fn try_romaji(query_str: 
&str) -> Option<String> {\n    let mut query_str = query_str.replace(\"-\", \"ー\");\n    if query_str.ends_with(\"m\") {\n        query_str.pop();\n    }\n    let query_str = &query_str;\n\n    let str_len = real_string_len(query_str);\n    if str_len < 3 || query_str.contains(' ') {\n        return None;\n    }\n\n    if let Some(v) = is_romaji_repl(query_str) {\n        return Some(to_hira_fmt(&v));\n    }\n\n    if str_len < 3 {\n        return None;\n    }\n\n    // 'n' is the only hiragana with with=1 in romaji so allow them\n    // to be treated properly too\n    let min_len = 3usize.saturating_sub(query_str.chars().filter(|i| *i == 'n').count());\n\n    // Strip one to avoid switching between romaji/normal results\n    if str_len > min_len {\n        let prefix = strip_str_end(query_str, 1);\n        if let Some(v) = is_romaji_repl(prefix) {\n            return Some(to_hira_fmt(&v));\n        }\n    }\n\n    // shi ending needs more stripping but also more existing romaji to not\n    // heavily overlap with other results\n    if str_len >= min_len + 2 && end_three_char_kana(query_str) {\n        let prefix = strip_str_end(query_str, 2);\n        if let Some(v) = is_romaji_repl(prefix) {\n            return Some(to_hira_fmt(&v));\n        }\n    }\n\n    None\n}\n\n/// Returns a substring of `inp` with `len` amount of tailing characters being removed.\n/// This works for non UTF-8 as well. 
If len > |inp| \"\" gets returned\n#[inline]\npub fn strip_str_end(inp: &str, len: usize) -> &str {\n    match inp.char_indices().rev().nth(len - 1).map(|i| i.0) {\n        Some(end) => &inp[..end],\n        None => \"\",\n    }\n}\n\n/// Returns `true` if `s` ends with 2 of 3 3-char kana romaji\n#[inline]\nfn end_three_char_kana(s: &str) -> bool {\n    [\n        \"sh\", \"ch\", \"ts\", \"hy\", \"ky\", \"ny\", \"my\", \"gy\", \"ry\", \"by\", \"py\",\n    ]\n    .iter()\n    .any(|i| s.ends_with(i))\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    #[test]\n    fn test_strip_end() {\n        let inp = \"これはかっこいいテキスト\";\n        assert_eq!(strip_str_end(inp, 1), \"これはかっこいいテキス\");\n        assert_eq!(strip_str_end(inp, 2), \"これはかっこいいテキ\");\n        assert_eq!(strip_str_end(inp, 3), \"これはかっこいいテ\");\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/words/hashtag.rs",
    "content": "use index_framework::traits::{backend::Backend, storage::IndexStorage};\nuse std::ops::Deref;\nuse types::{api::app::completions::WordPair, jotoba::search::SearchTarget};\n\npub fn suggestions(query: &str, search_target: SearchTarget) -> Option<Vec<WordPair>> {\n    if query.trim().is_empty() {\n        return Some(empty(search_target));\n    }\n\n    let index = indexes::get_suggestions().hashtags();\n    let res = index.ngram_search(query, &[search_target]);\n    let max = res.first()?.1;\n\n    let out: Vec<_> = res\n        .into_iter()\n        .filter(|i| i.1 >= max - 0.4)\n        .map(|i| WordPair::new(i.0.tag.clone()))\n        .collect();\n\n    Some(out)\n}\n\nfn empty(search_target: SearchTarget) -> Vec<WordPair> {\n    let start = std::time::Instant::now();\n    let index = &indexes::get_suggestions().hashtags();\n    let ngindex = index.index.deref();\n\n    let mut out: Vec<_> = ngindex\n        .storage()\n        .iter()\n        .map(|i| index.get(i.into_item() as usize).unwrap())\n        .filter(|i| i.s_targets.contains(&search_target))\n        .collect();\n\n    out.sort_by(|a, b| a.freq.total_cmp(&b.freq).reverse());\n\n    let res = out\n        .into_iter()\n        .take(10)\n        .map(|i| WordPair::new(i.tag.clone()))\n        .collect();\n    println!(\"took: {:?}\", start.elapsed());\n    res\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/words/kana_end_ext.rs",
    "content": "use autocompletion::{\n    index::{\n        japanese::{Item, JapaneseIndex},\n        IndexItem,\n    },\n    relevance::{item::EngineItem, RelevanceCalc},\n    suggest::{\n        extension::{Extension, ExtensionOptions},\n        query::SuggestionQuery,\n    },\n};\nuse jp_utils::JapaneseExt;\nuse priority_container::PrioContainerMax;\n\n#[derive(Clone, Copy)]\npub struct KanaEndExtension<'a> {\n    pub options: ExtensionOptions,\n    index: &'a JapaneseIndex,\n    max_dist: u32,\n}\n\nimpl<'a> KanaEndExtension<'a> {\n    /// Create a new Longest-Prefix Extension\n    pub fn new(index: &'a JapaneseIndex, max_dist: u32) -> Self {\n        let mut options = ExtensionOptions::default();\n        options.weights.freq_weight = 0.01;\n        Self {\n            options,\n            index,\n            max_dist,\n        }\n    }\n}\n\nimpl<'a> Extension<'a> for KanaEndExtension<'a> {\n    #[inline]\n    fn run(&self, query: &SuggestionQuery, rel_weight: f64) -> Vec<EngineItem<'a>> {\n        let query_str = &query.query_str;\n\n        let first_char = &query.query_str.chars().nth(0).unwrap();\n        let last_char = &query.query_str.chars().last().unwrap();\n        if !first_char.is_kanji() || !last_char.is_kana() {\n            return vec![];\n        }\n\n        let mut parts: Vec<_> = jp_utils::tokenize::by_alphabet(&query_str, true)\n            .filter(|i| !i.trim().is_empty())\n            .collect();\n        if parts.len() != 2 {\n            return vec![];\n        }\n\n        let kanji_part = parts.remove(0);\n        let kana_part = parts.remove(0);\n        let kana_hash = jpeudex::Hash::new(kana_part);\n\n        let rel_weight = rel_weight * self.options.weights.total_weight;\n        let mut out = PrioContainerMax::new(self.options.limit);\n\n        let rel_calc = RelevanceCalc::new(self.options.weights).with_total_weight(rel_weight);\n\n        let items = self.index.trie.iter_prefix_str(kanji_part);\n        for j in 
items.map(|i| i.1).flatten() {\n            let word = self.index.get_item(*j);\n            if word.kanji.is_none() {\n                continue;\n            }\n\n            let similarity = match word_similarity(word, kanji_part, kana_part, &kana_hash) {\n                Some(s) => s,\n                None => continue,\n            };\n            if similarity > self.max_dist {\n                continue;\n            }\n\n            let mut item = word.into_engine_item();\n            let str_rel = item.inner().str_relevance(&query.query_str);\n            let rel = rel_calc.calc(&item, str_rel);\n            item.set_relevance(rel);\n            out.insert(item);\n        }\n\n        let out = out.into_iter().map(|i| i.0).collect::<Vec<_>>();\n        let rel_calc = RelevanceCalc::new(self.options.weights).with_total_weight(rel_weight);\n        query.order_items(out, rel_calc)\n    }\n\n    #[inline]\n    fn should_run(&self, already_found: usize, _query: &SuggestionQuery) -> bool {\n        self.options.enabled && already_found < self.options.threshold\n    }\n\n    #[inline]\n    fn get_options(&self) -> &ExtensionOptions {\n        &self.options\n    }\n}\n\n#[inline]\nfn word_similarity(\n    item: &Item,\n    kanji: &str,\n    kana: &str,\n    kana_hash: &Option<jpeudex::Hash>,\n) -> Option<u32> {\n    let item_kanji = item.kanji.as_ref().unwrap();\n    if item.kana.ends_with(kana) && item_kanji.starts_with(kanji) {\n        return Some(0);\n    }\n\n    if let Some(found_sub) = find_kana_str(&item.kana, kana) {\n        let item_part = &item.kana[found_sub..];\n        let l = item_part.chars().count();\n        let kana_len = kana.chars().count();\n        return Some((l - kana_len) as u32 * 2);\n    }\n\n    if let Some(kana_hash) = &kana_hash {\n        let item_kana_hash = jpeudex::Hash::new(&item.kana)?;\n        let dist = (item_kana_hash - *kana_hash).dist();\n        return Some(dist);\n    }\n\n    None\n}\n\n/// Requires `full_kana` to be 
longer than `end_sub`\nfn find_kana_str(full_kana: &str, end_sub: &str) -> Option<usize> {\n    full_kana.match_indices(end_sub).last().map(|i| i.0)\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/words/mod.rs",
    "content": "pub mod foreign;\npub mod hashtag;\npub mod kana_end_ext;\npub mod native;\n\nuse std::{cmp::Ordering, time::Instant};\n\nuse jp_utils::JapaneseExt;\nuse search::query::{Query, QueryLang};\nuse types::api::app::completions::{Response, WordPair};\nuse utils::bool_ord;\nuse wana_kana::{to_katakana::to_katakana, to_romaji::to_romaji};\n\n/// Returns word suggestions based on the query. Applies various approaches to give better results\npub(crate) fn suggestions(query: Query, radicals: &[char]) -> Option<Response> {\n    let response = try_word_suggestions(&query, radicals)?;\n\n    // Tries to do a katakana search if nothing was found\n    let result = if response.is_empty() && query.query_str.is_hiragana() {\n        try_word_suggestions(&get_katakana_query(&query), radicals)?\n    } else {\n        response\n    };\n\n    Some(Response::new(result))\n}\n\n/// Returns Ok(suggestions) for the given query ordered and ready to display\nfn try_word_suggestions(query: &Query, radicals: &[char]) -> Option<Vec<WordPair>> {\n    let start = Instant::now();\n    // Get sugesstions for matching language\n\n    let romaji_query = to_romaji(query.query_str.as_str());\n\n    let word_pairs = match query.q_lang {\n        QueryLang::Japanese => native::suggestions(&query, &romaji_query, radicals)?,\n        QueryLang::Foreign | QueryLang::Undetected | QueryLang::Korean => {\n            let mut res = foreign::suggestions(&query, &query.query_str).unwrap_or_default();\n\n            // Order: put exact matches to top\n            res.sort_by(|a, b| word_pair_order(a, b, &query.query_str));\n            res\n        }\n    };\n    log::debug!(\"Suggestions took: {:?}\", start.elapsed());\n\n    Some(word_pairs)\n}\n\n/// Ordering for [`WordPair`]s which puts the exact matches to top\n#[inline]\nfn word_pair_order(a: &WordPair, b: &WordPair, query: &str) -> Ordering {\n    bool_ord(a.has_reading(&query), b.has_reading(&query))\n}\n\n/// Returns an equivalent katakana 
query\nfn get_katakana_query(query: &Query) -> Query {\n    Query {\n        query_str: to_katakana(query.query_str.as_str()),\n        ..query.clone()\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/completions/words/native.rs",
    "content": "use super::{super::*, kana_end_ext::KanaEndExtension};\nuse autocompletion::{\n    index::{str_item::StringItem, IndexItem},\n    suggest::{\n        extension::{\n            kanji_align::KanjiAlignExtension, ngram::NGramExtension,\n            similar_terms::SimilarTermsExtension,\n        },\n        query::SuggestionQuery,\n        task::SuggestionTask,\n    },\n};\nuse wana_kana::ConvertJapanese;\n\nconst MAX_SENTENCE_LEN: usize = 15;\n\n/// Get suggestions for foreign search input\npub fn suggestions(query: &Query, _romaji_query: &str, radicals: &[char]) -> Option<Vec<WordPair>> {\n    let jp_engine = indexes::get_suggestions().jp_words();\n    let query_str = query.query_str.as_str();\n\n    let mut suggestion_task = SuggestionTask::new(30);\n\n    let mut main_sugg_query = SuggestionQuery::new(jp_engine, query_str);\n    main_sugg_query.weights.str_weight = 1.2;\n\n    // Kanji reading align (くにうた ー＞ 国歌)\n    let mut k_r_align = KanjiAlignExtension::new(jp_engine);\n    k_r_align.options.weights.freq_weight = 1.0;\n    k_r_align.options.threshold = 5;\n    main_sugg_query.add_extension(k_r_align);\n\n    // Find 天気予報 even if 天気よほう was written\n    let mut kana_end_ext = KanaEndExtension::new(jp_engine, 10);\n    kana_end_ext.options.weights.total_weight = 0.45;\n    kana_end_ext.options.weights.freq_weight = 0.4;\n    main_sugg_query.add_extension(kana_end_ext);\n\n    let (norm_form, sentence) = normalize_inflections(query_str);\n    if let Some(normalized) = norm_form {\n        let mut norm_query = SuggestionQuery::new(jp_engine, normalized);\n        norm_query.threshold = 10;\n        norm_query.weights.total_weight = 0.75;\n        norm_query.weights.freq_weight = 0.5;\n        suggestion_task.add_query(norm_query);\n    }\n\n    // Fix typos\n    let mut ng_ex = NGramExtension::with_sim_threshold(jp_engine, 0.5);\n    ng_ex.options.weights.freq_weight = 0.05;\n    ng_ex.query_weigth = 0.7;\n    //ng_ex.cust_query = 
Some(&romaji_query);\n    ng_ex.cust_query = Some(query_str.to_owned());\n    main_sugg_query.add_extension(ng_ex);\n\n    // Similar terms based on pronounciation\n    let mut ste = SimilarTermsExtension::new(jp_engine, 16);\n    ste.options.threshold = 10;\n    ste.options.weights.total_weight = 0.45;\n    ste.options.weights.freq_weight = 0.05;\n    //ste.options.weights.str_weight = 1.4;\n    main_sugg_query.add_extension(ste);\n\n    suggestion_task.add_query(main_sugg_query);\n\n    // Add katakana results\n    if query_str.has_kana() {\n        let kanaquery = query_str.to_katakana();\n        if kanaquery != query_str {\n            let mut kana_query = SuggestionQuery::new(jp_engine, kanaquery);\n            kana_query.weights.total_weight = 0.8;\n            suggestion_task.add_query(kana_query);\n        }\n    }\n\n    let sentence_len = sentence.len();\n    let items: Vec<_> = sentence\n        .into_iter()\n        .filter(|i| !i.is_empty())\n        .map(|w| StringItem::new(w, 0.0))\n        .collect();\n    let items: Vec<_> = items\n        .iter()\n        .enumerate()\n        .map(|(pos, i)| {\n            let mut engine_item = i.into_engine_item();\n            engine_item.set_relevance((sentence_len - pos) as u16);\n            engine_item\n        })\n        .collect();\n    if sentence_len > 0 && sentence_len <= MAX_SENTENCE_LEN {\n        suggestion_task.add_custom_entries(items);\n    }\n\n    // radical filter\n    let word_res = resources::get().words();\n    suggestion_task.set_filter(move |item| {\n        if radicals.is_empty() {\n            return true;\n        }\n\n        let word = match word_res.by_sequence(item.word_id()) {\n            Some(word) => word,\n            None => return true,\n        };\n        word_rad_filter(query_str, word, radicals)\n    });\n\n    Some(convert_results(suggestion_task.search()))\n}\n\npub(crate) fn normalize_inflections(query_str: &str) -> (Option<String>, Vec<String>) {\n    let parse_res 
= sentence_reader::Parser::new(query_str).parse();\n\n    if let sentence_reader::output::ParseResult::InflectedWord(word) = parse_res {\n        return (Some(word.get_normalized()), vec![]);\n    }\n\n    if let sentence_reader::output::ParseResult::Sentence(sentence) = parse_res {\n        let items: Vec<_> = sentence\n            .iter()\n            .filter_map(|i| {\n                let wc = i.word_class_raw();\n                if wc.is_space() || wc.is_symbol() || wc.is_particle() {\n                    return None;\n                }\n                Some(i.get_normalized())\n            })\n            .collect();\n        return (None, items);\n    }\n\n    (None, vec![])\n}\n\nfn word_rad_filter(query: &str, word: &types::jotoba::words::Word, radicals: &[char]) -> bool {\n    let kanji = match word.reading.kanji.as_ref() {\n        Some(k) => &k.reading,\n        None => return false,\n    };\n\n    let retrieve = resources::get().kanji();\n\n    let query_kanji = query.chars().filter(|i| i.is_kanji()).collect::<Vec<_>>();\n\n    kanji\n        .chars()\n        // Don't apply on existing kanji\n        .filter(|i| !query_kanji.contains(&i))\n        .filter_map(|k| k.is_kanji().then(|| retrieve.by_literal(k)).flatten())\n        .any(|k| {\n            if !k.parts.is_empty() {\n                return utils::part_of(radicals, &k.parts);\n            }\n            false\n        })\n}\n"
  },
  {
    "path": "lib/api/src/app/details/mod.rs",
    "content": "pub mod sentences;\npub mod word;\n"
  },
  {
    "path": "lib/api/src/app/details/sentences.rs",
    "content": "use crate::app::{search::sentences::convert_sentence, Result};\nuse actix_web::web::{Data, Json};\nuse config::Config;\nuse engine::task::SearchTask;\nuse error::api_error::RestError;\nuse jp_utils::JapaneseExt;\nuse search::{engine::words::native::Engine, word::order::native::NativeOrder};\nuse sentence_reader::output::ParseResult;\nuse types::{\n    api::app::{\n        details::{query::DetailsPayload, sentence},\n        search::responses::{kanji::Kanji, words::Word},\n    },\n    jotoba::{sentences::Sentence, words::filter_languages},\n};\n\npub async fn details_ep(\n    payload: Json<DetailsPayload>,\n    config: Data<Config>,\n) -> Result<Json<sentence::Details>> {\n    Ok(Json(\n        sentence_details(&payload, &config).ok_or(RestError::NotFound)?,\n    ))\n}\n\nfn sentence_details(payload: &DetailsPayload, config: &Config) -> Option<sentence::Details> {\n    let sentence = resources::get().sentences().by_id(payload.sequence)?;\n\n    let kanji = get_kanji(sentence);\n\n    let words = get_words(sentence, payload, config);\n\n    let sentence =\n        search::sentence::result::Sentence::from_m_sentence(sentence, payload.lang_param())?;\n\n    let sentence = convert_sentence(sentence);\n    Some(sentence::Details::new(sentence, words, kanji))\n}\n\nfn get_kanji(sentence: &Sentence) -> Vec<Kanji> {\n    let kanji_iter = sentence.japanese.chars().filter(|i| i.is_kanji());\n\n    let mut out: Vec<Kanji> = vec![];\n\n    for k_lit in kanji_iter {\n        if let Some(kanji) = resources::get().kanji().by_literal(k_lit) {\n            out.push(kanji.to_owned().into());\n        }\n    }\n\n    out\n}\n\nfn get_words(sentence: &Sentence, payload: &DetailsPayload, config: &Config) -> Vec<Word> {\n    let parsed = sentence_reader::Parser::new(&sentence.japanese).parse();\n\n    match parsed {\n        ParseResult::Sentence(s) => s\n            .iter()\n            .map(|i| i.get_normalized())\n            .filter_map(|i| find_word(&i, payload, 
config))\n            .collect::<Vec<_>>(),\n        ParseResult::InflectedWord(i) => find_word(&i.get_normalized(), payload, config)\n            .map(|i| vec![i])\n            .unwrap_or_default(),\n        ParseResult::None => vec![],\n    }\n}\n\nfn find_word(w: &str, payload: &DetailsPayload, config: &Config) -> Option<Word> {\n    let mut task = SearchTask::<Engine>::new(w)\n        .with_limit(4)\n        .with_threshold(0.8)\n        .with_custom_order(NativeOrder::new(w.to_string()));\n\n    let res = task.find();\n    if res.len() == 0 {\n        return None;\n    }\n\n    let mut word = vec![res.into_inner().remove(0).item.clone()];\n    filter_languages(word.iter_mut(), payload.lang_param());\n    let word = super::super::conv_word(word.remove(0), payload.language, config);\n\n    Some(word)\n}\n"
  },
  {
    "path": "lib/api/src/app/details/word.rs",
    "content": "use crate::app::Result;\nuse actix_web::web::{Data, Json};\nuse config::Config;\nuse error::api_error::RestError;\nuse jp_utils::JapaneseExt;\nuse types::{\n    api::app::{\n        details::{\n            query::DetailsPayload,\n            word::{self, TransitivityPair},\n        },\n        search::responses::{kanji::Kanji, words::Word},\n    },\n    jotoba::language::Language,\n};\n\npub async fn details(\n    payload: Json<DetailsPayload>,\n    config: Data<Config>,\n) -> Result<Json<word::Details>> {\n    Ok(Json(\n        Details::new(&payload)\n            .ok_or(RestError::NotFound)?\n            .get_details(&config),\n    ))\n}\n\npub(crate) struct Details<'a> {\n    payload: &'a DetailsPayload,\n    word: &'static types::jotoba::words::Word,\n}\n\nimpl<'a> Details<'a> {\n    #[inline]\n    fn new(payload: &'a DetailsPayload) -> Option<Self> {\n        let word = resources::get().words().by_sequence(payload.sequence)?;\n        Some(Details { payload, word })\n    }\n\n    fn get_details(&self, config: &Config) -> word::Details {\n        let kanji = self.get_kanji();\n        let has_sentence = self.has_sentence();\n        let transitivity_pair = self.transitivity_pair();\n        let collocations = self.get_collocations(config);\n        let inflection_table = self.word.get_inflections();\n\n        let word = self.get_word(config);\n\n        word::Details::new(\n            word,\n            kanji,\n            inflection_table,\n            collocations,\n            has_sentence,\n            transitivity_pair,\n        )\n    }\n\n    fn get_kanji(&self) -> Vec<Kanji> {\n        let retrieve = resources::get().kanji();\n\n        self.word\n            .get_reading()\n            .reading\n            .chars()\n            .filter_map(|i| i.is_kanji().then(|| i).and_then(|k| retrieve.by_literal(k)))\n            .map(|i| (*i).clone().into())\n            .collect::<Vec<_>>()\n    }\n\n    #[inline]\n    fn has_sentence(&self) -> 
bool {\n        self.word.has_sentence(self.payload.language)\n            || (self.payload.show_english && self.word.has_sentence(Language::English))\n    }\n\n    fn transitivity_pair(&self) -> Option<TransitivityPair> {\n        if let Some(trans) = self.word.transive_version {\n            return Some(TransitivityPair::Transitive(trans.get()));\n        }\n\n        if let Some(intrans) = self.word.intransive_version {\n            return Some(TransitivityPair::Intransitive(intrans.get()));\n        }\n\n        None\n    }\n\n    fn get_collocations(&self, config: &Config) -> Vec<Word> {\n        let collocations = match &self.word.collocations {\n            Some(colloc) => colloc,\n            None => return vec![],\n        };\n        let retrieve = resources::get().words();\n\n        collocations\n            .iter()\n            .filter_map(|i| {\n                let word = retrieve.by_sequence(*i)?;\n                Some(self.format_word(word, config))\n            })\n            .collect()\n    }\n\n    #[inline]\n    fn get_word(&self, config: &Config) -> Word {\n        self.format_word(self.word, config)\n    }\n\n    #[inline]\n    fn format_word(&self, word: &types::jotoba::words::Word, config: &Config) -> Word {\n        let mut word = word.clone();\n        word.adjust_language(self.payload.lang_param());\n        crate::app::conv_word(word, self.payload.language, config)\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/img/mod.rs",
    "content": "#![allow(unused)]\n#[cfg(feature = \"img_scan\")]\npub mod request;\n\nuse actix_multipart::Multipart;\nuse actix_web::web::{self, Json};\nuse config::Config;\nuse error::api_error::RestError;\nuse itertools::Itertools;\nuse once_cell::sync::Lazy;\nuse regex::Regex;\nuse std::path::Path;\nuse types::api::app::image::{Request, Response};\n\n// MAX 2MB\nconst MAX_UPLOAD_SIZE: usize = 2 * 1024 * 1024;\n\n// Filter japanese from image text\nconst FILTER_JP_REGEX: Lazy<Regex> =\n    Lazy::new(|| Regex::new(\"[あ-ん一-龯一-龯０-９Ａ-ｚア-ン』『]\").unwrap());\n\n/// Get search suggestions endpoint\npub async fn scan_ep(\n    payload: Multipart,\n    args: web::Query<Request>,\n    config: web::Data<Config>,\n) -> Result<Json<Response>, actix_web::Error> {\n    // Load payload\n    let local_file = request::read_payload(&config, payload).await?;\n\n    // Scan image\n    let local_file_cloned = local_file.clone();\n    let res = web::block(move || scan_image(local_file_cloned, &args, &config)).await;\n\n    // Cleanup file\n    web::block(move || std::fs::remove_file(local_file)).await??;\n\n    // Handle result after cleaning up files\n    Ok(Json(res??))\n}\n\n/// Scans an image and returns a `Response` with the recognized text or an error\n#[cfg(feature = \"img_scan\")]\nfn scan_image<P: AsRef<Path>>(\n    file: P,\n    req: &Request,\n    config: &Config,\n) -> Result<Response, RestError> {\n    let tess_data = config.server.tess_data.as_ref().map(|i| i.as_str());\n    let mut lt = leptess::LepTess::new(tess_data, \"jpn\").map_err(|_| RestError::Internal)?;\n    lt.set_image(file).map_err(|_| RestError::NoTextFound)?;\n\n    if lt.get_source_y_resolution() <= 0 {\n        lt.set_source_resolution(70)\n    }\n\n    if lt.mean_text_conf() < req.threshold {\n        return Err(RestError::NoTextFound);\n    }\n\n    let text = lt\n        .get_utf8_text()\n        .ok()\n        .and_then(|text| format_text(text))\n        .ok_or(RestError::NoTextFound)?;\n\n    
Ok(Response { text })\n}\n\n/// Format non-japanese characters from scanned result\nfn format_text(text: String) -> Option<String> {\n    let modded_text = FILTER_JP_REGEX\n        .captures_iter(&text)\n        .into_iter()\n        .map(|i| {\n            i.iter()\n                .filter_map(|j| Some(j?.as_str().to_string()))\n                .collect::<Vec<_>>()\n        })\n        .flatten()\n        .join(\"\");\n\n    (!modded_text.is_empty()).then(|| modded_text)\n}\n\n#[cfg(not(feature = \"img_scan\"))]\nmod request {\n    use super::*;\n    use std::path::PathBuf;\n    pub(crate) async fn read_payload(\n        config: &Config,\n        mut payload: Multipart,\n    ) -> Result<PathBuf, RestError> {\n        todo!()\n    }\n}\n\n/// Scans an image and returns a `Response` with the recognized text or an error\n#[cfg(not(feature = \"img_scan\"))]\nfn scan_image<P: AsRef<Path>>(\n    _file: P,\n    _req: &Request,\n    _config: &Config,\n) -> Result<Response, RestError> {\n    Ok(Response {\n        text: String::from(\"unsupported\"),\n    })\n}\n"
  },
  {
    "path": "lib/api/src/app/img/request.rs",
    "content": "use std::{\n    convert::TryInto,\n    fs::{create_dir, File},\n    io::Write,\n    path::{Path, PathBuf},\n};\n\nuse actix_multipart::{Field, Multipart};\nuse actix_web::web;\nuse config::Config;\nuse error::api_error::{Origin, RestError};\nuse futures::{StreamExt, TryStreamExt};\n\nuse super::MAX_UPLOAD_SIZE;\n\n/// Reads, validates and stores a multipart for img_scan endpoint requests\npub(crate) async fn read_payload(\n    config: &Config,\n    mut payload: Multipart,\n) -> Result<PathBuf, RestError> {\n    // Generate file\n    let rand_file = gen_local_file(config).await?;\n\n    // Get first payload\n    let field = payload\n        .try_next()\n        .await\n        .ok()\n        .flatten()\n        .ok_or(RestError::Missing(Origin::File))?;\n\n    // Read payload into file\n    read_field(field, &rand_file).await?;\n\n    Ok(rand_file)\n}\n\nasync fn gen_local_file(config: &Config) -> Result<PathBuf, RestError> {\n    let path = config.get_img_scan_upload_path();\n    let rand_file = Path::new(&path);\n\n    if !rand_file.exists() {\n        let path = config.get_img_scan_upload_path();\n        web::block(move || create_dir(Path::new(&path))).await??;\n    }\n\n    Ok(rand_file.join(format!(\"{}_img_scan\", utils::rand_alpha_numeric(75))))\n}\n\nasync fn read_field(mut field: Field, local_file: &PathBuf) -> Result<(), RestError> {\n    let local_file_cloned = local_file.clone();\n    let mut local_file = web::block(move || File::create(&local_file_cloned)).await??;\n\n    // Whether the magic number has been verified or not\n    let mut verified = false;\n\n    // The current amount of uploaded bytes\n    let mut size = 0;\n\n    while let Some(chunk) = field\n        .next()\n        .await\n        .map(|i| i.map_err(|_| RestError::IoError))\n    {\n        let chunk = chunk?;\n\n        size += chunk.len();\n\n        if size > MAX_UPLOAD_SIZE {\n            return Err(RestError::BadRequest.into());\n        }\n\n        if !verified 
{\n            check_magic_bytes(&chunk)?;\n            verified = true;\n        }\n\n        local_file.write_all(&chunk)?;\n    }\n\n    Ok(())\n}\n\n/// Verifies the input files magic number\nfn check_magic_bytes(chunk: &[u8]) -> Result<(), RestError> {\n    let magic_bytes: [u8; 4] = chunk[0..4].try_into().map_err(|_| RestError::BadRequest)?;\n    if !is_supported_format(magic_bytes) {\n        return Err(RestError::FormatNotSupported.into());\n    }\n    Ok(())\n}\n\n/// Returns `true` if given magic_nr bytes represent a supported image format\n#[inline]\nfn is_supported_format(magic_nr: [u8; 4]) -> bool {\n    match magic_nr {\n        // JPG\n        [255, 216, 255, 224] => true,\n        // PNG\n        [137, 80, 78, 71] => true,\n        _ => false,\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/kanji/ids_tree/builder.rs",
    "content": "use ids_parser::Origin;\nuse once_cell::sync::Lazy;\nuse std::collections::HashSet;\nuse types::api::app::kanji::ids_tree::OutObject;\n\nstatic STOP_RADICALS: Lazy<HashSet<char>> = Lazy::new(|| {\n    japanese::radicals::RADICALS\n        .iter()\n        .map(|i| i.1)\n        .flatten()\n        .map(|i| i.chars().next().unwrap())\n        .collect()\n});\n\npub struct KanjiTreeBuilder {\n    build_full: bool,\n}\n\nimpl KanjiTreeBuilder {\n    /// Creates a new TreeBuilder. The parameter specifies whether a full tree should be bulit or\n    /// Only one which is restricted to the Radicals used in the radical picker\n    pub fn new(build_full: bool) -> Self {\n        Self { build_full }\n    }\n\n    /// Recursive method to build the OutObjects\n    pub fn build(&self, c: char) -> Option<OutObject> {\n        let retrieve = resources::get().kanji();\n        let ids_kanji = retrieve.ids(c)?;\n\n        let mut out = OutObject::new(c);\n\n        out.set_literal_available(retrieve.has_literal(c));\n\n        //let radicals = ids_kanji.comp_by_lang(Origin::Japan)?.get_radicals();\n        let comps = match ids_kanji.comp_by_lang(Origin::Japan) {\n            Some(s) => s,\n            None => {\n                if ids_kanji.compositions.len() == 1 {\n                    &ids_kanji.compositions[0]\n                } else {\n                    return None;\n                }\n            }\n        };\n        let radicals = comps.get_radicals();\n\n        // recursive exit condition\n        if (radicals.len() == 1 && radicals[0] == c)\n            || radicals.is_empty()\n            || (STOP_RADICALS.contains(&c) && !self.build_full)\n        {\n            return Some(out);\n        }\n\n        let mut visited_items = HashSet::with_capacity(radicals.len());\n\n        for radical in radicals {\n            if visited_items.contains(&radical) {\n                continue;\n            }\n            if let Some(child) = self.build(radical) {\n    
            out.add_child(child);\n            }\n\n            visited_items.insert(radical);\n        }\n\n        Some(out)\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/kanji/ids_tree/mod.rs",
    "content": "pub mod builder;\n\nuse actix_web::web::Json;\nuse builder::KanjiTreeBuilder;\nuse error::api_error::RestError;\nuse types::api::app::kanji::ids_tree::{Request, Response};\n\n/// Get a decomposition graph\npub async fn decomp_graph(payload: Json<Request>) -> Result<Json<Response>, RestError> {\n    let tree = KanjiTreeBuilder::new(payload.full)\n        .build(payload.literal)\n        .ok_or(RestError::NotFound)?;\n\n    let size_opposite = KanjiTreeBuilder::new(!payload.full)\n        .build(payload.literal)\n        .ok_or(RestError::NotFound)?;\n\n    let has_big = tree != size_opposite;\n\n    Ok(Json(Response::new(tree, has_big)))\n}\n"
  },
  {
    "path": "lib/api/src/app/kanji/mod.rs",
    "content": "pub mod ids_tree;\n"
  },
  {
    "path": "lib/api/src/app/mod.rs",
    "content": "pub mod completions;\npub mod details;\npub mod img;\npub mod kanji;\npub mod news;\npub mod radical;\npub mod search;\n\nuse std::path::Path;\n\nuse config::Config;\nuse error::api_error::RestError;\nuse types::{\n    api::app::search::responses::words,\n    jotoba::{self, language::Language},\n};\n\npub type Result<T> = std::result::Result<T, RestError>;\n\npub(crate) fn conv_word(word: jotoba::words::Word, lang: Language, config: &Config) -> words::Word {\n    let is_common = word.is_common();\n    let accents = word.get_pitches();\n\n    let audio = word.audio_file_name().and_then(|name| {\n        let audio_p = Path::new(\"mp3\").join(name);\n        let local_path = Path::new(config.server.get_audio_files()).join(&audio_p);\n        if local_path.exists() {\n            let url = Path::new(\"/audio/\")\n                .join(&audio_p)\n                .to_str()\n                .unwrap()\n                .to_string();\n            Some(url)\n        } else {\n            None\n        }\n    });\n\n    let reading = word\n        .furigana\n        .as_ref()\n        .map(|i| i.clone())\n        .unwrap_or(word.get_reading().reading.clone());\n\n    let alt_readings = word\n        .reading\n        .alternative\n        .into_iter()\n        .map(|i| i.reading)\n        .collect();\n\n    let senses = word\n        .senses\n        .into_iter()\n        .map(|i| conv_ex_sentence(i, lang))\n        .collect::<Vec<_>>();\n\n    words::Word {\n        sequence: word.sequence,\n        is_common,\n        reading,\n        alt_readings,\n        senses,\n        accents,\n        jlpt_lvl: word.jlpt_lvl.map(|i| i.get()),\n        furigana: word.furigana,\n        transive_version: word.transive_version.map(|i| i.get()),\n        intransive_version: word.intransive_version.map(|i| i.get()),\n        sentences_available: word.sentences_available,\n        audio,\n    }\n}\n\n#[inline]\npub fn conv_ex_sentence(sense: jotoba::words::sense::Sense, 
lang: Language) -> words::Sense {\n    let glosses = sense\n        .glosses\n        .into_iter()\n        .map(|i| i.gloss)\n        .collect::<Vec<_>>();\n\n    let example_sentence = sense\n        .example_sentence\n        .and_then(|i| get_example_sentence(i, lang));\n\n    words::Sense {\n        misc: sense.misc,\n        field: sense.field,\n        dialect: sense.dialect,\n        glosses,\n        xref: sense.xref,\n        antonym: sense.antonym,\n        information: sense.information,\n        part_of_speech: sense.part_of_speech,\n        language: sense.language,\n        example_sentence,\n        gairaigo: sense.gairaigo,\n    }\n}\n\nfn get_example_sentence(id: u32, language: Language) -> Option<(String, String)> {\n    let sentence = resources::get().sentences().by_id(id)?;\n\n    let translation = sentence\n        .translation_for(language)\n        .or_else(|| sentence.translation_for(Language::English))?;\n\n    Some((sentence.furigana.clone(), translation.to_string()))\n}\n"
  },
  {
    "path": "lib/api/src/app/news/detailed.rs",
    "content": "use actix_web::web::Json;\nuse error::api_error;\nuse types::api::app::news::long::{Request, Response};\n\n/// Get detailed news endpoint\npub async fn news(payload: Json<Request>) -> Result<Json<Response>, actix_web::Error> {\n    let id = payload.id;\n\n    let entry = news::get()\n        .by_id(id)\n        .map(|i| super::ne_from_resource(i, false))\n        .ok_or(api_error::RestError::NotFound)?;\n\n    Ok(Json(Response { entry }))\n}\n"
  },
  {
    "path": "lib/api/src/app/news/mod.rs",
    "content": "pub mod detailed;\npub mod short;\n\nuse types::api::app::news::NewsEntry;\n\nfn ne_from_resource(src: &news::NewsEntry, short: bool) -> NewsEntry {\n    let html = if short {\n        src.short.clone()\n    } else {\n        src.long.clone()\n    };\n\n    NewsEntry {\n        id: src.id,\n        html,\n        title: src.title.clone(),\n        creation_time: src.creation_time,\n        trimmed: src.was_trimmed && !short,\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/news/short.rs",
    "content": "use actix_web::web::Json;\nuse types::api::app::news::short::{Request, Response};\n\n/// Get short news endpoint\npub async fn news(payload: Json<Request>) -> Result<Json<Response>, actix_web::Error> {\n    let after = payload.after;\n\n    let entries = news::get()\n        .last_entries(3)\n        .filter(|i| i.creation_time > after)\n        .map(|i| super::ne_from_resource(i, true))\n        .collect::<Vec<_>>();\n\n    Ok(Json(Response { entries }))\n}\n"
  },
  {
    "path": "lib/api/src/app/radical/kanji.rs",
    "content": "use actix_web::web::Json;\nuse intmap::{int_set::IntSet, IntMap};\nuse std::{collections::HashMap, time::Instant};\nuse types::api::app::radical::find_kanji::{Request, Response};\n\n/// Get kanji by its radicals\npub async fn kanji_by_radicals(payload: Json<Request>) -> Result<Json<Response>, actix_web::Error> {\n    let start = Instant::now();\n    let res = find_kanji(&payload.radicals);\n    log::debug!(\"Radical results took: {:?}\", start.elapsed());\n\n    Ok(Json(res))\n}\n\npub fn find_kanji(rads: &[char]) -> Response {\n    let mut possible_rads_set = IntSet::with_capacity(rads.len() * 3);\n    let mut kanji_res: IntMap<Vec<char>> = IntMap::with_capacity(8);\n\n    let k_retrieve = resources::get().kanji();\n    for kanji in k_retrieve.by_radicals(rads) {\n        push_or_insert(&mut kanji_res, kanji.stroke_count as u32, kanji.literal);\n\n        if !kanji.parts.is_empty() {\n            possible_rads_set.reserve(kanji.parts.len());\n            possible_rads_set.extend(kanji.parts.iter().map(|i| *i as u32));\n        }\n    }\n\n    let mut possible_rads = HashMap::<u32, Vec<char>>::new();\n    for i in possible_rads_set {\n        let c = unsafe { char::from_u32_unchecked(i) };\n        let s_count = japanese::radicals::get_stroke_count(c).unwrap();\n        possible_rads.entry(s_count as u32).or_default().push(c);\n    }\n\n    // Sort all radicals\n    for (_, v) in possible_rads.iter_mut() {\n        v.sort_unstable();\n    }\n\n    let mut kanji_res2 = HashMap::<u32, Vec<char>>::with_capacity(kanji_res.len());\n    kanji_res2.extend(kanji_res);\n\n    Response {\n        possible_radicals: possible_rads,\n        kanji: kanji_res2,\n    }\n}\n\nfn push_or_insert<T>(map: &mut IntMap<Vec<T>>, key: u32, item: T) {\n    if let Some(s) = map.get_mut(key) {\n        s.push(item);\n        return;\n    }\n\n    let capacity = (25u32.saturating_sub(key) + 1) * 2;\n    let mut new_vec = Vec::with_capacity(capacity as usize);\n    
new_vec.push(item);\n    map.insert(key, new_vec);\n}\n"
  },
  {
    "path": "lib/api/src/app/radical/mod.rs",
    "content": "pub mod kanji;\npub mod search;\n\npub use kanji::kanji_by_radicals;\n"
  },
  {
    "path": "lib/api/src/app/radical/search/jp_search.rs",
    "content": "use jp_utils::JapaneseExt;\nuse search::radical::word::RomajiSearch;\nuse std::collections::{HashMap, HashSet};\nuse types::{api::app::radical::search::KanjiRads, jotoba::kanji::Kanji};\n\n/// Returns a list of radicals based on the radical-search `query`\npub fn search(query: &str) -> HashSet<char> {\n    if query.has_kanji() {\n        return kanji_search(query);\n    }\n\n    RomajiSearch::new(query).run()\n}\n\n/// Returns a List of kanji that use similar radicals as the query.\npub fn similar_kanji_search(query: &str) -> Vec<KanjiRads> {\n    let kanji = query\n        .chars()\n        .filter(|i| i.is_kanji())\n        .filter_map(|lit| get_kanji(lit));\n\n    let mut dups: HashSet<char> = HashSet::new();\n    let mut out: Vec<KanjiRads> = Vec::new();\n\n    for kanji in kanji {\n        // Add written kanji to the result too\n        out.push(into_kanji_rads(kanji));\n        dups.insert(kanji.literal);\n\n        for part in kanji.parts.iter() {\n            let mut kanji_w_r = resources::get().kanji().by_radicals(&[*part]);\n            kanji_w_r.sort_by(|a, b| a.stroke_count.cmp(&b.stroke_count));\n            for k in kanji_w_r.into_iter().take(10) {\n                if k.stroke_count < kanji.stroke_count || dups.contains(&k.literal) {\n                    continue;\n                }\n                dups.insert(k.literal);\n                out.push(into_kanji_rads(k));\n            }\n        }\n    }\n\n    out.truncate(50);\n    out\n}\n\n#[inline]\nfn get_kanji(lit: char) -> Option<&'static Kanji> {\n    resources::get().kanji().by_literal(lit)\n}\n\n/// Convert a kanji to a `KanjiRads`\nfn into_kanji_rads(kanji: &Kanji) -> KanjiRads {\n    let mut rads: HashMap<u32, Vec<char>> = HashMap::with_capacity(kanji.parts.len());\n    for part in &kanji.parts {\n        let stroke_count = japanese::radicals::get_stroke_count(*part);\n        if let Some(stroke_count) = stroke_count {\n            
rads.entry(stroke_count).or_default().push(*part);\n        }\n    }\n    KanjiRads::new(kanji.literal, rads)\n}\n\n/// Takes all kanji from `query` and returns a list of all unique radicals to build all kanji\n/// picked from `query`\n#[inline]\nfn kanji_search(query: &str) -> HashSet<char> {\n    query.chars().map(|k| kanji_radicals(k)).flatten().collect()\n}\n\n#[inline]\nfn kanji_radicals(kanji: char) -> Vec<char> {\n    get_kanji(kanji)\n        .map(|i| i.parts.clone())\n        .unwrap_or_default()\n}\n/*\n/// Does a kana word-search and returns some likely radicals for the given query\nfn kana_search(query: &str) -> HashSet<char> {\n    let mut search_task: SearchTask<Engine> = SearchTask::new(&query)\n        .with_limit(3)\n        .with_threshold(0.8)\n        .with_custom_order(NativeOrder::new(query.to_string()));\n\n    search_task\n        .find()\n        .into_iter()\n        .map(|i| i.get_reading().reading.chars().filter(|i| i.is_kanji()))\n        .flatten()\n        .unique()\n        .map(|kanji| kanji_radicals(kanji))\n        .flatten()\n        .take(10)\n        .collect()\n} */\n"
  },
  {
    "path": "lib/api/src/app/radical/search/meaning.rs",
    "content": "use std::collections::HashSet;\n\nuse japanese::ToKanaExt;\nuse types::jotoba::language::Language;\n\npub fn search(query: &str, language: Language) -> HashSet<char> {\n    if query.len() < 2 {\n        return HashSet::new();\n    }\n\n    let mut res = search::radical::meaning_search(query);\n\n    if res.len() > 4 {\n        return res;\n    }\n\n    if japanese::guessing::could_be_romaji(query) {\n        res.extend(super::jp_search::search(&query.to_hiragana()));\n    } else {\n        //res.extend(word_search(query, language));\n        let fw_search = search::radical::word::ForeignSearch::new(query, language);\n        res.extend(fw_search.run())\n    }\n\n    res\n}\n"
  },
  {
    "path": "lib/api/src/app/radical/search/mod.rs",
    "content": "mod jp_search;\nmod meaning;\n\nuse std::{\n    collections::{BTreeSet, HashMap, HashSet},\n    str::FromStr,\n};\n\nuse actix_web::{web::Json, HttpRequest};\nuse error::api_error::RestError;\nuse jp_utils::JapaneseExt;\nuse types::{\n    api::app::radical::search::{Request, Response},\n    jotoba::language::Language,\n};\n\n/// Search for radicals\npub async fn search_radical(\n    mut payload: Json<Request>,\n    request: HttpRequest,\n) -> Result<Json<Response>, actix_web::Error> {\n    verify_payload(&mut payload)?;\n\n    let rad_res;\n    let mut kanji_res = vec![];\n\n    if payload.query.is_japanese() {\n        rad_res = jp_search::search(&payload.query);\n        kanji_res = jp_search::similar_kanji_search(&payload.query);\n    } else {\n        rad_res = meaning::search(&payload.query, user_lang(&request));\n    }\n\n    if rad_res.is_empty() && kanji_res.is_empty() {\n        return Ok(Json(Response::default()));\n    }\n\n    let radicals = map_radicals(&rad_res);\n\n    Ok(Json(Response {\n        radicals,\n        kanji: kanji_res,\n    }))\n}\n\n/// Load the users language from cookies\n#[inline]\nfn user_lang(request: &HttpRequest) -> Language {\n    request\n        .cookie(\"default_lang\")\n        .and_then(|i| Language::from_str(i.value()).ok())\n        .unwrap_or_default()\n}\n\n/// Maps radicals by its literals to ResRadical with its stroke count\nfn map_radicals(inp: &HashSet<char>) -> HashMap<u8, BTreeSet<char>> {\n    let mut radicals: HashMap<u8, BTreeSet<char>> = HashMap::with_capacity(inp.len());\n\n    for (lit, strokes) in inp\n        .iter()\n        .filter_map(|lit| Some((*lit, japanese::radicals::get_stroke_count(*lit)?)))\n    {\n        radicals.entry(strokes as u8).or_default().insert(lit);\n    }\n\n    radicals\n}\n\n/// Verifies the payload itself and returns a proper error if the request is invalid\nfn verify_payload(payload: &mut Request) -> Result<(), RestError> {\n    if 
payload.query.trim().is_empty() {\n        return Err(RestError::BadRequest);\n    }\n\n    payload.query = payload.query.trim().to_string();\n    Ok(())\n}\n"
  },
  {
    "path": "lib/api/src/app/search/kanji.rs",
    "content": "use super::new_page;\n\nuse super::convert_payload;\nuse crate::app::Result;\nuse actix_web::web::{self, Json};\nuse error::api_error::RestError;\n\nuse types::jotoba::language::param::AsLangParam;\nuse types::{\n    api::app::search::{\n        query::SearchPayload,\n        responses::{\n            k_compounds::{CompoundResponse, CompoundSet, CompoundWord},\n            kanji, Response,\n        },\n    },\n    jotoba::{\n        search::SearchTarget,\n        words::{filter_languages, Word},\n    },\n};\n\n/// API response type\npub type SearchResp = Response<kanji::KanjiResponse>;\n\n/// Do an app kanji search via API\npub async fn search(payload: Json<SearchPayload>) -> Result<Json<SearchResp>> {\n    let query = convert_payload(&payload)\n        .parse()\n        .ok_or(RestError::BadRequest)?;\n\n    let query_c = query.clone();\n    let result = web::block(move || search::kanji::search(&query_c)).await??;\n\n    let items = result\n        .items\n        .into_iter()\n        .map(|i| {\n            let k: kanji::Kanji = i.kanji.into();\n            k\n        })\n        .collect::<Vec<kanji::Kanji>>();\n\n    let len = result.total_len as u32;\n    let kanji = kanji::KanjiResponse::new(items);\n    let page = new_page(&payload, kanji, len, payload.settings.page_size);\n    Ok(Json(super::new_response(page, SearchTarget::Kanji, &query)))\n}\n\n/// Kanji compound request\npub async fn reading_compounds(payload: Json<SearchPayload>) -> Result<Json<CompoundResponse>> {\n    let lang = payload.lang_param();\n\n    let compounds: Vec<_> = payload\n        .query_str\n        .chars()\n        .filter_map(|i| resources::get().kanji().by_literal(i))\n        .map(|i| {\n            let on_words = convert_dicts(&i.on_dicts, lang);\n            let kun_words = convert_dicts(&i.on_dicts, lang);\n            CompoundSet::new(on_words, kun_words)\n        })\n        .collect();\n    Ok(Json(CompoundResponse::new(compounds)))\n}\n\n#[inline]\nfn 
convert_dicts(dicts: &Vec<u32>, lang: impl AsLangParam) -> Vec<CompoundWord> {\n    load_dicts(dicts, lang)\n        .into_iter()\n        .filter_map(|j| Some(CompoundWord::from_word(&j)))\n        .collect::<Vec<_>>()\n}\n\n#[inline]\nfn load_dicts(dicts: &Vec<u32>, lang: impl AsLangParam) -> Vec<Word> {\n    let word_storage = resources::get().words();\n    let mut words: Vec<_> = dicts\n        .iter()\n        .filter_map(|j| word_storage.by_sequence(*j))\n        .cloned()\n        .collect();\n    filter_languages(words.iter_mut(), lang);\n    words\n}\n"
  },
  {
    "path": "lib/api/src/app/search/mod.rs",
    "content": "pub mod kanji;\npub mod names;\npub mod sentences;\npub mod words;\n\nuse search::{\n    query::UserSettings,\n    query::{parser::QueryParser, Query},\n};\nuse serde::Serialize;\nuse types::{\n    api::app::search::{query::SearchPayload, responses::Response},\n    jotoba::{\n        pagination::{page::Page, Pagination},\n        search::SearchTarget,\n    },\n};\n\nconst FIRST_PAGE: u32 = 1;\nconst LAST_PAGE: u32 = 100;\n\npub(crate) fn new_response<T: Serialize>(\n    page: Page<T>,\n    q_type: SearchTarget,\n    query: &Query,\n) -> Response<T> {\n    Response::with_help_fn(page, |p| {\n        if !p.is_empty() {\n            return None;\n        }\n        search::build_help(q_type, &query)\n    })\n}\n\npub(crate) fn new_page<V: Serialize + Clone>(\n    pl: &SearchPayload,\n    v: V,\n    items: u32,\n    items_per_page: u32,\n) -> Page<V> {\n    let current_page = if items > 0 {\n        (pl.page.unwrap_or(FIRST_PAGE)).max(FIRST_PAGE)\n    } else {\n        0\n    };\n\n    let mut pagination = Pagination::new_page(v, current_page, items, items_per_page, LAST_PAGE);\n\n    if items == 0 {\n        pagination.set_pages(0);\n    }\n\n    pagination\n}\n\npub(crate) fn convert_payload(pl: &SearchPayload) -> QueryParser {\n    let user_settings = convert_user_settings(&pl.settings);\n\n    let mut q_parser = QueryParser::new(\n        pl.query_str.clone(),\n        types::jotoba::search::SearchTarget::Kanji,\n        user_settings,\n    )\n    .with_page(pl.page.unwrap_or_default() as usize)\n    .with_word_index(pl.word_index.unwrap_or_default());\n\n    if let Some(lang) = pl.lang_overwrite {\n        q_parser = q_parser.with_lang_overwrite(lang);\n    }\n\n    q_parser\n}\n\npub(crate) fn convert_user_settings(\n    settings: &types::api::app::search::query::UserSettings,\n) -> UserSettings {\n    UserSettings {\n        user_lang: settings.user_lang,\n        show_english: settings.show_english,\n        english_on_top: true,\n        
page_size: settings.page_size,\n        show_example_sentences: settings.show_example_sentences,\n        sentence_furigana: settings.sentence_furigana,\n        ..Default::default()\n    }\n}\n"
  },
  {
    "path": "lib/api/src/app/search/names.rs",
    "content": "use super::new_page;\n\nuse super::convert_payload;\nuse crate::app::Result;\nuse actix_web::web::{self, Json};\nuse error::api_error::RestError;\nuse search::SearchExecutor;\nuse types::{\n    api::app::search::{\n        query::SearchPayload,\n        responses::{names, Response},\n    },\n    jotoba::search::SearchTarget,\n};\n\n/// API response type\npub type Resp = Response<names::Response>;\n\n/// Do an app name search via API\npub async fn search(payload: Json<SearchPayload>) -> Result<Json<Resp>> {\n    let query = convert_payload(&payload)\n        .parse()\n        .ok_or(RestError::BadRequest)?;\n\n    let query_c = query.clone();\n    let result = web::block(move || {\n        let search = search::name::Search::new(&query_c);\n        SearchExecutor::new(search).run()\n    })\n    .await?;\n    let res = names::Response::new(result.items.into_iter().cloned().collect());\n    let len = result.total as u32;\n    let page = new_page(&payload, res, len, payload.settings.page_size);\n    let res = super::new_response(page, SearchTarget::Names, &query);\n    Ok(Json(res))\n}\n"
  },
  {
    "path": "lib/api/src/app/search/sentences.rs",
    "content": "use super::new_page;\n\nuse super::convert_payload;\nuse crate::app::Result;\nuse actix_web::web::{self, Json};\nuse error::api_error::RestError;\nuse types::{\n    api::app::search::{\n        query::SearchPayload,\n        responses::{sentences, Response},\n    },\n    jotoba::search::SearchTarget,\n};\n\n/// API response type\npub type Resp = Response<sentences::Response>;\n\n/// Do an app sentence search via API\npub async fn search(payload: Json<SearchPayload>) -> Result<Json<Resp>> {\n    let query = convert_payload(&payload)\n        .parse()\n        .ok_or(RestError::BadRequest)?;\n\n    let query_c = query.clone();\n    let result = web::block(move || {\n        let search = search::sentence::Search::new(&query_c);\n        search::SearchExecutor::new(search).run()\n    })\n    .await?;\n\n    let items = result\n        .items\n        .into_iter()\n        .map(|i| convert_sentence(i))\n        .collect::<Vec<_>>();\n\n    let res = sentences::Response::new(items);\n    let len = result.total as u32;\n\n    let page = new_page(&payload, res, len, payload.settings.page_size);\n    let res = super::new_response(page, SearchTarget::Sentences, &query);\n    Ok(Json(res))\n}\n\n#[inline]\npub(crate) fn convert_sentence(\n    sentence: search::sentence::result::Sentence,\n) -> sentences::Sentence {\n    sentences::Sentence::new(\n        sentence.id,\n        sentence.furigana.to_string(),\n        sentence.translation.to_string(),\n    )\n}\n"
  },
  {
    "path": "lib/api/src/app/search/words.rs",
    "content": "use super::new_page;\n\nuse super::convert_payload;\nuse crate::app::Result;\nuse actix_web::web::Data;\nuse actix_web::web::{self, Json};\nuse config::Config;\nuse error::api_error::RestError;\nuse search::{word::Search, SearchExecutor};\nuse types::{\n    api::app::search::{\n        query::SearchPayload,\n        responses::{\n            words::{self, Sentence},\n            Response,\n        },\n    },\n    jotoba::search::SearchTarget,\n};\n\n/// API response type\npub type Resp = Response<words::Response>;\n\n/// Do an app word search via API\npub async fn search(payload: Json<SearchPayload>, config: Data<Config>) -> Result<Json<Resp>> {\n    let query = convert_payload(&payload)\n        .parse()\n        .ok_or(RestError::BadRequest)?;\n    let user_lang = query.settings.user_lang;\n\n    let query_c = query.clone();\n    let result = web::block(move || {\n        let search = Search::new(&query_c);\n        SearchExecutor::new(search).run()\n    })\n    .await?;\n\n    let kanji = search::word::kanji::load_word_kanji_info(&result.items)\n        .into_iter()\n        .map(|i| i.into())\n        .collect::<Vec<_>>();\n\n    let words = result\n        .items\n        .iter()\n        .map(|i| super::super::conv_word(i.clone(), user_lang, &config))\n        .collect::<Vec<_>>();\n\n    let s_index = result.sentence_index();\n\n    let number = result.number.clone();\n\n    let sentence = result\n        .other_data\n        .sentence\n        .and_then(|i| i.parts)\n        .map(|i| conv_sentence(i, s_index));\n    let infl_info = result.other_data.inflection.map(|i| conv_infl_info(i));\n\n    let original_query = result.other_data.raw_query.clone();\n\n    let res = words::Response::new(words, kanji, infl_info, sentence, original_query, number);\n    let len = result.total as u32;\n\n    let page = new_page(&payload, res, len, payload.settings.page_size);\n    let res = super::new_response(page, SearchTarget::Words, &query);\n    
Ok(Json(res))\n}\n\nfn conv_sentence(sentence: sentence_reader::Sentence, index: usize) -> Sentence {\n    let parts = sentence\n        .into_parts()\n        .into_iter()\n        .map(|i| i.into())\n        .collect();\n    Sentence::new(index, parts)\n}\n\nfn conv_infl_info(infl_info: search::word::result::InflectionInformation) -> words::InflectionInfo {\n    words::InflectionInfo::new(infl_info.inflections, infl_info.lexeme)\n}\n"
  },
  {
    "path": "lib/api/src/internal/info/mod.rs",
    "content": "pub mod words;\n"
  },
  {
    "path": "lib/api/src/internal/info/words.rs",
    "content": "use std::collections::HashSet;\n\nuse actix_web::{web::Json, HttpResponse};\nuse error::api_error::RestError;\nuse types::{\n    api::internal::info::words::{Request, Response, WordItem},\n    jotoba::words::{part_of_speech::PosSimple, Word},\n};\n\n/// Handles a word info API request\npub async fn word_info(payload: Json<Request>) -> Result<HttpResponse, RestError> {\n    let word_retr = resources::get().words();\n\n    let items: Vec<_> = payload\n        .ids\n        .iter()\n        .filter_map(|i| word_retr.by_sequence(*i))\n        .cloned()\n        .map(|mut word| {\n            word.adjust_language(payload.lang_param());\n            let pos = unique_pos(&word);\n            WordItem {\n                sentences: vec![],\n                audio: word.audio_file_name_old(),\n                word,\n                pos,\n            }\n        })\n        .collect();\n\n    let response = Response { items };\n    Ok(HttpResponse::Ok().body(bincode::serialize(&response).unwrap()))\n}\n\nfn unique_pos(word: &Word) -> Vec<PosSimple> {\n    word.senses()\n        .into_iter()\n        .map(|i| &i.part_of_speech)\n        .flatten()\n        .map(|i| i.to_pos_simple())\n        .flatten()\n        .collect::<HashSet<_>>()\n        .into_iter()\n        .collect()\n}\n"
  },
  {
    "path": "lib/api/src/internal/mod.rs",
    "content": "pub mod info;\n"
  },
  {
    "path": "lib/api/src/lib.rs",
    "content": "/// API endpoints for the webapp\npub mod app;\n\n/// API endpoints for internal communication\npub mod internal;\n\n/// Search API endpoint\npub mod search;\n"
  },
  {
    "path": "lib/api/src/search/kanji/mod.rs",
    "content": "use actix_web::web::{self, Data, Json};\nuse config::Config;\nuse types::{\n    api::search::kanji::{Kanji, Response},\n    jotoba::search::SearchTarget,\n};\n\nuse super::{Result, SearchRequest};\n\n/// Do a kanji search via API\npub async fn kanji_search(\n    payload: Json<SearchRequest>,\n    config: Data<Config>,\n) -> Result<Json<Response>> {\n    let query = super::parse_query(payload, SearchTarget::Kanji)?;\n    let result = web::block(move || search::kanji::search(&query))\n        .await??\n        .items;\n    Ok(Json(to_response(result, &config)))\n}\n\n#[inline]\nfn to_response(items: Vec<search::kanji::result::Item>, config: &Config) -> Response {\n    let kanji = items\n        .into_iter()\n        .map(|i| Kanji::from(&i.kanji, config.server.get_html_files()))\n        .collect();\n    Response { kanji }\n}\n"
  },
  {
    "path": "lib/api/src/search/mod.rs",
    "content": "pub mod kanji;\npub mod name;\npub mod sentence;\npub mod word;\n\nuse actix_web::web::Json;\nuse error::api_error::RestError;\nuse search::query::{parser::QueryParser, Query, UserSettings};\nuse types::{api::search::SearchRequest, jotoba::search::SearchTarget};\n\npub type Result<T> = std::result::Result<T, RestError>;\n\npub(crate) fn parse_query(payload: Json<SearchRequest>, q_type: SearchTarget) -> Result<Query> {\n    let settings = UserSettings {\n        user_lang: payload.language,\n        show_english: !payload.no_english,\n        ..UserSettings::default()\n    };\n\n    let q_str = payload.query_str.clone();\n\n    let query = QueryParser::new(q_str, q_type, settings)\n        .parse()\n        .ok_or(RestError::BadRequest)?;\n\n    Ok(query)\n}\n"
  },
  {
    "path": "lib/api/src/search/name/mod.rs",
    "content": "use actix_web::web::{self, Json};\nuse search::SearchExecutor;\nuse types::{api::search::name::Response, jotoba::search::SearchTarget};\n\nuse super::{Result, SearchRequest};\n\n/// Do a name search via API\npub async fn name_search(payload: Json<SearchRequest>) -> Result<Json<Response>> {\n    let query = super::parse_query(payload, SearchTarget::Kanji)?;\n    let result = web::block(move || {\n        let search = search::name::Search::new(&query);\n        SearchExecutor::new(search).run()\n    })\n    .await?;\n    Ok(Json(result.items.into()))\n}\n"
  },
  {
    "path": "lib/api/src/search/sentence/mod.rs",
    "content": "use actix_web::web::{self, Json};\nuse types::{\n    api::search::sentence::{Response, Sentence},\n    jotoba::search::SearchTarget,\n};\n\nuse super::{Result, SearchRequest};\n\n/// Do a Sentence search via API\npub async fn sentence_search(payload: Json<SearchRequest>) -> Result<Json<Response>> {\n    let query = super::parse_query(payload, SearchTarget::Kanji)?;\n\n    let result = web::block(move || {\n        let search = search::sentence::Search::new(&query);\n        search::SearchExecutor::new(search).run()\n    })\n    .await?\n    .items\n    .into_iter()\n    .map(|i| search_to_sentence(i))\n    .collect::<Vec<_>>();\n\n    Ok(Json(result.into()))\n}\n\n#[inline]\nfn search_to_sentence(sentence: search::sentence::result::Sentence) -> Sentence {\n    Sentence {\n        eng: sentence.get_english().map(|i| i.to_owned()),\n        content: sentence.content.to_string(),\n        furigana: sentence.furigana.to_string(),\n        translation: sentence.translation.to_string(),\n        language: sentence.language,\n    }\n}\n"
  },
  {
    "path": "lib/api/src/search/word/mod.rs",
    "content": "use super::{Result, SearchRequest};\nuse actix_web::web::{self, Data, Json};\nuse config::Config;\nuse search::{word::Search, SearchExecutor};\nuse types::{\n    api::search::{\n        kanji::Kanji,\n        word::{Response, Word},\n    },\n    jotoba::search::SearchTarget,\n};\n\n/// Do a word search via API\npub async fn word_search(\n    payload: Json<SearchRequest>,\n    config: Data<Config>,\n) -> Result<Json<Response>> {\n    let query = super::parse_query(payload, SearchTarget::Words)?;\n    let result = web::block(move || {\n        let search = Search::new(&query);\n        SearchExecutor::new(search).run()\n    })\n    .await?;\n\n    let kanji: Vec<Kanji> = search::word::kanji::load_word_kanji_info(&result.items)\n        .into_iter()\n        .map(|i| Kanji::from(&i, config.server.get_html_files()))\n        .collect();\n    let words: Vec<Word> = result.items.into_iter().map(|i| (&i).into()).collect();\n    Ok(Json(Response::new(words, kanji)))\n}\n"
  },
  {
    "path": "lib/config/Cargo.toml",
    "content": "[package]\nname = \"config\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nserde = { version = \"1.0.171\", features = [\"derive\"] }\ntoml = \"0.7.6\"\nsha1 = { git = \"https://github.com/mitsuhiko/rust-sha1\"}\n"
  },
  {
    "path": "lib/config/src/lib.rs",
    "content": "use std::{\n    fs::DirEntry,\n    io::{BufReader, Read, Write},\n    time::Duration,\n};\n\nuse serde::{Deserialize, Serialize};\nuse std::{\n    fs::{self, File},\n    path::{Path, PathBuf},\n};\n\n#[derive(Serialize, Deserialize, Default, Clone, Debug)]\npub struct Config {\n    pub server: ServerConfig,\n    pub sentry: Option<SentryConfig>,\n    pub search: Option<SearchConfig>,\n\n    #[serde(skip)]\n    pub asset_hash: String,\n}\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct ServerConfig {\n    pub html_files: Option<String>,\n    pub audio_files: Option<String>,\n    pub listen_address: String,\n    pub storage_data: Option<String>,\n    pub img_upload_dir: Option<String>,\n    pub tess_data: Option<String>,\n    pub news_folder: Option<String>,\n    pub unidic_dict: Option<String>,\n    pub debug_mode: Option<bool>,\n    pub internal_api_key: String,\n}\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct SentryConfig {\n    pub dsn: String,\n}\n\n#[derive(Serialize, Deserialize, Clone, Debug, Default)]\npub struct SearchConfig {\n    pub suggestion_sources: Option<String>,\n    pub indexes_source: Option<String>,\n    pub report_queries_after: Option<u64>,\n}\n\nimpl Config {\n    /// Returns the configured index source files or its default value if not set\n    pub fn get_indexes_source(&self) -> &str {\n        self.search\n            .as_ref()\n            .and_then(|i| i.indexes_source.as_deref())\n            .unwrap_or(\"./resources/indexes\")\n    }\n\n    /// Returns the configured suggestion source files or its default value if not set\n    pub fn get_suggestion_sources(&self) -> &str {\n        self.search\n            .as_ref()\n            .and_then(|i| i.suggestion_sources.as_deref())\n            .unwrap_or(\"./resources/suggestions\")\n    }\n\n    /// Returns the configured query report timeout\n    pub fn get_query_report_timeout(&self) -> Duration {\n        let timeout = self\n            
.search\n            .as_ref()\n            .and_then(|i| i.report_queries_after)\n            .unwrap_or(4);\n\n        Duration::from_secs(timeout)\n    }\n\n    /// Returns the configured (or default) path for storage data\n    pub fn get_storage_data_path(&self) -> String {\n        self.server\n            .storage_data\n            .as_ref()\n            .cloned()\n            .unwrap_or_else(|| ServerConfig::default().storage_data.unwrap())\n    }\n\n    pub fn get_kreading_freq_path(&self) -> String {\n        Path::new(self.get_indexes_source())\n            .join(\"kreading_freq_index\")\n            .to_str()\n            .unwrap()\n            .to_string()\n    }\n\n    /// Returns the configured (or default) path for the radical map\n    pub fn get_unidic_dict(&self) -> String {\n        self.server\n            .unidic_dict\n            .as_ref()\n            .cloned()\n            .unwrap_or_else(|| ServerConfig::default().unidic_dict.unwrap())\n    }\n\n    /// Returns the configured (or default) path for the radical map\n    pub fn get_img_scan_upload_path(&self) -> String {\n        self.server\n            .img_upload_dir\n            .as_ref()\n            .cloned()\n            .unwrap_or_else(|| ServerConfig::default().img_upload_dir.unwrap())\n    }\n\n    /// Returns `true` if system is in debug mode\n    pub fn is_debug(&self) -> bool {\n        self.server.debug_mode.unwrap_or(false)\n    }\n}\n\nimpl Default for ServerConfig {\n    #[inline]\n    fn default() -> Self {\n        Self {\n            html_files: Some(String::from(\"html/assets\")),\n            audio_files: Some(String::from(\"html/audio\")),\n            listen_address: String::from(\"127.0.0.1:8080\"),\n            storage_data: Some(String::from(\"./resources/storage_data\")),\n            img_upload_dir: Some(String::from(\"./img_scan_tmp\")),\n            unidic_dict: Some(String::from(\"./resources/unidic-mecab\")),\n            tess_data: None,\n            
news_folder: Some(String::from(\"./resources/news\")),\n            debug_mode: Some(false),\n            internal_api_key: \"ReplaceMe!!!!\".to_string(),\n        }\n    }\n}\n\nimpl ServerConfig {\n    pub fn get_audio_files(&self) -> &str {\n        self.audio_files.as_deref().unwrap_or(\"html/audio\")\n    }\n\n    pub fn get_html_files(&self) -> &str {\n        self.html_files.as_deref().unwrap_or(\"html/assets\")\n    }\n\n    pub fn get_locale_path(&self) -> &str {\n        \"./locales\"\n    }\n\n    pub fn get_news_folder(&self) -> &str {\n        self.news_folder.as_deref().unwrap_or(\"./resources/news\")\n    }\n}\n\nimpl Config {\n    /// Create a new config object\n    pub fn new(src: Option<PathBuf>) -> Result<Self, String> {\n        let config_file = src\n            .or_else(|| {\n                std::env::var(\"JOTOBA_CONFIG\")\n                    .map(|i| Path::new(&i).to_owned())\n                    .ok()\n            })\n            .unwrap_or(Self::get_config_file()?);\n\n        let mut config = if !config_file.exists()\n            // Check if file is empty\n            || fs::metadata(&config_file).map(|i| i.len()).unwrap_or(1)\n                == 0\n        {\n            Self::default().save()?\n        } else {\n            let conf_data = fs::read_to_string(&config_file).map_err(|e| e.to_string())?;\n            toml::from_str(&conf_data).map_err(|e| e.to_string())?\n        };\n\n        /*\n        // Warn if sentry is configured but feature not enabled\n        #[cfg(not(feature = \"sentry_error\"))]\n        if let Some(ref sentry) = config.sentry {\n            if !sentry.dsn.is_empty() {\n                warn!(\"Sentry configured but not available. 
Build with \\\"sentry_error\\\" feature\");\n            }\n        }\n        */\n\n        config.asset_hash = variable_asset_hash(&config).map_err(|i| i.to_string())?;\n\n        Ok(config)\n    }\n\n    // Save the config\n    fn save(self) -> Result<Self, String> {\n        let config_file = Self::get_config_file()?;\n\n        let s = toml::to_string_pretty(&self).map_err(|e| e.to_string())?;\n        let mut f = File::create(&config_file).map_err(|e| e.to_string())?;\n        f.write_all(s.as_bytes()).map_err(|e| e.to_string())?;\n\n        Ok(self)\n    }\n\n    // Create missing folders and return the config file\n    pub fn get_config_file() -> Result<PathBuf, String> {\n        let conf_dir: PathBuf = Path::new(\"./\").join(\"data\");\n\n        if !conf_dir.exists() {\n            fs::create_dir_all(&conf_dir).map_err(|e| e.to_string())?;\n        }\n\n        Ok(conf_dir.join(\"config.toml\"))\n    }\n}\n\nfn variable_asset_hash(config: &Config) -> std::io::Result<String> {\n    let asset_path = Path::new(config.server.get_html_files());\n    let js_files = dir_content(&asset_path.join(\"js\"))?;\n    let css_files = dir_content(&asset_path.join(\"css\"))?;\n\n    let mut files = js_files\n        .into_iter()\n        .chain(css_files.into_iter())\n        .collect::<Vec<_>>();\n\n    files.sort_by(|a, b| a.file_name().cmp(&b.file_name()));\n\n    let mut hash = sha1::Sha1::new();\n    let mut buf: Vec<u8> = vec![0u8; 100];\n\n    for file in files {\n        let mut content = BufReader::new(File::open(file)?);\n\n        loop {\n            let read = content.read(&mut buf[..])?;\n            if read == 0 {\n                break;\n            }\n            hash.update(&buf[..read]);\n        }\n    }\n\n    Ok(hash.digest().to_string())\n}\n\nfn dir_content(path: &Path) -> std::io::Result<Vec<PathBuf>> {\n    let mut files = Vec::new();\n\n    visit_dirs(path, &mut files)?;\n\n    Ok(files.into_iter().map(|i| i.path()).collect::<Vec<_>>())\n}\n\nfn 
visit_dirs(dir: &Path, out: &mut Vec<DirEntry>) -> std::io::Result<()> {\n    if dir.is_dir() {\n        for entry in std::fs::read_dir(dir)? {\n            let entry = entry?;\n            let path = entry.path();\n            if path.is_dir() {\n                visit_dirs(&path, out)?;\n            } else {\n                out.push(entry)\n            }\n        }\n    }\n    Ok(())\n}\n"
  },
  {
    "path": "lib/engine/Cargo.toml",
    "content": "[package]\nname = \"engine\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\ntypes = { path = \"../types\", default-features = false, features = [\n  \"jotoba_intern\",\n] }\n#priority_container = { path = \"../../../priority_container\" }\npriority_container = { git = \"https://github.com/JojiiOfficial/PrioContainer/\" }\norder_struct = { git = \"https://github.com/JojiiOfficial/OrderStruct\" }\n#index_framework = { path = \"../../../index_framework\"}\nindex_framework = { git = \"https://github.com/WeDontPanic/index_framework\" }\n#vsm = { path = \"../../../vsm\" }\nsparse_vec = { git = \"https://github.com/JojiiOfficial/SparseVec\"}\n"
  },
  {
    "path": "lib/engine/src/lib.rs",
    "content": "pub mod pushable;\npub mod relevance;\npub mod result;\npub mod task;\npub mod utils;\n\nuse index_framework::{\n    retrieve::{retriever::Retriever, Retrieve},\n    traits::{backend::Backend, deser::DeSer},\n};\nuse std::hash::Hash;\nuse types::jotoba::language::Language;\n\n/// Generic search engine\npub trait Engine<'index> {\n    // Index\n    type B: Backend<Self::DictItem, Self::Document>;\n\n    // Index dictionary term\n    type DictItem: DeSer + Ord + From<String>;\n\n    /// Index output\n    type Document: DeSer;\n\n    /// Retrieving algorithm\n    type Retriever: Retriever<\n        'index,\n        Self::B,\n        Self::DictItem,\n        Self::Document,\n        Output = Self::Document,\n    >;\n\n    /// Engine output\n    type Output: Eq + Hash + Clone;\n\n    /// The search query\n    type Query;\n\n    fn make_query<S: AsRef<str>>(inp: S, lang: Option<Language>) -> Option<Self::Query>;\n\n    /// Converts index output to engine output\n    fn doc_to_output(input: &Self::Document) -> Option<Vec<Self::Output>>;\n\n    /// Returns the engines index\n    fn get_index(lang: Option<Language>) -> &'index Self::B;\n\n    /// Returns a new retrieve for the given terms\n    fn retrieve_for(\n        inp: &Self::Query,\n        query_str: &str,\n        lang: Option<Language>,\n    ) -> Retrieve<'index, Self::B, Self::DictItem, Self::Document>;\n\n    /// Returns a new retrieve for the engine\n    #[inline]\n    fn retrieve(\n        lang: Option<Language>,\n    ) -> Retrieve<'index, Self::B, Self::DictItem, Self::Document> {\n        Retrieve::new(Self::get_index(lang))\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/pushable/counter.rs",
    "content": "use std::marker::PhantomData;\n\nuse super::Pushable;\n\n/// Counts all push calls without storing the items\npub struct Counter<T> {\n    c: usize,\n    p: PhantomData<T>,\n}\n\nimpl<T> Counter<T> {\n    #[inline]\n    pub fn new() -> Self {\n        Self {\n            c: 0,\n            p: PhantomData,\n        }\n    }\n\n    #[inline]\n    pub fn val(&self) -> usize {\n        self.c\n    }\n}\n\nimpl<T> Pushable for Counter<T> {\n    type Item = T;\n\n    #[inline]\n    fn push(&mut self, _: Self::Item) -> bool {\n        self.c += 1;\n        true\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/pushable/f_max_cnt.rs",
    "content": "use super::Pushable;\nuse std::marker::PhantomData;\n\n/// A counter that Implements CancelPushable which counts up to a fixed value and\n/// Cancels counting if this value has been reached\npub struct FilteredMaxCounter<'a, T> {\n    val: usize,\n    max: usize,\n    pub filter: Box<dyn Fn(&T) -> bool + 'a>,\n    p: PhantomData<T>,\n}\n\nimpl<'a, T> FilteredMaxCounter<'a, T> {\n    #[inline]\n    pub fn new<F>(max: usize, filter: F) -> Self\n    where\n        F: Fn(&T) -> bool + 'a,\n    {\n        Self {\n            val: 0,\n            max,\n            filter: Box::new(filter),\n            p: PhantomData,\n        }\n    }\n\n    #[inline]\n    pub fn val(&self) -> usize {\n        self.val\n    }\n\n    #[inline]\n    pub fn inc(&mut self, delta: usize) {\n        self.val += delta;\n    }\n\n    #[inline]\n    pub fn is_full(&self) -> bool {\n        self.val >= self.max\n    }\n}\n\nimpl<'a, T> Pushable for FilteredMaxCounter<'a, T> {\n    type Item = T;\n\n    #[inline]\n    fn push(&mut self, i: Self::Item) -> bool {\n        if self.is_full() {\n            return false;\n        }\n\n        if !(self.filter)(&i) {\n            self.val += 1;\n        }\n\n        true\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/pushable/max_cnt.rs",
    "content": "use super::Pushable;\nuse std::marker::PhantomData;\n\n/// A counter that Implements CancelPushable which counts up to a fixed value and\n/// Cancels counting if this value has been reached\npub struct MaxCounter<T> {\n    val: usize,\n    max: usize,\n    p: PhantomData<T>,\n}\n\nimpl<T> MaxCounter<T> {\n    #[inline]\n    pub fn new(max: usize) -> Self {\n        Self {\n            val: 0,\n            max,\n            p: PhantomData,\n        }\n    }\n\n    #[inline]\n    pub fn val(&self) -> usize {\n        self.val\n    }\n\n    #[inline]\n    pub fn inc(&mut self, delta: usize) {\n        self.val += delta;\n    }\n\n    #[inline]\n    pub fn is_full(&self) -> bool {\n        self.val >= self.max\n    }\n}\n\nimpl<T> Pushable for MaxCounter<T> {\n    type Item = T;\n\n    #[inline]\n    fn push(&mut self, _i: Self::Item) -> bool {\n        if self.is_full() {\n            return false;\n        }\n\n        self.val += 1;\n        true\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/pushable/mod.rs",
    "content": "pub mod counter;\npub mod f_max_cnt;\npub mod max_cnt;\npub mod push_dbg;\npub mod push_fn;\npub mod push_mod;\n\npub use counter::Counter;\npub use f_max_cnt::FilteredMaxCounter;\npub use max_cnt::MaxCounter;\npub use push_mod::PushMod;\n\nuse super::relevance::item::RelItem;\nuse priority_container::StableUniquePrioContainerMax;\nuse std::hash::Hash;\n\npub trait Pushable {\n    type Item;\n\n    fn push(&mut self, i: Self::Item) -> bool;\n}\n\nimpl<T> Pushable for StableUniquePrioContainerMax<RelItem<T>>\nwhere\n    T: Eq + Hash + Clone,\n{\n    type Item = RelItem<T>;\n\n    #[inline]\n    fn push(&mut self, i: Self::Item) -> bool {\n        self.insert(i);\n        true\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/pushable/push_dbg.rs",
    "content": "use super::Pushable;\nuse std::{fmt::Debug, marker::PhantomData};\n\n/// Allows debugging pushed items\npub struct PushDbg<'a, P, I> {\n    output: &'a mut P,\n    p: PhantomData<I>,\n}\n\nimpl<'a, P, I> PushDbg<'a, P, I>\nwhere\n    P: Pushable<Item = I>,\n    I: Debug,\n{\n    pub fn new(output: &'a mut P) -> Self {\n        Self {\n            output,\n            p: PhantomData,\n        }\n    }\n}\n\nimpl<'a, P, I> Pushable for PushDbg<'a, P, I>\nwhere\n    P: Pushable<Item = I>,\n    I: Debug,\n{\n    type Item = I;\n\n    #[inline]\n    fn push(&mut self, i: Self::Item) -> bool {\n        print!(\"{i:#?}\");\n        let cont = self.output.push(i);\n        println!(\" continue: {cont}\");\n        cont\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/pushable/push_fn.rs",
    "content": "use super::Pushable;\nuse std::marker::PhantomData;\n\npub struct PushFn<F, T> {\n    f: F,\n    p: PhantomData<T>,\n}\n\nimpl<F, T> PushFn<F, T>\nwhere\n    F: FnMut(T) -> bool,\n{\n    #[inline]\n    pub fn new(f: F) -> Self {\n        Self { f, p: PhantomData }\n    }\n}\n\nimpl<F, T> Pushable for PushFn<F, T>\nwhere\n    F: FnMut(T) -> bool,\n{\n    type Item = T;\n\n    #[inline]\n    fn push(&mut self, i: Self::Item) -> bool {\n        (self.f)(i)\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/pushable/push_mod.rs",
    "content": "use std::marker::PhantomData;\n\nuse super::Pushable;\n\n/// Allows modifying pushed data\npub struct PushMod<'a, P, I, O, F> {\n    output: &'a mut P,\n    f: F,\n    p: PhantomData<I>,\n    p2: PhantomData<O>,\n}\n\nimpl<'a, P, I, O, F> PushMod<'a, P, I, O, F>\nwhere\n    P: Pushable<Item = O>,\n    F: Fn(I) -> O,\n{\n    pub fn new(output: &'a mut P, f: F) -> Self {\n        Self {\n            output,\n            f,\n            p: PhantomData,\n            p2: PhantomData,\n        }\n    }\n}\n\nimpl<'a, P, I, O, F> Pushable for PushMod<'a, P, I, O, F>\nwhere\n    F: Fn(I) -> O,\n    P: Pushable<Item = O>,\n{\n    type Item = I;\n\n    #[inline]\n    fn push(&mut self, i: Self::Item) -> bool {\n        self.output.push((self.f)(i))\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/relevance/data.rs",
    "content": "use sparse_vec::{SpVec32, VecExt};\nuse types::jotoba::language::Language;\n\n/// Item to sort stuff\n#[derive(Debug)]\npub struct SortData<'item, 'query, T, I, Q> {\n    out_item: &'item T,\n    index_item: &'item I,\n    rel: f32,\n    query_str: &'query str,\n    query: &'query Q,\n    language: Option<Language>,\n    threshold: Option<f32>,\n}\n\nimpl<'item, 'query, T, I, Q> SortData<'item, 'query, T, I, Q> {\n    #[inline]\n    pub fn new(\n        out_item: &'item T,\n        index_item: &'item I,\n        rel: f32,\n        query: &'query Q,\n        query_str: &'query str,\n        language: Option<Language>,\n        threshold: Option<f32>,\n    ) -> Self {\n        Self {\n            out_item,\n            index_item,\n            rel,\n            query_str,\n            query,\n            language,\n            threshold,\n        }\n    }\n\n    #[inline]\n    pub fn item(&self) -> &T {\n        self.out_item\n    }\n\n    #[inline]\n    pub fn rel(&self) -> f32 {\n        self.rel\n    }\n\n    #[inline]\n    pub fn query_str(&self) -> &str {\n        self.query_str\n    }\n\n    #[inline]\n    pub fn language(&self) -> Option<Language> {\n        self.language\n    }\n\n    #[inline]\n    pub fn query(&self) -> &'query Q {\n        self.query\n    }\n\n    #[inline]\n    pub fn index_item(&self) -> &I {\n        self.index_item\n    }\n\n    #[inline]\n    pub fn threshold(&self) -> Option<f32> {\n        self.threshold\n    }\n}\n\nimpl<'item, 'query, T, I> SortData<'item, 'query, T, I, SpVec32>\nwhere\n    I: AsRef<SpVec32>,\n{\n    #[inline]\n    pub fn vec_similarity(&self) -> f32 {\n        self.query.cosine(self.index_item.as_ref())\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/relevance/item.rs",
    "content": "use std::{\n    cmp::Ordering,\n    hash::{Hash, Hasher},\n};\n\n/// A single item (result) in a set of search results\n#[derive(Clone, Copy, Default, Debug)]\npub struct RelItem<T> {\n    pub item: T,\n    pub relevance: f32,\n}\n\nimpl<T: PartialEq> RelItem<T> {\n    /// Create a new ResultItem<T>\n    #[inline]\n    pub fn new(item: T, relevance: f32) -> Self {\n        Self { item, relevance }\n    }\n}\n\nimpl<T> RelItem<T> {\n    /// Maps the item within the result without changing other data\n    #[inline]\n    pub fn map_item<F, O>(self, f: F) -> RelItem<O>\n    where\n        F: Fn(T) -> O,\n    {\n        let item = (f)(self.item);\n        RelItem {\n            item,\n            relevance: self.relevance,\n        }\n    }\n}\n\nimpl<T: PartialEq> PartialEq for RelItem<T> {\n    #[inline(always)]\n    fn eq(&self, other: &Self) -> bool {\n        self.item == other.item\n    }\n}\n\nimpl<T: PartialEq> Eq for RelItem<T> {}\n\nimpl<T: Eq + Hash> Hash for RelItem<T> {\n    #[inline]\n    fn hash<H: Hasher>(&self, state: &mut H) {\n        self.item.hash(state);\n    }\n}\n\nimpl<T: PartialEq> PartialOrd for RelItem<T> {\n    #[inline]\n    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n        Some(self.relevance.total_cmp(&other.relevance))\n    }\n}\n\nimpl<T: PartialEq> Ord for RelItem<T> {\n    #[inline]\n    fn cmp(&self, other: &Self) -> Ordering {\n        self.relevance.total_cmp(&other.relevance)\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/relevance/mod.rs",
    "content": "pub mod data;\npub mod item;\n\nuse data::SortData;\nuse types::jotoba::language::Language;\n\npub trait RelevanceEngine {\n    type OutItem;\n    type IndexItem;\n    type Query;\n\n    fn init(&mut self, _init: RelEngineInit) {}\n\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32;\n}\n\npub struct RelEngineInit {\n    pub query: String,\n    pub language: Option<Language>,\n}\n\nimpl RelEngineInit {\n    #[inline]\n    pub(crate) fn new(query: String, language: Option<Language>) -> Self {\n        Self { query, language }\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/result.rs",
    "content": "use super::relevance::item::RelItem;\nuse std::{fmt::Debug, slice::Iter};\n\n/// A result from a search. Contains information about the actual\n/// amount of items returned and the items to display on the current page.\n/// The items are always ordered\npub struct SearchResult<T> {\n    pub total_items: usize,\n    pub items: Vec<RelItem<T>>,\n}\n\nimpl<T: PartialEq> SearchResult<T> {\n    /// Create a new `SearchResult` from a list of items. Requires `items` to be sorted\n    #[inline]\n    pub fn new(items: Vec<RelItem<T>>, total_items: usize) -> Self {\n        Self { items, total_items }\n    }\n}\n\nimpl<T> SearchResult<T> {\n    /// Get the total amount of items in the result. This value is\n    /// always bigger or equal to the length of the items in the resultset\n    #[inline]\n    pub fn len(&self) -> usize {\n        self.total_items\n    }\n\n    /// Returns `true` if result is empty\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.len() == 0\n    }\n\n    /// Returns an iterator over the raw result items\n    #[inline]\n    pub fn iter(&self) -> Iter<'_, RelItem<T>> {\n        self.items.iter()\n    }\n\n    #[inline]\n    pub fn into_inner(self) -> Vec<RelItem<T>> {\n        self.items\n    }\n\n    /// Returns an iterator over the raw result items\n    #[inline]\n    pub fn into_iter(self) -> impl Iterator<Item = T> {\n        self.items.into_iter().map(|i| i.item)\n    }\n\n    /// Returns the item at `index` from the result or None if index is out of bounds\n    #[inline]\n    pub fn get(&self, index: usize) -> Option<&RelItem<T>> {\n        self.items.get(index)\n    }\n}\n\nimpl<T: PartialEq> Default for SearchResult<T> {\n    #[inline]\n    fn default() -> Self {\n        Self {\n            total_items: 0,\n            items: vec![],\n        }\n    }\n}\n\nimpl<T: PartialEq + Debug> Debug for SearchResult<T> {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        
f.debug_struct(\"SearchResult\")\n            .field(\"total_items\", &self.total_items)\n            .field(\"items\", &self.items)\n            .finish()\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/task.rs",
    "content": "use crate::{\n    pushable::{MaxCounter, PushMod, Pushable},\n    relevance::{data::SortData, RelevanceEngine},\n    relevance::{item::RelItem, RelEngineInit},\n    result::SearchResult,\n    Engine,\n};\nuse priority_container::StableUniquePrioContainerMax;\nuse std::marker::PhantomData;\nuse types::jotoba::{\n    language::Language,\n    search::guess::{Guess, GuessType},\n};\n\npub struct SearchTask<'index, E: Engine<'index>> {\n    /// Search query\n    query_str: String,\n\n    /// Language to search in\n    query_lang: Option<Language>,\n\n    /// filter out items\n    item_filter: Option<Box<dyn Fn(&E::Document) -> bool>>,\n\n    /// Filter out results\n    res_filter: Option<Box<dyn Fn(&E::Output) -> bool>>,\n\n    /// Custom result order function\n    cust_order: Option<\n        Box<dyn RelevanceEngine<OutItem = E::Output, IndexItem = E::Document, Query = E::Query>>,\n    >,\n\n    /// Min relevance returned from search algo\n    threshold: f32,\n\n    /// Max distance to max item\n    max_dist: Option<f32>,\n\n    limit: usize,\n    offset: usize,\n    est_limit: usize,\n    phantom: PhantomData<E>,\n}\n\nimpl<'index, E> SearchTask<'index, E>\nwhere\n    E: Engine<'index> + 'index,\n{\n    #[inline]\n    pub fn new<S: AsRef<str>>(query: S) -> Self {\n        let mut task = Self::default();\n        task.query_str = query.as_ref().to_string();\n        task\n    }\n\n    /// Creates a new Search task with a query assigned language\n    #[inline]\n    pub fn with_language<S: AsRef<str>>(query: S, language: Language) -> Self {\n        let mut task = Self::default();\n        task.query_str = query.as_ref().to_string();\n        task.query_lang = Some(language);\n        task\n    }\n\n    /// Returns `true` if the SearchTask has a language assigned\n    #[inline]\n    pub fn has_language(&self) -> bool {\n        self.query_lang.is_some()\n    }\n\n    /// Set the total limit. 
This is the max amount of vectors which will be loaded and processed\n    #[inline]\n    pub fn with_limit(mut self, total_limit: usize) -> Self {\n        self.limit = total_limit;\n        self\n    }\n\n    /// Sets the max distance a result item can be ratet in order to be a part of the final result set\n    #[inline]\n    pub fn with_max_dist(mut self, max_dist: f32) -> Self {\n        self.max_dist = Some(max_dist);\n        self\n    }\n\n    /// Sets the search task's threshold. This does not apply on the final score, which can be\n    /// overwritten by `order` but applies to the vector space relevance itself.\n    #[inline]\n    pub fn with_threshold(mut self, threshold: f32) -> Self {\n        self.threshold = threshold;\n        self\n    }\n\n    /// Returns `true` if there is a threshold set\n    #[inline]\n    pub fn has_threshold(&self) -> bool {\n        self.threshold > 0.0\n    }\n\n    /// Sets the offeset of the search. Can be used for pagination. Requires output of search being\n    /// directly used and not manually reordered\n    pub fn with_offset(mut self, offset: usize) -> Self {\n        self.offset = offset;\n        self\n    }\n\n    /// Set the search task's result filter.\n    pub fn with_result_filter<F: 'static>(mut self, res_filter: F) -> Self\n    where\n        F: Fn(&E::Output) -> bool,\n    {\n        self.res_filter = Some(Box::new(res_filter));\n        self\n    }\n\n    /// Set the search task's custom order function\n    pub fn with_custom_order(\n        mut self,\n        res_filter: impl RelevanceEngine<OutItem = E::Output, IndexItem = E::Document, Query = E::Query>\n            + 'static,\n    ) -> Self {\n        self.cust_order = Some(Box::new(res_filter));\n        self\n    }\n\n    /// Set the search task's raw document filter\n    pub fn with_item_filter<F: 'static>(mut self, item_filter: F) -> Self\n    where\n        F: Fn(&E::Document) -> bool,\n    {\n        self.item_filter = 
Some(Box::new(item_filter));\n        self\n    }\n\n    /// Runs the search task and returns the result.\n    pub fn find(&mut self) -> SearchResult<E::Output> {\n        self.rel_init();\n        let cap = self.limit + self.offset;\n        let mut pqueue = StableUniquePrioContainerMax::new_allocated(cap, cap);\n        self.find_to(&mut pqueue);\n        self.make_result(pqueue)\n    }\n\n    /// Rettrieves results and pushes them into `out`\n    #[inline]\n    pub fn find_to<O>(&mut self, out: &mut O) -> Option<usize>\n    where\n        O: Pushable<Item = RelItem<E::Output>>,\n    {\n        self.rel_init();\n        self.find_to_inner(out, true)\n    }\n\n    /// Estimates the amount of results efficiently. This 'guess' is defined as follows:\n    ///\n    /// Be 'm' the amount of items a full search would return.\n    /// Be 'n' the guess returned by this function.\n    ///\n    /// - n = 0 => m = 0\n    /// - n <= m\n    pub fn estimate_result_count(&mut self) -> Guess {\n        self.rel_init();\n\n        let mut counter = MaxCounter::new(self.est_limit + 1);\n        self.estimate_to(&mut counter);\n        let estimated = counter.val();\n\n        let mut guess_type = GuessType::Undefined;\n\n        if (estimated <= self.est_limit) || estimated == 0 {\n            // All filtering operations are applied in estimation algorithm as well.\n            // Since we use the max value of query\n            // result, we can only assure it being accurate if there was only one query and no\n            // Limit was reached. From the 1st condition follows that estimated == 0 implies\n            // an accurate results\n            guess_type = GuessType::Accurate;\n        } else if estimated > self.est_limit {\n            // Were counting 1 more than `est_limit`. Thus `estimated` being bigger than limit\n            // means there are more elements than the given limit. 
However since were returning a\n            // number <= est_limit, relatively to the estimation the guess type is `Opentop`\n            guess_type = GuessType::MoreThan;\n        }\n\n        let est_result = (estimated).min(self.est_limit) as u32;\n        Guess::new(est_result, guess_type)\n    }\n\n    /// Estimates result count by pushing elements to `out`\n    #[inline]\n    pub fn estimate_to<P>(&mut self, out: &mut P)\n    where\n        P: Pushable<Item = E::Output>,\n    {\n        self.rel_init();\n        let mut out = PushMod::new(out, |i: RelItem<E::Output>| i.item);\n        self.find_to_inner(&mut out, false);\n    }\n\n    /// Retrieves results and pushes all items into `out`. Calculates relevance for each item if `sort` is true or\n    /// The SearchTask has a threshold set.\n    fn find_to_inner<O>(&self, out: &mut O, sort: bool) -> Option<usize>\n    where\n        O: Pushable<Item = RelItem<E::Output>>,\n    {\n        let query = E::make_query(&self.query_str, self.query_lang)?;\n\n        let mut retr: E::Retriever =\n            E::retrieve_for(&query, &self.query_str, self.query_lang).get();\n\n        let mut pushed = 0;\n\n        loop {\n            let (index_item, out_items) = match self.retrieve_next(&mut retr) {\n                Some(v) => v,\n                None => break,\n            };\n\n            for i in out_items {\n                let score = if sort || self.has_threshold() {\n                    self.score(&i, &index_item, &query)\n                } else {\n                    0.0\n                };\n\n                if self.has_threshold() && score < self.threshold {\n                    continue;\n                }\n\n                // Break if caller doesn't want to consume more\n                pushed += 1;\n                if !out.push(RelItem::new(i, score)) {\n                    break;\n                }\n            }\n        }\n\n        Some(pushed)\n    }\n\n    #[inline]\n    fn score(&self, out_item: 
&E::Output, index_item: &E::Document, query: &E::Query) -> f32 {\n        let threshold = self.has_threshold().then(|| self.threshold);\n        let s_data = SortData::new(\n            out_item,\n            index_item,\n            0.0,\n            query,\n            &self.query_str,\n            self.query_lang,\n            threshold,\n        );\n        self.cust_order\n            .as_ref()\n            .map(|i| i.score(&s_data))\n            .unwrap_or(0.0)\n    }\n\n    /// Builds output from the given Prio Queue\n    fn make_result(\n        &self,\n        data: StableUniquePrioContainerMax<RelItem<E::Output>>,\n    ) -> SearchResult<E::Output> {\n        let total_count = data.total_pushed();\n        let p_items = self.take_page(data);\n        SearchResult::new(p_items, total_count)\n    }\n\n    /// Takes the correct page from a UniquePrioContainerMax based on the given offset and limit\n    #[inline]\n    fn take_page<U: Ord>(&self, pqueue: StableUniquePrioContainerMax<U>) -> Vec<U> {\n        super::utils::page_from_pqueue(self.limit, self.offset, pqueue)\n    }\n\n    #[inline]\n    fn retrieve_next(&self, retr: &mut E::Retriever) -> Option<(E::Document, Vec<E::Output>)> {\n        let next = retr.next()?;\n\n        if !self.item_filter(&next) {\n            return Some((next, vec![]));\n        };\n\n        let mut out_items = E::doc_to_output(&next).unwrap_or_default();\n        if out_items.is_empty() {\n            return Some((next, out_items));\n        }\n\n        if let Some(ref filter) = self.res_filter {\n            out_items.retain(|i| filter(i));\n        }\n\n        Some((next, out_items))\n    }\n\n    /// Returns `false` if the item has to be removed from the result\n    #[inline]\n    fn item_filter(&self, item: &E::Document) -> bool {\n        self.item_filter.as_ref().map(|i| i(item)).unwrap_or(true)\n    }\n\n    #[inline]\n    fn rel_init(&mut self) {\n        if self.cust_order.is_none() {\n            return;\n        
}\n\n        let init = self.make_rel_init();\n        self.cust_order.as_mut().unwrap().init(init);\n    }\n\n    #[inline]\n    fn make_rel_init(&self) -> RelEngineInit {\n        RelEngineInit::new(self.query_str.clone(), self.query_lang)\n    }\n}\n\nimpl<'a, T: Engine<'a>> Default for SearchTask<'a, T> {\n    #[inline]\n    fn default() -> Self {\n        Self {\n            query_str: Default::default(),\n            query_lang: None,\n            item_filter: None,\n            res_filter: None,\n            cust_order: None,\n            threshold: 0.0,\n            limit: 1000,\n            offset: 0,\n            est_limit: 100,\n            phantom: PhantomData,\n            max_dist: None,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/engine/src/utils.rs",
    "content": "use crate::relevance::item::RelItem;\nuse priority_container::StableUniquePrioContainerMax;\n\n/// Takes the correct \"limit\" elements form a from a UniquePrioContainerMax at \"offset\"\npub fn page_from_pqueue<U: Ord>(\n    limit: usize,\n    offset: usize,\n    pqueue: StableUniquePrioContainerMax<U>,\n) -> Vec<U> {\n    let len = pqueue.len();\n\n    let take = (len.saturating_sub(offset)).min(limit);\n    let to_skip = len.saturating_sub(offset + take);\n\n    let mut o: Vec<_> = pqueue.into_iter().skip(to_skip).take(take).collect();\n    o.reverse();\n    o\n}\n\n/// Takes the correct \"limit\" elements form a from a UniquePrioContainerMax at \"offset\"\npub fn page_from_pqueue_with_max_dist<I: PartialEq>(\n    limit: usize,\n    offset: usize,\n    max_dist: f32,\n    max: f32,\n    pqueue: StableUniquePrioContainerMax<RelItem<I>>,\n) -> Vec<RelItem<I>> {\n    let peeked = pqueue.peek();\n\n    if peeked.is_none() {\n        return vec![];\n    }\n\n    let len = pqueue.len();\n\n    let take = (len.saturating_sub(offset)).min(limit);\n    let to_skip = len.saturating_sub(offset + take);\n\n    let mut o: Vec<_> = pqueue\n        .into_iter()\n        .filter(|i| i.relevance + max_dist >= max || max_dist == 0.0)\n        .skip(to_skip)\n        .take(take)\n        .collect();\n    o.reverse();\n    o\n}\n"
  },
  {
    "path": "lib/error/Cargo.toml",
    "content": "[package]\nname = \"error\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nactix-web = { version = \"4.3.1\", optional = true }\nserde = \"1.0.171\"\nstrum = \"0.25.0\"\nthiserror = \"1.0.43\"\n\n[features]\ndefault = []\nweb_error = [\"actix-web\"]\n"
  },
  {
    "path": "lib/error/src/api_error.rs",
    "content": "#![allow(dead_code, unreachable_patterns)]\n\nuse actix_web::{error::BlockingError, http::StatusCode, HttpResponse, ResponseError};\nuse serde::Serialize;\nuse thiserror::Error;\n\n#[derive(Clone, Copy, PartialEq)]\npub enum Origin {\n    Radicals,\n    Suggestions,\n    File,\n}\n\nimpl std::fmt::Debug for Origin {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(\n            f,\n            \"{}\",\n            match self {\n                Origin::Radicals => \"radicals\",\n                Origin::Suggestions => \"suggestions\",\n                Origin::File => \"file\",\n            }\n        )\n    }\n}\n\n#[derive(Error, Debug, Clone, Copy, PartialEq)]\npub enum RestError {\n    #[error(\"Not found\")]\n    NotFound,\n\n    #[error(\"Bad request\")]\n    BadRequest,\n\n    #[error(\"Internal server error\")]\n    Internal,\n\n    #[error(\"Timeout exceeded\")]\n    Timeout,\n\n    #[error(\"IO error\")]\n    IoError,\n\n    #[error(\"Format not supported\")]\n    FormatNotSupported,\n\n    #[error(\"No text found\")]\n    NoTextFound,\n\n    #[error(\"missing {0:?}\")]\n    Missing(Origin),\n\n    #[error(\"Unauthorized\")]\n    Unauthorized,\n}\n\n/// Error response format. 
Used as json encoding structure\n#[derive(Serialize)]\nstruct ErrorResponse {\n    code: u16,\n    error: String,\n    message: String,\n}\n\nimpl RestError {\n    pub fn name(&self) -> String {\n        match self {\n            Self::NotFound => \"NotFound\".to_string(),\n            Self::BadRequest => \"BadRequest\".to_string(),\n            Self::Internal => \"InternalError\".to_string(),\n            Self::Timeout => \"Timeout\".to_string(),\n            Self::IoError => \"IoError\".to_string(),\n            Self::NoTextFound => \"NoTextFound\".to_string(),\n            Self::FormatNotSupported => \"FormatNotSupported\".to_string(),\n            Self::Unauthorized => \"Unauthtorized\".to_string(),\n            _ => \"InternalError\".to_string(),\n        }\n    }\n}\n\n/// Implement ResponseError trait. Required for actix web\nimpl ResponseError for RestError {\n    fn status_code(&self) -> StatusCode {\n        match *self {\n            Self::NotFound => StatusCode::NOT_FOUND,\n            Self::BadRequest => StatusCode::BAD_REQUEST,\n            Self::Internal => StatusCode::INTERNAL_SERVER_ERROR,\n            Self::Timeout => StatusCode::REQUEST_TIMEOUT,\n            Self::FormatNotSupported => StatusCode::BAD_REQUEST,\n            Self::NoTextFound => StatusCode::SEE_OTHER,\n            Self::Unauthorized => StatusCode::UNAUTHORIZED,\n            _ => StatusCode::INTERNAL_SERVER_ERROR,\n        }\n    }\n\n    fn error_response(&self) -> HttpResponse {\n        let status_code = self.status_code();\n        let error_response = ErrorResponse {\n            code: status_code.as_u16(),\n            message: self.to_string(),\n            error: self.name(),\n        };\n        HttpResponse::build(status_code).json(error_response)\n    }\n}\n\nimpl From<super::Error> for RestError {\n    #[inline]\n    fn from(err: super::Error) -> Self {\n        eprintln!(\"Error: {:?}\", err);\n        match err {\n            crate::Error::NotFound => Self::NotFound,\n 
           _ => Self::Internal,\n        }\n    }\n}\n\nimpl From<std::io::Error> for RestError {\n    #[inline]\n    fn from(_: std::io::Error) -> Self {\n        Self::IoError\n    }\n}\n\nimpl From<BlockingError> for RestError {\n    #[inline]\n    fn from(_: BlockingError) -> Self {\n        Self::Internal\n    }\n}\n"
  },
  {
    "path": "lib/error/src/lib.rs",
    "content": "#[cfg(feature = \"web_error\")]\npub mod api_error;\n\nuse std::{fmt::Display, num::ParseIntError, string::FromUtf8Error};\nuse strum::ParseError;\n\n#[derive(Debug)]\npub enum Error {\n    NotFound,\n    ParseInt(ParseIntError),\n    Utf8Error(FromUtf8Error),\n    Utf8StrError(std::str::Utf8Error),\n    ParseError,\n    Undefined,\n    IoError(std::io::Error),\n    Unexpected,\n}\n\nimpl From<std::io::Error> for Error {\n    fn from(err: std::io::Error) -> Self {\n        Self::IoError(err)\n    }\n}\n\nimpl From<FromUtf8Error> for Error {\n    fn from(err: FromUtf8Error) -> Self {\n        Self::Utf8Error(err)\n    }\n}\n\nimpl From<ParseError> for Error {\n    fn from(err: ParseError) -> Self {\n        match err {\n            ParseError::VariantNotFound => Self::ParseError,\n        }\n    }\n}\n\nimpl From<ParseIntError> for Error {\n    fn from(err: ParseIntError) -> Self {\n        Self::ParseInt(err)\n    }\n}\n\nimpl From<std::str::Utf8Error> for Error {\n    fn from(err: std::str::Utf8Error) -> Self {\n        Self::Utf8StrError(err)\n    }\n}\n\nimpl Display for Error {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"{:?}\", self)\n    }\n}\n\nimpl std::error::Error for Error {}\n"
  },
  {
    "path": "lib/frontend/Cargo.toml",
    "content": "[package]\nname = \"frontend\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\nbuild = \"src/build.rs\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\njapanese = { path = \"../japanese\" }\nnews = { path = \"../news\"}\nsearch = { path = \"../search\" }\nerror = { path = \"../error\" }\nutils = { path = \"../utils\" }\nconfig = { path = \"../config\" }\nlocalization = { path = \"../localization\" }\nresources = { path = \"../resources\"}\ntypes = { path = \"../types\" , features = [\"jotoba_intern\"]}\nactix-web = \"4.3.1\"\nserde = \"1.0.171\"\nsentry = { version = \"0.31.5\", optional = true }\nlog = \"0.4.19\"\npercent-encoding = \"2.3.0\"\nitertools = \"0.11.0\"\njp_utils = { git = \"https://github.com/JojiiOfficial/jp_utils\", features = [\"furigana\"] }\n\n[dev-dependencies]\nructe = \"0.15.0\"\n\n[build-dependencies]\nructe = \"0.15.0\"\n\n[features]\nsentry_error = [\"sentry\"]\n"
  },
  {
    "path": "lib/frontend/src/about.rs",
    "content": "use std::sync::Arc;\n\n//use actix_session::Session;\nuse actix_web::{web, HttpRequest, HttpResponse};\nuse config::Config;\nuse localization::TranslationDict;\n\nuse crate::{\n    templates, user_settings, {BaseData, Site},\n};\n\n/// About page\npub async fn about(\n    locale_dict: web::Data<Arc<TranslationDict>>,\n    config: web::Data<Config>,\n    request: HttpRequest,\n) -> Result<HttpResponse, actix_web::Error> {\n    let settings = user_settings::parse(&request);\n\n    //session::init(&session, &settings);\n\n    Ok(HttpResponse::Ok().body(\n        render!(\n            templates::base,\n            BaseData::new(&locale_dict, settings, &config.asset_hash, &config)\n                .with_site(Site::About)\n        )\n        .render(),\n    ))\n}\n"
  },
  {
    "path": "lib/frontend/src/actix_ructe.rs",
    "content": "macro_rules! render {\n    ($template:path) => (super::actix_ructe::Render(|o| $template(o)));\n    ($template:path, $($arg:expr),*) => {{\n        use super::actix_ructe::Render;\n        Render(|o| $template(o, $($arg),*))\n    }};\n    ($template:path, $($arg:expr),* ,) => {{\n        use super::actix_ructe::Render;\n        Render(|o| $template(o, $($arg),*))\n    }};\n}\n\npub struct Render<T: FnOnce(&mut Vec<u8>) -> std::io::Result<()>>(pub T);\n\nimpl<T: FnOnce(&mut Vec<u8>) -> std::io::Result<()>> Render<T> {\n    pub fn render(self) -> Vec<u8> {\n        let mut bytes = Vec::new();\n        self.0(&mut bytes).unwrap();\n        bytes\n    }\n}\n"
  },
  {
    "path": "lib/frontend/src/build.rs",
    "content": "use ructe::{Result, Ructe};\n\nfn main() -> Result<()> {\n    let mut ructe = Ructe::from_env()?;\n    ructe.compile_templates(\"templates\")\n}\n"
  },
  {
    "path": "lib/frontend/src/direct.rs",
    "content": "use std::sync::Arc;\n\nuse actix_web::{web, HttpRequest, HttpResponse};\nuse config::Config;\nuse localization::TranslationDict;\nuse search::{\n    query::{Query, UserSettings},\n    sentence,\n    word::result::AddResData,\n};\nuse types::jotoba::{\n    search::SearchTarget,\n    words::{filter_languages, Word},\n};\n\nuse crate::{\n    og_tags::{self, TagKeyName},\n    search_ep::redirect_home,\n    templates, user_settings,\n    web_error::{self, Error},\n    BaseData, ResultData, SearchResult,\n};\n\n/// Endpoint to perform a search\npub async fn direct_ep(\n    h_query: web::Path<(u8, String)>,\n    locale_dict: web::Data<Arc<TranslationDict>>,\n    config: web::Data<Config>,\n    request: HttpRequest,\n) -> Result<HttpResponse, Error> {\n    let settings = user_settings::parse(&request);\n\n    let (stype, id) = h_query.into_inner();\n    let query_type = SearchTarget::try_from(stype).map_err(|_| Error::BadRequest)?;\n\n    let result_data = match query_type {\n        SearchTarget::Words => find_direct_word(&id, &settings).await,\n        SearchTarget::Names => find_direct_name(&id).await,\n        SearchTarget::Sentences => find_direct_sentence(&id, &settings).await,\n        SearchTarget::Kanji => return Ok(redirect_home()),\n    };\n\n    if let Err(err) = result_data {\n        return match err {\n            web_error::Error::NotFound => Err(err),\n            _ => Ok(redirect_home()),\n        };\n    }\n\n    let query = Query::default();\n    let mut base_data = BaseData::new(&locale_dict, settings, &config.asset_hash, &config)\n        .with_search_result(&query, result_data.unwrap(), None);\n\n    set_og_tag(&mut base_data, query_type);\n\n    Ok(HttpResponse::Ok().body(render!(templates::base, base_data).render()))\n}\n\nfn set_og_tag(base_data: &mut BaseData, query_type: SearchTarget) {\n    let search_result = base_data.site.as_search_result().unwrap();\n    let mut search_res_og = og_tags::TagSet::with_capacity(5);\n\n    let 
title = match query_type {\n        SearchTarget::Kanji => return,\n        SearchTarget::Sentences => \"Jotoba sentence\".to_string(),\n        SearchTarget::Names => format!(\"{} - Jotoba name\", search_res_val(&search_result).unwrap()),\n        SearchTarget::Words => format!(\"{} - Jotoba word\", search_res_val(&search_result).unwrap()),\n    };\n\n    let descrption = \"Jotoba entry. See more...\";\n\n    search_res_og.add_og(TagKeyName::Title, &title);\n    search_res_og.add_twitter(TagKeyName::Title, &title);\n    search_res_og.add_og(TagKeyName::Description, descrption);\n    search_res_og.add_twitter(TagKeyName::Description, descrption);\n    search_res_og.add_twitter(TagKeyName::Card, \"summary\");\n\n    base_data.set_og_tags(search_res_og);\n}\n\nfn search_res_val(res: &SearchResult) -> Option<String> {\n    Some(match &res.result {\n        ResultData::Word(w) => w.items[0].get_reading().reading.clone(),\n        ResultData::Name(n) => n[0].kanji.as_ref().unwrap_or(&n[0].kana).to_string(),\n        _ => return None,\n    })\n}\n\n/// Find direct word\npub async fn find_direct_word(id: &str, settings: &UserSettings) -> Result<ResultData, Error> {\n    let sequence_id: u32 = id.parse().map_err(|_| Error::NotFound)?;\n\n    let res_name = resources::get()\n        .words()\n        .by_sequence(sequence_id)\n        .ok_or(web_error::Error::NotFound)?\n        .clone();\n\n    let mut results = vec![res_name];\n\n    // also show enlgish if otherwise no results would be shown due users settings\n    let show_english = !results[0].has_language(settings.user_lang) || settings.show_english;\n    filter_languages(results.iter_mut(), (settings.user_lang, show_english));\n\n    let word = results.remove(0);\n\n    Ok(ResultData::Word(\n        search::executor::search_result::SearchResult::<Word, AddResData>::with_other_default(\n            vec![word],\n            1,\n        ),\n    ))\n    /*\n    Ok(ResultData::Word(WordResult {\n        items,\n        
count: 1,\n        contains_kanji,\n        inflection_info: None,\n        sentence_parts: None,\n        sentence_index: 0,\n        searched_query: String::new(),\n    }))*/\n}\n\n/// Find direct name\npub async fn find_direct_name(id: &str) -> Result<ResultData, Error> {\n    let sequence_id: u32 = id.parse().map_err(|_| Error::NotFound)?;\n\n    let res_word = resources::get()\n        .names()\n        .by_sequence(sequence_id)\n        .ok_or(web_error::Error::NotFound)?;\n\n    Ok(ResultData::Name(vec![res_word]))\n}\n\n/// Find direct sentence\npub async fn find_direct_sentence(id: &str, settings: &UserSettings) -> Result<ResultData, Error> {\n    let sequence_id: u32 = id.parse().map_err(|_| Error::NotFound)?;\n\n    let res_sentence = resources::get()\n        .sentences()\n        .by_id(sequence_id)\n        .ok_or(web_error::Error::NotFound)?;\n\n    let res_sentence =\n        sentence::result::Sentence::from_m_sentence(res_sentence, (settings.user_lang, true))\n            .unwrap();\n\n    use search::executor::search_result::SearchResult as SearchResult2;\n    Ok(ResultData::Sentence(SearchResult2 {\n        items: vec![res_sentence],\n        total: 1,\n        other_data: sentence::result::ResData::new(false),\n    }))\n}\n"
  },
  {
    "path": "lib/frontend/src/help_page.rs",
    "content": "use std::sync::Arc;\n\n//use actix_session::Session;\nuse actix_web::{web, HttpRequest, HttpResponse};\nuse config::Config;\nuse localization::TranslationDict;\n\nuse crate::{\n    templates, user_settings, {BaseData, Site},\n};\n\n/// About page\npub async fn help(\n    locale_dict: web::Data<Arc<TranslationDict>>,\n    request: HttpRequest,\n    config: web::Data<Config>,\n) -> Result<HttpResponse, actix_web::Error> {\n    let settings = user_settings::parse(&request);\n\n    //session::init(&session, &settings);\n\n    Ok(HttpResponse::Ok().body(\n        render!(\n            templates::base,\n            BaseData::new(&locale_dict, settings, &config.asset_hash, &config)\n                .with_site(Site::InfoPage)\n        )\n        .render(),\n    ))\n}\n"
  },
  {
    "path": "lib/frontend/src/index.rs",
    "content": "use std::sync::Arc;\n\n//use actix_session::Session;\nuse actix_web::{web, HttpRequest, HttpResponse};\nuse config::Config;\nuse localization::TranslationDict;\n\nuse crate::{\n    templates, user_settings, {BaseData, Site},\n};\n\n/// Homepage\npub async fn index(\n    locale_dict: web::Data<Arc<TranslationDict>>,\n    request: HttpRequest,\n    config: web::Data<Config>,\n) -> Result<HttpResponse, actix_web::Error> {\n    let settings = user_settings::parse(&request);\n\n    //session::init(&session, &settings);\n\n    Ok(HttpResponse::Ok().body(\n        render!(\n            templates::base_index,\n            BaseData::new(&locale_dict, settings, &config.asset_hash, &config)\n                .with_site(Site::Index)\n        )\n        .render(),\n    ))\n}\n"
  },
  {
    "path": "lib/frontend/src/lib.rs",
    "content": "include!(concat!(env!(\"OUT_DIR\"), \"/templates.rs\"));\n\n#[macro_use]\nmod actix_ructe;\n\npub mod about;\npub mod direct;\npub mod help_page;\npub mod index;\npub mod liveness;\npub mod news_ep;\npub mod og_tags;\npub mod search_ep;\n//pub mod search_help;\nmod session;\npub mod templ_utils;\npub mod unescaped;\nmod url_query;\npub mod user_settings;\npub mod web_error;\n\nuse std::fmt::Display;\n\nuse config::Config;\nuse localization::{\n    language::Language,\n    traits::{Translatable, TranslatablePlural},\n    TranslationDict,\n};\nuse news::NewsEntry;\nuse og_tags::TagKeyName;\nuse search::{executor::search_result::SearchResult as SearchResult2, query::Query};\n\nuse search::{kanji::result::Item as KanjiItem, query::UserSettings};\nuse types::jotoba::{\n    names::Name,\n    pagination::Pagination,\n    search::{help::SearchHelp, SearchTarget},\n    words::Word,\n};\nuse unescaped::{UnescapedStr, UnescapedString};\n\n/// Data for the base template\npub struct BaseData<'a> {\n    pub site: Site<'a>,\n    pub dict: &'a TranslationDict,\n    pub user_settings: UserSettings,\n    pub pagination: Option<Pagination>,\n    pub asset_hash: &'a str,\n    pub config: &'a Config,\n    pub og_tags: Option<og_tags::TagSet>,\n}\n\n/// The site to display\n#[derive(Clone)]\npub enum Site<'a> {\n    SearchResult(SearchResult<'a>),\n    Index,\n    About,\n    InfoPage,\n    News(Vec<NewsEntry>),\n}\n\n/// Search result data. 
Required by individual templates to render the result items\n#[derive(Clone, Debug)]\npub struct SearchResult<'a> {\n    pub query: &'a Query,\n    pub result: ResultData,\n    pub search_help: Option<SearchHelp>,\n}\n\n/// The particular search result items\n#[derive(Clone, Debug)]\npub enum ResultData {\n    Word(SearchResult2<Word, search::word::result::AddResData>),\n    KanjiInfo(Vec<KanjiItem>),\n    Name(Vec<&'static Name>),\n    Sentence(SearchResult2<search::sentence::result::Sentence, search::sentence::result::ResData>),\n}\n\nimpl<'a> BaseData<'a> {\n    #[inline]\n    pub fn new(\n        dict: &'a TranslationDict,\n        user_settings: UserSettings,\n        asset_hash: &'a str,\n        config: &'a Config,\n    ) -> Self {\n        Self {\n            site: Site::Index,\n            dict,\n            user_settings,\n            pagination: None,\n            asset_hash,\n            config,\n            og_tags: None,\n        }\n    }\n\n    #[inline]\n    pub fn with_site(mut self, site: Site<'a>) -> Self {\n        self.site = site;\n        self\n    }\n\n    #[inline]\n    pub fn with_cust_pages(\n        &mut self,\n        items: u32,\n        curr_page: u32,\n        items_per_page: u32,\n        max_pages: u32,\n    ) {\n        let mut pagination = Pagination {\n            items,\n            curr_page,\n            items_per_page,\n            max_pages,\n        };\n\n        // Don't show paginator if there is only one or no page\n        if pagination.get_last() <= 1 {\n            return;\n        }\n\n        if curr_page > pagination.get_last() {\n            pagination.curr_page = pagination.get_last();\n        }\n\n        self.pagination = Some(pagination);\n    }\n\n    #[inline]\n    pub fn with_pages(&mut self, items: u32, curr_page: u32) {\n        self.with_cust_pages(items, curr_page, self.user_settings.page_size, 100);\n    }\n\n    #[inline]\n    pub fn get_search_help(&self) -> Option<&SearchHelp> {\n        let help 
= self.site.as_search_result()?.search_help.as_ref()?;\n        (!help.is_empty()).then(|| help)\n    }\n\n    #[inline]\n    pub fn get_search_site_id(&self) -> u8 {\n        if let Site::SearchResult(ref res) = self.site {\n            return match res.result {\n                ResultData::Word(_) => 0,\n                ResultData::KanjiInfo(_) => 1,\n                ResultData::Sentence(_) => 2,\n                ResultData::Name(_) => 3,\n            };\n        }\n\n        0\n    }\n\n    #[inline]\n    pub fn get_search_site_name(&self) -> &str {\n        if let Site::SearchResult(ref res) = self.site {\n            return match res.result {\n                ResultData::Word(_) => self.gettext(\"Words\").as_str(),\n                ResultData::KanjiInfo(_) => self.gettext(\"Kanji\").as_str(),\n                ResultData::Sentence(_) => self.gettext(\"Sentences\").as_str(),\n                ResultData::Name(_) => self.gettext(\"Names\").as_str(),\n            };\n        }\n\n        self.gettext(\"Words\").as_str()\n    }\n\n    #[inline]\n    pub fn with_search_result(\n        self,\n        query: &'a Query,\n        result: ResultData,\n        search_help: Option<SearchHelp>,\n    ) -> Self {\n        let search_result = SearchResult {\n            query,\n            result,\n            search_help,\n        };\n        self.with_site(Site::SearchResult(search_result))\n    }\n\n    /// Gets an owned String of the query\n    pub fn get_query_str(&self) -> String {\n        let query = match &self.site {\n            Site::SearchResult(search_result) => {\n                Some(search_result.query.without_search_type_tags())\n            }\n            _ => None,\n        }\n        .unwrap_or_default();\n        query\n    }\n\n    /// Return a string 'selected' if the query_type in qs is equal to i\n    pub fn sel_str(&self, i: SearchTarget) -> &'static str {\n        let is_selected = match &self.site {\n            Site::SearchResult(search_result) => 
search_result.query.target == i,\n            _ => false,\n        };\n\n        if is_selected {\n            \"selected\"\n        } else {\n            \"\"\n        }\n    }\n\n    /// Returns true if the kanji compounds should be collapsed by default\n    pub fn kanji_copounds_collapsed(&self) -> bool {\n        self.pagination.as_ref().map(|i| i.get_last()).unwrap_or(0) > 1\n    }\n\n    /// Sets og tags which will overwrite the site-defaults if existing\n    pub fn set_og_tags(&mut self, tags: og_tags::TagSet) {\n        self.og_tags = Some(tags);\n    }\n\n    /// returns OG Tags\n    pub fn get_og_tags(&self) -> Option<og_tags::TagSet> {\n        if let Some(override_tags) = &self.og_tags {\n            return Some(override_tags.clone());\n        }\n        self.site.og_tags()\n    }\n\n    #[inline]\n    pub fn assets_path(&self) -> &str {\n        self.config.server.get_html_files()\n    }\n}\n\nimpl<'a> Site<'a> {\n    #[inline]\n    pub fn as_search_result(&self) -> Option<&SearchResult<'a>> {\n        if let Self::SearchResult(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n\n    /// Returns proper OG tags for the current site\n    pub fn og_tags(&self) -> Option<og_tags::TagSet> {\n        Some(match self {\n            Site::SearchResult(rs) => rs.og_tags(),\n            _ => default_og_tags(),\n        })\n    }\n}\n\nimpl ResultData {\n    /// Returns `true` if the ResultData does not contain any items\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        match self {\n            ResultData::Word(w) => w.items.is_empty() && w.sentence.is_none(),\n            ResultData::KanjiInfo(k) => k.is_empty(),\n            ResultData::Name(n) => n.is_empty(),\n            ResultData::Sentence(s) => s.items.is_empty(),\n        }\n    }\n}\n\nimpl<'a> SearchResult<'a> {\n    pub fn og_tags(&self) -> og_tags::TagSet {\n        let mut tags = og_tags::TagSet::with_capacity(5);\n\n        let search_type_name = 
self.search_type_ogg();\n        let query = &self.query.query_str;\n        let title = format!(\"Jotoba {search_type_name} search result for '{query}'\");\n        let description = self.og_tag_description();\n\n        tags.add_og(TagKeyName::Title, &title);\n        tags.add_og(TagKeyName::Description, &description);\n        tags.add_twitter(TagKeyName::Title, &title);\n        tags.add_twitter(TagKeyName::Description, &description);\n        tags.add_twitter(TagKeyName::Card, \"summary\");\n\n        tags\n    }\n\n    pub(crate) fn og_tag_description(&self) -> String {\n        format!(\"{} results. See more...\", self.result_count())\n    }\n\n    pub(crate) fn search_type_ogg(&self) -> &'static str {\n        match self.result {\n            ResultData::Word(_) => \"words\",\n            ResultData::KanjiInfo(_) => \"kanji\",\n            ResultData::Sentence(_) => \"sentences\",\n            ResultData::Name(_) => \"names\",\n        }\n    }\n\n    fn result_count(&self) -> usize {\n        match &self.result {\n            ResultData::Word(w) => w.items.len(),\n            ResultData::KanjiInfo(k) => k.len(),\n            ResultData::Name(n) => n.len(),\n            ResultData::Sentence(s) => s.items.len(),\n        }\n    }\n}\n\nfn default_og_tags() -> og_tags::TagSet {\n    let mut tags = og_tags::TagSet::new();\n    let description =  \"A powerful and free Japanese dictionary supporting words, kanji, sentences, and many different languages.\";\n\n    tags.add_og(TagKeyName::Title, \"Jotoba\");\n    tags.add_og(TagKeyName::Description, description);\n    tags.add_og(TagKeyName::URL, \"https://jotoba.de\");\n\n    tags.add_twitter(TagKeyName::Title, \"Jotoba\");\n    tags.add_twitter(TagKeyName::Description, description);\n\n    tags\n}\n\n/// Translation helper\nimpl<'a> BaseData<'a> {\n    #[inline]\n    pub fn get_lang(&self) -> Language {\n        self.user_settings.page_lang\n    }\n\n    #[inline]\n    pub fn gettext<T: Translatable>(&self, t: 
T) -> UnescapedStr<'a> {\n        t.gettext(&self.dict, Some(self.get_lang())).into()\n    }\n\n    #[inline]\n    pub fn gettext_custom<T: Translatable>(&self, t: T) -> UnescapedString {\n        t.gettext_custom(&self.dict, Some(self.get_lang())).into()\n    }\n\n    #[inline]\n    pub fn pgettext<T: Translatable>(&self, t: T, context: &'a str) -> UnescapedStr<'a> {\n        t.pgettext(&self.dict, context, Some(self.get_lang()))\n            .into()\n    }\n\n    #[inline]\n    pub fn ngettext<T: TranslatablePlural>(&self, t: T, n: u64) -> UnescapedStr<'a> {\n        t.ngettext(&self.dict, n, Some(self.get_lang())).into()\n    }\n\n    #[inline]\n    pub fn pngettext<T: TranslatablePlural>(\n        &self,\n        t: T,\n        context: &'a str,\n        n: u64,\n    ) -> UnescapedStr<'a> {\n        t.npgettext(&self.dict, context, n, Some(self.get_lang()))\n            .into()\n    }\n\n    // Format functions\n\n    #[inline]\n    pub fn gettext_fmt<T: Translatable, V: Display + Sized + Clone>(\n        &self,\n        t: T,\n        values: &[V],\n    ) -> UnescapedString {\n        t.gettext_fmt(&self.dict, values, Some(self.get_lang()))\n            .into()\n    }\n\n    #[inline]\n    pub fn pgettext_fmt<T: Translatable, V: Display + Sized + Clone>(\n        &self,\n        t: T,\n        context: &'a str,\n        values: &[V],\n    ) -> UnescapedString {\n        t.pgettext_fmt(&self.dict, context, values, Some(self.get_lang()))\n            .into()\n    }\n\n    #[inline]\n    pub fn ngettext_fmt<T: TranslatablePlural, V: Display + Sized + Clone>(\n        &self,\n        t: T,\n        n: u64,\n        values: &[V],\n    ) -> UnescapedString {\n        t.ngettext_fmt(&self.dict, n, values, Some(self.get_lang()))\n            .into()\n    }\n\n    #[inline]\n    pub fn pngettext_fmt<T: TranslatablePlural, V: Display + Sized + Clone>(\n        &self,\n        t: T,\n        context: &'a str,\n        n: u64,\n        values: &[V],\n    ) -> 
UnescapedString {\n        t.npgettext_fmt(&self.dict, context, n, values, Some(self.get_lang()))\n            .into()\n    }\n\n    #[inline]\n    pub fn gt_search_link<T: Translatable, V: Display + Sized + Clone>(\n        &self,\n        t: T,\n        value: V,\n    ) -> UnescapedString {\n        let link = format_search_link(value);\n        t.gettext_fmt(&self.dict, &[link], Some(self.get_lang()))\n            .into()\n    }\n\n    #[inline]\n    pub fn gt_search_links<T: Translatable, V: Display + Sized + Clone>(\n        &self,\n        t: T,\n        link: usize,\n        values: &[V],\n    ) -> UnescapedString {\n        let mut values = values.iter().map(|i| i.to_string()).collect::<Vec<_>>();\n        values[link] = format_search_link(&values[link]);\n        t.gettext_fmt(&self.dict, &values, Some(self.get_lang()))\n            .into()\n    }\n\n    #[inline]\n    pub fn ngt_search_links<T: TranslatablePlural, V: Display + Sized + Clone>(\n        &self,\n        t: T,\n        link: usize,\n        values: &[V],\n        n: u64,\n    ) -> UnescapedString {\n        let mut values = values.iter().map(|i| i.to_string()).collect::<Vec<_>>();\n        values[link] = format_search_link(&values[link]);\n        t.ngettext_fmt(&self.dict, n, &values, Some(self.get_lang()))\n            .into()\n    }\n}\n\nfn format_search_link<V: Display + Sized + Clone>(input: V) -> String {\n    format!(\n        \"<a class='clickable no-align green' href='/search/{}'>{}</a>\",\n        input, input\n    )\n}\n"
  },
  {
    "path": "lib/frontend/src/liveness.rs",
    "content": "use actix_web::HttpResponse;\n\npub async fn ready() -> HttpResponse {\n    HttpResponse::Ok().finish()\n}\n\npub async fn healthy() -> HttpResponse {\n    HttpResponse::Ok().finish()\n}\n"
  },
  {
    "path": "lib/frontend/src/news_ep.rs",
    "content": "use std::sync::Arc;\n\n//use actix_session::Session;\nuse actix_web::{web, HttpRequest, HttpResponse};\nuse config::Config;\nuse localization::TranslationDict;\n\nuse crate::{\n    templates, user_settings, {BaseData, Site},\n};\n\n/// News page\npub async fn news(\n    locale_dict: web::Data<Arc<TranslationDict>>,\n    config: web::Data<Config>,\n    request: HttpRequest,\n) -> Result<HttpResponse, actix_web::Error> {\n    let settings = user_settings::parse(&request);\n\n    //session::init(&session, &settings);\n\n    let news = news::get().last_entries(5).cloned().collect::<Vec<_>>();\n\n    Ok(HttpResponse::Ok().body(\n        render!(\n            templates::base,\n            BaseData::new(&locale_dict, settings, &config.asset_hash, &config)\n                .with_site(Site::News(news))\n        )\n        .render(),\n    ))\n}\n"
  },
  {
    "path": "lib/frontend/src/og_tags.rs",
    "content": "use crate::unescaped::UnescapedString;\nuse itertools::Itertools;\n\n/// Set of tags which can be rendered as HTML\n#[derive(Clone)]\npub struct TagSet {\n    tags: Vec<Tag>,\n}\n\n#[derive(Clone, PartialEq)]\npub struct Tag {\n    pub key: TagKey,\n    pub value: String,\n}\n\n#[derive(Clone, Copy, PartialEq)]\npub enum TagKey {\n    Og(TagKeyName),\n    Twitter(TagKeyName),\n}\n\n#[derive(Clone, Copy, PartialEq)]\npub enum TagKeyName {\n    Title,\n    Type,\n    Description,\n    URL,\n    Card,\n}\n\nimpl TagSet {\n    /// Creates a new empty tag set\n    #[inline]\n    pub(crate) fn new() -> Self {\n        TagSet { tags: vec![] }\n    }\n\n    /// Creates a new empty tag set with n capacity\n    #[inline]\n    pub(crate) fn with_capacity(cap: usize) -> Self {\n        TagSet {\n            tags: Vec::with_capacity(cap),\n        }\n    }\n\n    /// Adds a new og tag to the `TagSet`\n    #[inline]\n    pub fn add_og<S: AsRef<str>>(&mut self, key: TagKeyName, value: S) {\n        let key = TagKey::Og(key);\n        self.add(Tag::new(key, value))\n    }\n\n    /// Adds a new twitter tag to the `TagSet`\n    #[inline]\n    pub fn add_twitter<S: AsRef<str>>(&mut self, key: TagKeyName, value: S) {\n        let key = TagKey::Twitter(key);\n        self.add(Tag::new(key, value))\n    }\n\n    /// Adds a tag to the `TagSet`\n    #[inline]\n    pub fn add(&mut self, tag: Tag) {\n        self.tags.push(tag);\n    }\n\n    /// Sets the value of an og tag. Returns `None` if no og tag with `key` found\n    #[inline]\n    pub fn set_og_tag<S: AsRef<str>>(&mut self, key: TagKeyName, value: S) -> Option<()> {\n        self.set_tag(TagKey::Og(key), value)\n    }\n\n    /// Sets the value of a twitter tag. Returns `None` if no twitter tag with `key` found\n    #[inline]\n    pub fn set_twitter_tag<S: AsRef<str>>(&mut self, key: TagKeyName, value: S) -> Option<()> {\n        self.set_tag(TagKey::Twitter(key), value)\n    }\n\n    /// Sets the value of a tag. 
Returns `None` if no tag with `key` found\n    #[inline]\n    pub fn set_tag<S: AsRef<str>>(&mut self, key: TagKey, value: S) -> Option<()> {\n        self.tags.iter_mut().find(|i| i.key == key)?.value = value.as_ref().to_string();\n        Some(())\n    }\n\n    /// Render the `TagSet`\n    #[inline]\n    pub fn render(&self) -> String {\n        self.tags.iter().map(|i| i.render()).join(\"\\n\\t\")\n    }\n\n    /// Render the `TagSet` unescaped (for use in HTML)\n    #[inline]\n    pub fn render_unescaped(&self) -> UnescapedString {\n        self.render().into()\n    }\n}\n\nimpl Tag {\n    /// Creates a new tag\n    pub fn new<S: AsRef<str>>(key: TagKey, value: S) -> Self {\n        let value = value.as_ref().trim().to_string();\n        Self { key, value }\n    }\n\n    /// Renders a single tag to HTML\n    #[inline]\n    pub fn render(&self) -> String {\n        let key_attr = match self.key {\n            TagKey::Og(og) => format!(\"property=\\\"og:{}\\\"\", og.as_ref()),\n            TagKey::Twitter(twitter) => format!(\"property=\\\"twitter:{}\\\"\", twitter.as_ref()),\n        };\n        format!(\"<meta {key_attr} content=\\\"{}\\\"/>\", self.value)\n    }\n}\n\nimpl TagKey {\n    /// Create a new og tag key\n    #[inline]\n    pub fn new_og(tag_name: TagKeyName) -> Self {\n        TagKey::Og(tag_name)\n    }\n\n    /// Create a new twitter key\n    #[inline]\n    pub fn new_twitter(tag_name: TagKeyName) -> Self {\n        TagKey::Twitter(tag_name)\n    }\n}\n\nimpl AsRef<str> for TagKeyName {\n    #[inline]\n    fn as_ref(&self) -> &str {\n        match self {\n            TagKeyName::Title => \"title\",\n            TagKeyName::Type => \"type\",\n            TagKeyName::Description => \"description\",\n            TagKeyName::URL => \"url\",\n            TagKeyName::Card => \"card\",\n        }\n    }\n}\n"
  },
  {
    "path": "lib/frontend/src/search_ep.rs",
    "content": "use super::user_settings;\nuse super::web_error;\nuse crate::{\n    templates,\n    url_query::{NoJSQueryStruct, QueryStruct},\n    BaseData, ResultData,\n};\nuse actix_web::{web, HttpRequest, HttpResponse};\nuse config::Config;\nuse localization::TranslationDict;\nuse percent_encoding::percent_decode;\nuse search::SearchExecutor;\nuse search::{\n    self,\n    query::{Query, UserSettings},\n};\nuse std::{sync::Arc, time::Instant};\nuse types::jotoba::search::help::SearchHelp;\nuse types::jotoba::search::SearchTarget;\n\n/// Endpoint to perform a search\npub async fn search_ep_no_js(\n    query_data: web::Query<NoJSQueryStruct>,\n    locale_dict: web::Data<Arc<TranslationDict>>,\n    config: web::Data<Config>,\n    request: HttpRequest,\n) -> Result<HttpResponse, web_error::Error> {\n    let (query_data, query) = query_data.0.to_query_struct();\n    search(query, query_data, locale_dict, config, request).await\n}\n\n/// Endpoint to perform a search\npub async fn search_ep(\n    query: web::Path<String>,\n    query_data: web::Query<QueryStruct>,\n    locale_dict: web::Data<Arc<TranslationDict>>,\n    config: web::Data<Config>,\n    request: HttpRequest,\n) -> Result<HttpResponse, web_error::Error> {\n    let query = percent_decode(query.as_bytes()).decode_utf8()?.to_string();\n    search(query, query_data.0, locale_dict, config, request).await\n}\n\nasync fn search(\n    query: String,\n    query_data: QueryStruct,\n    locale_dict: web::Data<Arc<TranslationDict>>,\n    config: web::Data<Config>,\n    request: HttpRequest,\n) -> Result<HttpResponse, web_error::Error> {\n    let settings = user_settings::parse(&request);\n\n    // Parse query and redirect to home on error\n    let query = match query_data\n        .adjust(query.to_string())\n        .as_query_parser(settings)\n        .parse()\n    {\n        Some(k) => k,\n        None => return Ok(redirect_home()),\n    };\n\n    let start = Instant::now();\n\n    // Log search duration if too long 
and available\n    let search_result = do_search(query.target, &locale_dict, settings, &query, &config).await?;\n\n    log::debug!(\n        \"{:?} search for {:?} took {:?}\",\n        query.target,\n        query.query_str,\n        start.elapsed()\n    );\n\n    Ok(HttpResponse::Ok().body(render!(templates::base, search_result).render()))\n}\n\n/// Run the search and return the `BaseData` for the result page to render\nasync fn do_search<'a>(\n    querytype: SearchTarget,\n    locale_dict: &'a TranslationDict,\n    settings: UserSettings,\n    query: &'a Query,\n    config: &'a Config,\n) -> Result<BaseData<'a>, web_error::Error> {\n    let mut base_data = BaseData::new(locale_dict, settings, &config.asset_hash, &config);\n\n    let result_data = match querytype {\n        SearchTarget::Kanji => kanji_search(&mut base_data, &query).await,\n        SearchTarget::Sentences => sentence_search(&mut base_data, &query).await,\n        SearchTarget::Names => name_search(&mut base_data, &query).await,\n        SearchTarget::Words => word_search(&mut base_data, &query).await,\n    }?;\n\n    let mut search_help: Option<SearchHelp> = None;\n    if result_data.is_empty() {\n        let query = query.to_owned();\n        search_help = web::block(move || search::build_help(querytype, &query)).await?;\n    }\n\n    Ok(base_data.with_search_result(query, result_data, search_help))\n}\n\ntype SResult = Result<ResultData, web_error::Error>;\n\n/// Perform a sentence search\nasync fn sentence_search<'a>(base_data: &mut BaseData<'a>, query: &'a Query) -> SResult {\n    let q = query.to_owned();\n\n    //let result = web::block(move || search::sentence::Search::new(&q).search()).await??;\n    let result = web::block(move || {\n        let s = search::sentence::Search::new(&q);\n        search::SearchExecutor::new(s).run()\n    })\n    .await?;\n\n    base_data.with_pages(result.total as u32, query.page as u32);\n    Ok(ResultData::Sentence(result))\n}\n\n/// Perform a kanji 
search\nasync fn kanji_search<'a>(base_data: &mut BaseData<'a>, query: &'a Query) -> SResult {\n    let q = query.to_owned();\n    let result = web::block(move || search::kanji::search(&q)).await??;\n    base_data.with_cust_pages(\n        result.total_len as u32,\n        query.page as u32,\n        query.settings.page_size,\n        400,\n    );\n    Ok(ResultData::KanjiInfo(result.items))\n}\n\n/// Perform a name search\nasync fn name_search<'a>(base_data: &mut BaseData<'a>, query: &'a Query) -> SResult {\n    let q = query.to_owned();\n    let result = web::block(move || {\n        let search = search::name::Search::new(&q);\n        SearchExecutor::new(search).run()\n    })\n    .await?;\n\n    base_data.with_pages(result.total as u32, query.page as u32);\n    Ok(ResultData::Name(result.items))\n}\n\n/// Perform a word search\nasync fn word_search<'a>(base_data: &mut BaseData<'a>, query: &'a Query) -> SResult {\n    let q = query.to_owned();\n    let result = web::block(move || {\n        let search = search::word::Search::new(&q);\n        SearchExecutor::new(search).run()\n    })\n    .await?;\n\n    base_data.with_pages(result.total as u32, query.page as u32);\n    Ok(ResultData::Word(result))\n}\n\npub(crate) fn redirect_home() -> HttpResponse {\n    HttpResponse::MovedPermanently()\n        .append_header((\"Location\", \"/\"))\n        .finish()\n}\n\n/// Reports a search timeout to sentry\n#[cfg(feature = \"sentry_error\")]\nfn report_timeout(request: &HttpRequest, query: &Query) {\n    use sentry::{protocol::Event, Level};\n    let msg = format!(\"{:?}-search \\\"{}\\\" timed out\", query.type_, query.query);\n    sentry::capture_event(Event {\n        request: Some(sentry_request_from_http(request)),\n        level: Level::Error,\n        message: Some(msg),\n        ..Default::default()\n    });\n}\n\n/// Build a Sentry request struct from the HTTP request\n#[cfg(feature = \"sentry_error\")]\nfn sentry_request_from_http(request: &HttpRequest) -> 
sentry::protocol::Request {\n    use sentry::protocol::Request;\n\n    let sentry_req = Request {\n        url: format!(\n            \"{}://{}{}\",\n            request.connection_info().scheme(),\n            request.connection_info().host(),\n            request.uri()\n        )\n        .parse()\n        .ok(),\n        method: Some(request.method().to_string()),\n        headers: request\n            .headers()\n            .iter()\n            .map(|(k, v)| (k.to_string(), v.to_str().unwrap_or_default().to_string()))\n            .collect(),\n        ..Default::default()\n    };\n\n    sentry_req\n}\n\n#[cfg(feature = \"sentry_error\")]\nfn log_duration(search_type: SearchTarget, duration: Duration) {\n    sentry::capture_message(\n        format!(\"{:?}-search took: {:?}\", search_type, duration).as_str(),\n        sentry::Level::Warning,\n    );\n}\n"
  },
  {
    "path": "lib/frontend/src/search_help.rs",
    "content": "use types::jotoba::{\n    language::Language as ResLanguage,\n    search::{guess::Guess, QueryType},\n};\n\n/// Structure containing information for better search help in case no item was\n/// found in a search\n#[derive(Clone, Default, Debug)]\npub struct SearchHelp {\n    pub words: Option<Guess>,\n    pub names: Option<Guess>,\n    pub sentences: Option<Guess>,\n    pub kanji: Option<Guess>,\n    pub other_langs: Vec<ResLanguage>,\n}\n\nimpl SearchHelp {\n    /// Returns `true` if `SearchHelp` is not helpful at all (empty)\n    pub fn is_empty(&self) -> bool {\n        self.iter_items().next().is_none()\n    }\n\n    /// Returns an iterator over all (QueryType, Guess) pairs that have a value\n    pub fn iter_items(&self) -> impl Iterator<Item = (QueryType, Guess)> {\n        let types = &[\n            (self.words, QueryType::Words),\n            (self.names, QueryType::Names),\n            (self.sentences, QueryType::Sentences),\n            (self.kanji, QueryType::Kanji),\n        ];\n\n        types\n            .iter()\n            .filter_map(|i| i.0.is_some().then(|| (i.1, i.0.unwrap())))\n            .filter(|i| i.1.value != 0)\n            .collect::<Vec<_>>()\n            .into_iter()\n    }\n\n    pub fn iter_langs(&self) -> impl Iterator<Item = (ResLanguage, &'static str)> + '_ {\n        self.other_langs\n            .iter()\n            .map(|lang| (*lang, lang.to_query_format()))\n    }\n}\n"
  },
  {
    "path": "lib/frontend/src/session.rs",
    "content": "/*\nuse actix_session::Session;\nuse search::query::UserSettings;\n\n// Initializes the session. Returns a session id if user didn't opt out\npub(super) fn init(session: &Session, settings: &UserSettings) -> Option<String> {\n    None\n    // User opted out\n    if !settings.cookies_enabled {\n        session.purge();\n        return None;\n    }\n\n    // Reads or generates a new session id\n    let session_id = match session.get::<String>(\"id\").ok()? {\n        Some(v) => v,\n        None => {\n            let new_id = utils::rand_alpha_numeric(30);\n            session.set(\"id\", new_id.clone()).ok()?;\n            new_id\n        }\n    };\n\n    Some(session_id)\n}\n    */\n"
  },
  {
    "path": "lib/frontend/src/templ_utils.rs",
    "content": "use itertools::Itertools;\nuse jp_utils::furi::{parse::FuriParser, segment::SegmentRef};\nuse localization::{traits::Translatable, TranslationDict};\nuse search::executor::search_result::SearchResult;\nuse types::jotoba::{\n    kanji::Kanji,\n    language::{param::AsLangParam, Language},\n    names::Name,\n    words::{filter_languages, sense::Sense, Word},\n};\n\nuse crate::unescaped::UnescapedString;\n\n/// Returns a list of all collocations of a word\npub fn get_collocations(word: &Word, lang: impl AsLangParam) -> Vec<(String, String)> {\n    if !word.has_collocations() {\n        return vec![];\n    }\n\n    let word_storage = resources::get().words();\n\n    let mut words = word\n        .collocations\n        .as_ref()\n        .unwrap()\n        .iter()\n        .filter_map(|i| word_storage.by_sequence(*i))\n        .cloned()\n        .collect::<Vec<_>>();\n\n    filter_languages(words.iter_mut(), lang);\n\n    words\n        .into_iter()\n        .map(|word| {\n            let senses: Vec<String> = word\n                .get_senses_with_en()\n                .into_iter()\n                .flatten()\n                .take(5)\n                .map(|i| i.glosses)\n                .flatten()\n                .map(|i| i.gloss)\n                .collect();\n\n            let reading = word.reading.kanji.unwrap_or(word.reading.kana).reading;\n\n            (reading, senses.join(\", \"))\n        })\n        .collect()\n}\n\n#[inline]\npub fn unescaped_string<T: ToString>(s: T) -> UnescapedString {\n    UnescapedString::new(s)\n}\n\n/// Returns the transive version of `word`\n#[inline]\npub fn get_transitive_counterpart(word: &Word) -> Option<Word> {\n    let seq_id = word.transive_version.as_ref()?.get();\n    resources::get().words().by_sequence(seq_id).cloned()\n}\n\n/// Returns the intransive version of `word`\n#[inline]\npub fn get_intransitive_counterpart(word: &Word) -> Option<Word> {\n    let seq_id = 
word.intransive_version.as_ref()?.get();\n    resources::get().words().by_sequence(seq_id).cloned()\n}\n\n/// Returns an example sentences of a `sense` if existing.\n/// tries to use a sentence written in `language` or falls back to english\npub fn ext_sentence(\n    sense: &Sense,\n    language: &Language,\n) -> Option<(Vec<SegmentRef<'static>>, &'static str)> {\n    let sentence = resources::get()\n        .sentences()\n        .by_id(sense.example_sentence?)?;\n\n    let translation = sentence\n        .translation_for(*language)\n        .or_else(|| sentence.translation_for(Language::English))?;\n\n    // let furigana = furigana::parse::unchecked(&sentence.furigana);\n    // We check furigana at preprocessing so we can unwrap here.\n    let furigana = FuriParser::new(&sentence.furigana).to_vec().unwrap();\n    Some((furigana, translation))\n}\n\npub fn get_types_humanized(\n    name: &Name,\n    dict: &TranslationDict,\n    lang: localization::language::Language,\n) -> String {\n    if let Some(ref n_types) = name.name_type {\n        n_types\n            .iter()\n            .filter_map(|i| (!i.is_gender()).then(|| i.pgettext(dict, \"name_type\", Some(lang))))\n            .join(\", \")\n    } else {\n        String::from(\"\")\n    }\n}\n\npub fn word_kanji<O>(res: &SearchResult<Word, O>) -> Vec<Kanji> {\n    search::word::kanji::load_word_kanji_info(&res.items)\n}\n\npub fn has_kanji<O>(res: &SearchResult<Word, O>) -> bool {\n    !word_kanji(res).is_empty()\n}\n"
  },
  {
    "path": "lib/frontend/src/unescaped.rs",
    "content": "use std::{\n    fmt::Display,\n    io::{self, Write},\n};\n\nuse crate::templates::ToHtml;\n\n/// Unescaped owned String\npub type UnescapedString = Unescaped<String>;\n\n/// Unescaped &str\npub type UnescapedStr<'a> = Unescaped<&'a str>;\n\n/// Write something unescaped\npub struct Unescaped<T: Display>(T);\n\nimpl ToHtml for Unescaped<String> {\n    #[inline]\n    fn to_html(&self, out: &mut dyn Write) -> io::Result<()> {\n        write!(out, \"{}\", self.0)\n    }\n}\n\nimpl<'a> ToHtml for Unescaped<&'a str> {\n    #[inline]\n    fn to_html(&self, out: &mut dyn Write) -> io::Result<()> {\n        write!(out, \"{}\", self.0)\n    }\n}\n\nimpl<'a> From<&'a str> for UnescapedStr<'a> {\n    #[inline]\n    fn from(s: &'a str) -> Self {\n        Unescaped(s)\n    }\n}\n\nimpl From<String> for UnescapedString {\n    #[inline]\n    fn from(s: String) -> Self {\n        Unescaped(s)\n    }\n}\n\nimpl Unescaped<String> {\n    #[inline]\n    pub fn new<T: ToString>(t: T) -> Self {\n        Unescaped(t.to_string())\n    }\n}\n\nimpl<'a> Unescaped<&'a str> {\n    #[inline]\n    pub fn new(t: &'a str) -> Self {\n        Unescaped(t)\n    }\n}\n\nimpl<T: Display> ToString for Unescaped<T> {\n    #[inline]\n    fn to_string(&self) -> String {\n        self.0.to_string()\n    }\n}\n\nimpl<T: Display> Into<String> for Unescaped<T> {\n    #[inline]\n    fn into(self) -> String {\n        (&self).into()\n    }\n}\n\nimpl<T: Display> Into<String> for &Unescaped<T> {\n    #[inline]\n    fn into(self) -> String {\n        format!(\"{}\", self.0)\n    }\n}\n\nimpl<T: AsRef<str> + Display> AsRef<str> for Unescaped<T> {\n    #[inline]\n    fn as_ref(&self) -> &str {\n        self.0.as_ref()\n    }\n}\n\nimpl<'a> Unescaped<&'a str> {\n    /// Returns a string reference of the unescaped value\n    #[inline]\n    pub fn as_str(&self) -> &'a str {\n        self.0.as_ref()\n    }\n}\n\nimpl Unescaped<String> {\n    /// Returns a string reference of the unescaped value\n    
#[inline]\n    pub fn as_str(&self) -> &str {\n        self.0.as_ref()\n    }\n}\n"
  },
  {
    "path": "lib/frontend/src/url_query.rs",
    "content": "use std::str::FromStr;\n\nuse search::{\n    self,\n    query::{parser::QueryParser, UserSettings},\n};\nuse serde::{Deserialize, Deserializer};\nuse types::jotoba::{language::Language, search::SearchTarget};\n\n#[derive(Deserialize)]\npub struct QueryStruct {\n    #[serde(rename = \"t\")]\n    pub search_type: Option<SearchTarget>,\n    #[serde(rename = \"i\")]\n    pub word_index: Option<usize>,\n    #[serde(rename = \"p\", default = \"default_page\")]\n    pub page: usize,\n\n    #[serde(default, rename = \"l\", deserialize_with = \"deserialize_lang\")]\n    pub lang_overwrite: Option<Language>,\n\n    #[serde(skip)]\n    pub query_str: String,\n}\n\nimpl QueryStruct {\n    /// Adjusts the search query trim and map empty search queries to Option::None.\n    /// Ensures `search_type` is always 'Some()'\n    pub fn adjust(&self, query_str: String) -> Self {\n        let query_str = query_str.trim().to_string();\n\n        let page = if self.page == 0 {\n            default_page()\n        } else {\n            self.page\n        };\n\n        QueryStruct {\n            query_str,\n            search_type: Some(self.search_type.unwrap_or_default()),\n            page,\n            word_index: self.word_index,\n            lang_overwrite: self.lang_overwrite,\n        }\n    }\n\n    /// Returns a [`QueryParser`] of the query\n    #[inline]\n    pub fn as_query_parser(&self, user_settings: UserSettings) -> QueryParser {\n        let mut q_parser = QueryParser::new(\n            self.query_str.clone(),\n            self.search_type.unwrap_or_default(),\n            user_settings,\n        )\n        .with_page(self.page)\n        .with_word_index(self.word_index.unwrap_or_default());\n\n        if let Some(lang) = self.lang_overwrite {\n            q_parser = q_parser.with_lang_overwrite(lang);\n        }\n\n        q_parser\n    }\n}\n\n#[inline]\nfn default_page() -> usize {\n    1\n}\n\n/// Query format for js fallback queries of the format 
http://127.0.0.1:8080/search?t=0&s=world\n/// instead of the query being an url parameter\n#[derive(Deserialize)]\npub struct NoJSQueryStruct {\n    #[serde(rename = \"s\")]\n    pub query: String,\n    #[serde(rename = \"t\")]\n    pub search_type: Option<SearchTarget>,\n    #[serde(rename = \"i\")]\n    pub word_index: Option<usize>,\n    #[serde(rename = \"p\", default = \"default_page\")]\n    pub page: usize,\n\n    #[serde(default, rename = \"l\", deserialize_with = \"deserialize_lang\")]\n    pub lang_overwrite: Option<Language>,\n}\n\nimpl NoJSQueryStruct {\n    /// Converts a NoJSQueryStruct into a QueryStruct and the query string\n    pub(crate) fn to_query_struct(self) -> (QueryStruct, String) {\n        let query_struct = QueryStruct {\n            page: self.page,\n            word_index: self.word_index,\n            search_type: self.search_type,\n            query_str: String::new(),\n            lang_overwrite: self.lang_overwrite,\n        };\n\n        (query_struct, self.query)\n    }\n}\n\n/// Deserializes a field into a Option<Language>. None if invalid lang-str or Deserializing str\n/// failed\nfn deserialize_lang<'de, D>(s: D) -> Result<Option<Language>, D::Error>\nwhere\n    D: Deserializer<'de>,\n{\n    return Ok(Language::from_str(&String::deserialize(s)?).ok());\n}\n"
  },
  {
    "path": "lib/frontend/src/user_settings.rs",
    "content": "use std::str::FromStr;\n\nuse actix_web::HttpRequest;\nuse search::query::UserSettings;\nuse types::jotoba::language::Language;\n\n/// Parses user settings from a `HttpRequest`\npub(super) fn parse(request: &HttpRequest) -> UserSettings {\n    let show_english = request\n        .cookie(\"show_english\")\n        .and_then(|i| i.value().parse().ok())\n        .unwrap_or_else(|| UserSettings::default().show_english);\n\n    let user_lang = request\n        .cookie(\"default_lang\")\n        .and_then(|i| Language::from_str(i.value()).ok())\n        .unwrap_or_default();\n\n    let page_lang = request\n        .cookie(\"page_lang\")\n        .and_then(|i| localization::language::Language::from_str(i.value()).ok())\n        .unwrap_or_default();\n\n    let english_on_top = request\n        .cookie(\"show_english_on_top\")\n        .and_then(|i| i.value().parse().ok())\n        .unwrap_or_else(|| UserSettings::default().english_on_top)\n        && show_english;\n\n    let items_per_page = request\n        .cookie(\"items_per_page\")\n        .and_then(|i| i.value().parse().ok())\n        .unwrap_or_else(|| UserSettings::default().page_size);\n\n    let example_sentences_enabled = request\n        .cookie(\"show_sentences\")\n        .and_then(|i| Some(i.value() == \"true\"))\n        .unwrap_or_else(|| UserSettings::default().show_example_sentences);\n\n    let sentence_furigana = request\n        .cookie(\"sentence_furigana\")\n        .and_then(|i| Some(i.value() == \"true\"))\n        .unwrap_or_else(|| UserSettings::default().sentence_furigana);\n\n    UserSettings {\n        user_lang,\n        show_english,\n        english_on_top,\n        page_lang,\n        page_size: items_per_page,\n        show_example_sentences: example_sentences_enabled,\n        sentence_furigana,\n        ..Default::default()\n    }\n}\n"
  },
  {
    "path": "lib/frontend/src/web_error.rs",
    "content": "use actix_web::{error::BlockingError, http::StatusCode, HttpResponse, ResponseError};\n\n#[cfg(not(feature = \"sentry_error\"))]\nuse log::error;\n\nuse crate::templates;\n\n#[derive(Debug)]\npub enum Error {\n    Internal,\n    NotFound,\n    SearchTimeout,\n    BadRequest,\n}\n\nimpl std::fmt::Display for Error {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"{:?}\", self)\n    }\n}\n\n/// Informatin to print on the error page\npub struct InfoText {\n    pub primary: &'static str,\n    pub secondary: &'static str,\n}\n\n// Treat all crate::error::Error as Internal error\nimpl From<error::Error> for Error {\n    fn from(err: error::Error) -> Self {\n        #[cfg(feature = \"sentry_error\")]\n        sentry::capture_error(&err);\n\n        #[cfg(not(feature = \"sentry_error\"))]\n        error!(\"{}\", err);\n\n        Self::Internal\n    }\n}\n\nimpl ResponseError for Error {\n    fn status_code(&self) -> StatusCode {\n        match self {\n            Error::Internal => StatusCode::INTERNAL_SERVER_ERROR,\n            Error::NotFound => StatusCode::NOT_FOUND,\n            Error::SearchTimeout => StatusCode::REQUEST_TIMEOUT,\n            Error::BadRequest => StatusCode::BAD_REQUEST,\n        }\n    }\n\n    fn error_response(&self) -> HttpResponse {\n        // Render the error template\n        HttpResponse::Ok().body(\n            render!(\n                templates::error_page,\n                self.status_code().as_u16(),\n                self.get_info_text()\n            )\n            .render(),\n        )\n    }\n}\n\nimpl From<std::str::Utf8Error> for Error {\n    fn from(_: std::str::Utf8Error) -> Self {\n        Self::BadRequest\n    }\n}\n\nimpl From<BlockingError> for Error {\n    #[inline]\n    fn from(_: BlockingError) -> Self {\n        Self::Internal\n    }\n}\n\nimpl Error {\n    /// Return an [`InfoText`] based on the error suitable for displaying on the error site\n    fn 
get_info_text(&self) -> InfoText {\n        let (primary, secondary) = {\n            match self {\n                Error::Internal => (\"Sorry\", \"try again later\"),\n                Error::NotFound => (\"The page\", \"was not found\"),\n                Error::SearchTimeout => (\"Search\", \"timed out\"),\n                Error::BadRequest => (\"Bad request\", \"\"),\n            }\n        };\n\n        InfoText { primary, secondary }\n    }\n}\n\n/// Not found error handler\npub async fn not_found() -> Result<HttpResponse, Error> {\n    Err(Error::NotFound)\n}\n"
  },
  {
    "path": "lib/frontend/templates/base.rs.html",
    "content": "@use super::subtemplates::{head_html, input_dropdown_html, main_body_html, footer_html};\r\n@use super::overlays::{page_overlays_html, search_overlays_html, mobile_overlays_html};\r\n@use crate::BaseData;\r\n\r\n@(data: BaseData)\r\n\r\n<!DOCTYPE html>\r\n<html lang=\"en\">\r\n   @:head_html(&data)\r\n\r\n   <body>\r\n\r\n      <header id=\"search-row\" class=\"wrap-row\">\r\n         <div class=\"d-flex center\">\r\n\r\n            <!-- Settings Button -->\r\n            <div class=\"btn-container\"> \r\n               <div class=\"settingsSvg settingsBtn\" data-p='\"button\", @{\"props\":@{\"name\": \"Settings Overlay\", \"category\": \"modal\"@}@}' data-toggle=\"modal\" data-target=\"#settingsModal\"></div>\r\n            </div>\r\n\r\n            <!-- Home / Info Button -->\r\n            <div class=\"btn-container right\">\r\n               <div class=\"homeBtn noselect\" onmousedown='onHomeClick(event)'>home</div>\r\n               <div class=\"infoSvg infoBtn\" data-p='\"button\", @{\"props\":@{\"name\": \"Help\", \"category\": \"page-swap\"@}@}' onclick='openHelpPage()'></div>\r\n            </div>\r\n\r\n            <!-- Search Bar -->\r\n            <div id=\"searchDiv\">\r\n               <div class=\"searchDivInner\">\r\n                  <form method=\"GET\" action=\"/search\" onsubmit=\"return onSearchStart()\">\r\n                     <div class=\"inner-form form-main\">\r\n                        \r\n                        @:input_dropdown_html(&data)\r\n\r\n                        <div class=\"input-field second-wrap\">\r\n                           <div class=\"input-group\">\r\n                              <input id=\"search\" name=\"s\" type=\"text\" value=\"@data.get_query_str()\" placeholder='@data.gettext(\"Search...\")' tabindex=\"1\" lang=\"ja\" autocapitalize=\"off\" autocomplete=\"off\" data-autoload=\"false\" data-effective-keyword=\"\"/>\r\n                              <span id=\"shadow-text\"></span>\r\n              
                <button id=\"emptyInput\" type=\"button\" class=\"btn bg-transparent\" onclick=\"emptySearchInput()\">\r\n                                 <div class=\"clearSvg\"></div>\r\n                              </button>\r\n                              <span id=\"search-vl\"></span>\r\n                              <button id=\"searchBtn\" class=\"search-embedded-btn search\" type=\"submit\">\r\n                                 <div class=\"searchSvg\"></div>\r\n                              </button>\r\n                              <button type=\"button\" id=\"voiceBtn\" class=\"search-embedded-btn p\" data-p='\"button\", @{\"props\":@{\"name\": \"Voice Overlay\", \"category\": \"toggle\"@}@}' onclick='toggleSpeakOverlay()'>\r\n                                 <div class=\"voiceSvg\"></div> \r\n                              </button>\r\n                              <button type=\"button\" class=\"search-embedded-btn radical p\" data-p='\"button\", @{\"props\":@{\"name\": \"Radical Overlay\", \"category\": \"toggle\"@}@}' onclick=\"toggleRadicalOverlay()\">\r\n                                 <span class=\"rad-picker-icon noselect\">部</span>\r\n                              </button>\r\n                          </div>\r\n                        </div>\r\n                     </div>\r\n                  </form>\r\n               </div>\r\n               \r\n               @:search_overlays_html(&data)\r\n\r\n            </div>\r\n         </div>\r\n      </header>\r\n\r\n      <main>\r\n         @:main_body_html(&data)\r\n         @:page_overlays_html(&data)\r\n         @:mobile_overlays_html(&data)\r\n      </main>\r\n\r\n      @:footer_html(&data, false)\r\n\r\n   </body>\r\n</html>\r\n"
  },
  {
    "path": "lib/frontend/templates/base_index.rs.html",
    "content": "@use super::subtemplates::{head_html, input_dropdown_html, footer_html};\n@use super::overlays::{page_overlays_html, search_overlays_html};\n@use crate::BaseData;\n\n@(data: BaseData)\n\n<!DOCTYPE html>\n<html lang=\"en\">\n   @:head_html(&data)\n   <link rel=\"stylesheet\" type=\"text/css\" href=\"variable_assets/@data.asset_hash/assets/css/page/indexPage.css\">\n   <link rel=\"stylesheet\" type=\"text/css\" href=\"variable_assets/@data.asset_hash/assets/css/page/multiPage/markdown.css\">\n   <link rel=\"stylesheet\" type=\"text/css\" href=\"variable_assets/@data.asset_hash/assets/css/overlay/notificationOverlay.css\">\n   <script defer src=\"/variable_assets/@data.asset_hash/assets/js/page/overlay/notifications.js\"></script>\n\n   <body class=\"index\">\n      <main class=\"noselect\">\n\n          <!-- Title Icon -->\n         <div onmousedown=\"return false\" class=\"title\">\n            <img class=\"titleImg\" src=\"variable_assets/@data.asset_hash/assets/jotokun/JotoTitle.svg\"> \n         </div>\n\n         <!-- Search Row -->\n         <div id=\"search-row\" class=\"wrap-row\">\n            <div class=\"d-flex center\">\n\n               <!-- Search Bar -->\n               <div id=\"searchDiv\" class=\"index\">\n                  <div class=\"searchDivInner\">\n                     <form method=\"GET\" action=\"/search\" onsubmit=\"return onSearchStart()\">\n                        <div class=\"inner-form\">\n                           \n                           @:input_dropdown_html(&data)\n\n                           <div class=\"input-field second-wrap\">\n                              <div class=\"input-group\">\n                                 <input id=\"search\" name=\"s\" type=\"text\" value=\"\" placeholder='@data.gettext(\"Search...\")' tabindex=\"1\" lang=\"ja\" autocapitalize=\"off\" autocomplete=\"off\" data-autoload=\"false\" data-effective-keyword=\"\">\n                                 <span id=\"shadow-text\"></span>\n 
                                <button type=\"button\" class=\"btn bg-transparent search-embedded-btn p\" data-p='\"button\", @{\"props\":@{\"name\": \"Image upload Overlay\", \"category\": \"modal\"@}@}' onclick=\"toggleImageSearchOverlay()\">\n                                    <div class=\"cameraSvg index\"></div>\n                                 </button>\n                                 <button type=\"button\" class=\"btn bg-transparent search-embedded-btn p\" data-p='\"button\", @{\"props\":@{\"name\": \"Voice input Overlay\", \"category\": \"modal\"@}@}' onclick=\"toggleSpeakOverlay()\">\n                                    <div class=\"voiceSvg index\"></div> \n                                 </button>\n                              </div>\n                           </div>\n                           \n                        </div>\n                     </form>\n                  </div>\n                  \n                  @:search_overlays_html(&data)\n\n               </div>\n            </div>\n\n            <div id=\"notifications-container\" class=\"hidden\">\n               <div class=\"notifications-info-container\">\n                  <div class=\"notification-title\">\n                     @data.gettext(\"Notifications\")\n                  </div>\n                  <div id=\"notification-content\">\n                     <div id=\"no-result\" class=\"notification-entry hidden\">\n                        @data.gettext(\"No new notifications\")\n                     </div>\n                     <div class=\"button-container\">\n                        <button class=\"overlay-button p\" data-p='\"button\", @{\"props\":@{\"name\": \"All notifications\", \"category\": \"page-swap\"@}@}' onclick=\"showAllNotifications()\">@data.gettext(\"Show all\")</button>\n                        <button class=\"overlay-button\" onclick=\"closeNotifications()\">@data.gettext(\"Close\")</button>\n                     </div>\n                  </div>\n          
     </div>\n            </div>\n\n            @:page_overlays_html(&data)\n\n            <div class=\"modal fade\" id=\"notificationModal\">\n               <div class=\"modal-dialog\">\n                  <div class=\"modal-content\">\n           \n                     <!-- Modal Header -->\n                     <div class=\"modal-header\">\n                        <h3 id=\"notification-detail-head\" class=\"modal-title\"></h3>\n                        <button type=\"button\" class=\"close\" data-dismiss=\"modal\">&times;</button>\n                     </div>\n                     \n                     <!-- Modal body -->\n                     <div id=\"notification-detail-body\" class=\"modal-body\"></div>\n                  </div>\n               </div>\n            </div>\n\n         </div>\n       \n         <!-- Search / Radical Button -->\n         <div class=\"index-btn-container\">\n            <div class=\"input-field third-wrap index\">\n               <button id=\"searchBtn\" class=\"btn-search\" onclick=\"onSearchStart()\">\n                  <div class=\"flex-center\">\n                     <div>@data.gettext(\"Search\")</div>\n                     <div class=\"searchSvg index\"></div>\n                  </div>\n               </button>\n            </div>\n   \n            <div class=\"input-field third-wrap index rad\">\n               <button class=\"btn-search\" onclick=\"toggleRadicalOverlay()\">\n                  <span>@data.gettext(\"Radicals\")</span>\n                  <span class=\"rad-picker-icon index noselect\">部</span>\n               </button>\n            </div>\n         </div> \n\n         <!-- Settings Button -->\n         <div class=\"btn-container\">\n            <div class=\"settingsSvg settingsBtn index p\" data-p='\"button\", @{\"props\":@{\"name\": \"Settings Overlay\", \"category\": \"modal\"@}@}' data-toggle=\"modal\" data-target=\"#settingsModal\"></div>\n         </div>\n\n         <!-- Info / Notification Button-->\n    
     <div class=\"btn-container\">\n            <div class=\"infoSvg infoBtn index p\" data-p='\"button\", @{\"props\":@{\"name\": \"Help\", \"category\": \"page-swap\"@}@}' onclick='openHelpPage()'></div>\n            <div>\n               <div class=\"notificationSvg notificationBtn index p\" data-p='\"button\", @{\"props\":@{\"name\": \"Notifications Popup\", \"category\": \"popup\"@}@}' onclick=\"toggleNotifications(event)\"></div>\n               <span class=\"notificationPoint\"></span>\n            </div>\n         </div>      \n      </main>\n\n      @:footer_html(&data, true)\n\n   </body>\n</html>\n"
  },
  {
    "path": "lib/frontend/templates/error_page.rs.html",
    "content": "@use crate::web_error::InfoText;\n@(status_code: u16, info: InfoText)\n\n<html lang=\"en\"></html>\n    <title>Jotoba</title>\n    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n    <meta http-equiv=\"Content-type\" content=\"text/html; charset=utf-8\">\n    <link rel=\"stylesheet\" type=\"text/css\" href=\"/assets/css/lib/bootstrap.min.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" href=\"/assets/css/page/errorPage.css\">\n  </head>\n  <body>\n    <div class=\"err-parent d-flex flex-column\">\n        <h1 class=\"err-code\">@status_code</h1>\n        <hr>\n        <div class=\"txt-primary small\">ごめんなさい</div>\n        <div class=\"txt-secondary\">@info.primary @info.secondary</div>\n        <a class=\"back-btn\" href=\"/\">Go back</a>\n        <span>or create an issue on our Github page:</span>\n        <a class=\"issue-btn git-logo\" href=\"https://github.com/WeDontPanic/Jotoba/issues\">\n          <svg height=\"32\" viewBox=\"0 0 16 16\" width=\"32\" aria-hidden=\"true\">\n            <path d=\"M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z\"></path></svg> \n        </a>\n    </div>\n  </body>\n</html>\n"
  },
  {
    "path": "lib/frontend/templates/functional/render_sentence.rs.html",
    "content": "@use jp_utils::furi::segment::SegmentRef;\n@use jp_utils::furi::segment::AsSegment;\n@* TODO Figure out how to use Iterator instead of Vec for this template. *@\n@type MyVec<'a> = Vec<SegmentRef<'a>>;\n@(iter: MyVec, addl_classes: &str, show_furigana: bool)\n\n<div class=\"furigana-kanji-container\">@*\n*@@for furi_part in iter {@*\n    *@@for r in furi_part.reading_iter() {@*\n      *@@if !furi_part.is_empty() {@*\n              *@@if furi_part.is_kanji() {@*\n                  *@<ruby>@*\n                      *@<span class=\"kanji-preview @addl_classes\">@*\n                          *@@r.kanji().unwrap()@*\n                      *@</span>@*\n                      *@@if show_furigana {@*\n                        *@<rp>（</rp>@*\n                        *@<rt class=\"furigana-preview noselect @addl_classes\">@*\n                            *@@r.kana()@*\n                        *@</rt>@*\n                        *@<rp>）</rp>@*\n                      *@}@*\n                  *@</ruby>@*\n              *@} else {@*\n                  *@<span class=\"inline-kana-preview @addl_classes\">@*\n                      *@@r.kanji().unwrap_or_else(|| r.kana())@*\n                  *@</span>@*\n              *@}@*\n      *@}@*\n    *@}@*\n*@}@*\n*@</div>\n"
  },
  {
    "path": "lib/frontend/templates/overlays/info/collocations.rs.html",
    "content": "@use crate::BaseData;\n@use types::jotoba::words::Word;\n@use crate::{templ_utils::*};\n\n@(data: &BaseData, word: &Word)\n\n<div class=\"modal fade\" id=\"coll@word.sequence\" style=\"display: none;\" aria-hidden=\"true\">\n    <div class=\"modal-dialog modal-lg\">\n      <div class=\"modal-content\">\n        <div class=\"modal-body\">\n          \n          <button type=\"button\" class=\"close\" data-dismiss=\"modal\">×</button>\n          <br>\n\n          <!-- Verb conjugation table -->\n          <h3 class=\"info-h3\">@data.gettext(\"Collocations\")</h3>\n          <table class=\"table collocation\">\n            <tbody>\n              @for collocation in get_collocations(&word, data.user_settings.lang_param()) {\n                <tr>\n                    <th scope=\"row\"><a class=\"clickable no-align green\" href=\"/search/@collocation.0\">@collocation.0</a></th>\n                    <td>@collocation.1</td>\n                </tr>\n              }\n            </tbody>\n          </table>\n\n        </div>\n      </div>\n    </div>\n</div>\n"
  },
  {
    "path": "lib/frontend/templates/overlays/info/definitions_jp.rs.html",
    "content": "@use crate::BaseData;\n@use types::jotoba::words::Word;\n\n@(_data: &BaseData, word: &Word)\n\n<div class=\"modal fade\" id=\"jdef@word.sequence\">\n  <div class=\"modal-dialog modal-lg\">\n    <div class=\"modal-content\">\n     <div class=\"modal-body\">\n        <button type=\"button\" class=\"close\" data-dismiss=\"modal\">×</button>\n        \n        <!-- PASTE HTML HERE -->\n\n        </div>\n     </div>\n  </div>\n</div>"
  },
  {
    "path": "lib/frontend/templates/overlays/info/inflections.rs.html",
    "content": "@use crate::BaseData;\n@use types::jotoba::words::{Word, inflection::Inflections};\n\n@(data: &BaseData, word: &Word, inflections: &Inflections)\n\n<div class=\"modal fade\" id=\"conj@word.sequence\">\n  <div class=\"modal-dialog modal-lg\">\n    <div class=\"modal-content\">\n     <div class=\"modal-body\">\n        <button type=\"button\" class=\"close\" data-dismiss=\"modal\">×</button>\n        \n        <!-- Verb conjugation table -->\n        <table class=\"table conjugation\">\n           <thead>\n             <tr>\n               <th scope=\"col\"></th>\n               <th scope=\"col\">@data.gettext(\"Affirmative\")</th>\n               <th scope=\"col\">@data.gettext(\"Negative\")</th>\n             </tr>\n           </thead>\n           <tbody>\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Present\")</th>\n                 <td>@inflections.present.positive</td>\n                 <td>@inflections.present.negative</td>\n              </tr>\n\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Present, polite\")</th>\n                 <td>@inflections.present_polite.positive</td>\n                 <td>@inflections.present_polite.negative</td>\n              </tr>\n\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Past\")</th>\n                 <td>@inflections.past.positive</td>\n                 <td>@inflections.past.negative</td>\n              </tr>\n\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Past, polite\")</th>\n                 <td>@inflections.past_polite.positive</td>\n                 <td>@inflections.past_polite.negative</td>\n              </tr>\n\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Te-form\")</th>\n                 <td>@inflections.te_form.positive</td>\n                 <td>@inflections.te_form.negative</td>\n              </tr>\n\n              <tr>\n                 <th 
scope=\"row\">@data.gettext(\"Potential\")</th>\n                 <td>@inflections.potential.positive</td>\n                 <td>@inflections.potential.negative</td>\n              </tr>\n\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Passive\")</th>\n                 <td>@inflections.passive.positive</td>\n                 <td>@inflections.passive.negative</td>\n              </tr>\n\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Causative\")</th>\n                 <td>@inflections.causative.positive</td>\n                 <td>@inflections.causative.negative</td>\n              </tr>\n\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Causative Passive\")</th>\n                 <td>@inflections.causative_passive.positive</td>\n                 <td>@inflections.causative_passive.negative</td>\n              </tr>\n              <tr>\n                 <th scope=\"row\">@data.gettext(\"Imperative\")</th>\n                 <td>@inflections.imperative.positive</td>\n                 <td>@inflections.imperative.negative</td>\n              </tr>\n           </tbody>\n         </table>\n        </div>\n     </div>\n  </div>\n</div>"
  },
  {
    "path": "lib/frontend/templates/overlays/mobile_overlays.rs.html",
    "content": "@use crate::BaseData;\n@(_data: &BaseData)\n\n<div class=\"mobile-nav d-flex flex-column-reverse hidden\">\n    <!-- Settings -->\n    <button class=\"mobile-nav-inner-btn\" data-toggle=\"modal\" data-target=\"#settingsModal\" onclick=\"toggleMobileNav()\">\n        <div class=\"settingsSvg mobile\"></div> \n    </button>\n    <!-- Radical Input -->\n    <button class=\"mobile-nav-inner-btn\" onclick=\"toggleRadicalOverlay(); toggleMobileNav();\">\n       <span class=\"rad-picker-icon noselect\">部</span>\n    </button>\n    <!-- Speech Input -->\n    <button class=\"mobile-nav-inner-btn\" onclick=\"toggleSpeakOverlay(); toggleMobileNav();\">\n        <div class=\"voiceSvg mobile\"></div> \n    </button>\n    <!-- Home Btn -->\n     <button class=\"mobile-nav-inner-btn\" onclick='Util.loadUrl(JotoTools.getPageUrl(\"\"));'>\n        <span class=\"homeBtn mobile noselect\">home</span> \n     </button>\n    <!-- Jump Up / Down -->\n    <button onclick=\"jumpToTop()\" id=\"jmp-btn\" class=\"mobile-nav-inner-btn hidden\">\n        <div class=\"jumpSvg mobile\"></div> \n    </button>\n</div>\n\n<button onclick=\"toggleMobileNav()\" class=\"mobile-nav-btn\">\n    <div class=\"menuSvg\"></div> \n</button>"
  },
  {
    "path": "lib/frontend/templates/overlays/page/decomposition_graph.rs.html",
    "content": "@use crate::BaseData;\n@(_data: &BaseData)\n\n<div id=\"backdrop\" class=\"hidden\" onclick='onBackdropClick(event)'>\n    <div class=\"tree-parent\">\n        <span id=\"tree-toggle\" class=\"hidden\" onclick=\"onGraphToggleCheckboxClick()\"></span>\n        <div id=\"tree-target\"></div>\n    </div>\n</div>"
  },
  {
    "path": "lib/frontend/templates/overlays/page/image_crop.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n<div class=\"modal fade\" id=\"imageCroppingModal\">\n    <div class=\"modal-body cropping-target-body\">\n       <div class=\"cropping-target-border\">\n          <button class=\"btn-search crop p\" data-p='\"button\", @{\"props\":@{\"name\": \"Image upload\", \"category\": \"modal\"@}@}'onclick=\"uploadCroppedImage()\">\n             <div class=\"flex-center\">\n                <div>@data.gettext(\"Search\")</div>\n                <div class=\"searchSvg index\"></div>\n             </div>\n          </button>\n          <button class=\"btn-search btn-danger crop\" onclick=\"resetUploadUrlInput();\">\n             <div class=\"flex-center\">\n                <div>@data.gettext(\"Close\")</div>\n             </div>\n          </button>\n       </div>\n       <div id=\"croppingTarget\"></div>\n    </div>\n </div>"
  },
  {
    "path": "lib/frontend/templates/overlays/page/loading.rs.html",
    "content": "@use crate::BaseData;\n@(_data: &BaseData)\n\n<div id=\"loading-screen\">\n    <div class=\"loading-animation\"></div>\n</div> "
  },
  {
    "path": "lib/frontend/templates/overlays/page/settings.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n<div class=\"modal fade\" id=\"settingsModal\">\n    <div class=\"modal-dialog\">\n       <div class=\"modal-content\">\n          <div class=\"modal-body\">\n            <div class=\"mdl-layout mdl-js-layout mdl-layout--fixed-header\">\n               <div class=\"mdl-layout__header\">\n                 <div class=\"mdl-layout__header-row\">\n                   <!-- Title -->\n                   <span class=\"mdl-layout-title\">Settings</span>\n                   <button type=\"button\" class=\"close\" data-dismiss=\"modal\">×</button>\n                 </div>\n                 <!-- Tabs -->\n                 <div class=\"mdl-layout__tab-bar mdl-js-ripple-effect\">\n                   <a href=\"#scroll-tab-1\" class=\"mdl-layout__tab is-active\">@data.gettext(\"Language\")</a>\n                   <a href=\"#scroll-tab-2\" class=\"mdl-layout__tab\">@data.gettext(\"Search\")</a>\n                   <a href=\"#scroll-tab-3\" class=\"mdl-layout__tab\">@data.gettext(\"Display\")</a>\n                   <a href=\"#scroll-tab-4\" class=\"mdl-layout__tab\">@data.gettext(\"Other\")</a>\n                 </div>\n               </div>\n\n               <!-- Content -->\n               <div class=\"mdl-layout__content\">\n\n                  <!-- Page 1 -->\n                  <section class=\"mdl-layout__tab-panel is-active\" id=\"scroll-tab-1\">\n                     <div class=\"page-content\">\n                        <div class=\"d-flex flex-column\">\n                           <div class=\"settings-entry\">\n                              <span class=\"inner-title\">@data.gettext(\"Default search language\")</span>\n                              <div class=\"choices noselect\" data-type=\"select-one\" tabindex=\"1\">\n                                 <div class=\"choices__inner\">\n                                    <select data-trigger=\"\" name=\"type\" data-onchange='alterLanguage_search' 
class=\"hidden\">\n                                       <option value=\"0\">@data.gettext(\"English\")</option>\n                                    </select>\n                                    <div class=\"choices__list choices__list--single\">\n                                       <div class=\"choices__item choices__item--selectable\">@data.gettext(\"English\")</div>\n                                    </div>\n                                 </div>\n                                 <div class=\"choices__list choices__list--dropdown\">\n                                 <div id=\"search-lang-select\" class=\"choices__list p\" data-p='\"button\", @{\"props\":@{\"name\": \"Search language change Select\", \"category\": \"settings\"@}@}'>\n                                    <div data-value=\"en-US\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"English\")</div>\n                                    <div data-value=\"de-DE\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"German\")</div>\n                                    <div data-value=\"es-ES\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Spanish\")</div>\n                                    <div data-value=\"fr-FR\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"French\")</div>\n                                    <div data-value=\"nl-NL\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Dutch\")</div>\n                                    <div data-value=\"sv-SE\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Swedish\")</div>\n                                    <div data-value=\"ru\"    class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Russian\")</div>\n                                    <div data-value=\"hu\"    
class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Hungarian\")</div>\n                                    <div data-value=\"sl-SI\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Slovenian\")</div>\n                                 </div>\n                              </div>\n                           </div>\n                        </div>\n\n                        <div class=\"settings-entry\">\n                           <span class=\"inner-title\">@data.gettext(\"Page language\")</span>\n                           <div class=\"choices noselect\" data-type=\"select-one\" tabindex=\"1\">\n                              <div class=\"choices__inner\">\n                                 <select name=\"type\" data-onchange='alterLanguage_page' class=\"hidden\">\n                                    <option value=\"0\">@data.gettext(\"English\")</option>\n                                 </select>\n                                 <div class=\"choices__list choices__list--single\">\n                                    <div class=\"choices__item choices__item--selectable\">@data.gettext(\"English\")</div>\n                                 </div>\n                              </div>\n                              <div class=\"choices__list choices__list--dropdown p\" data-p='\"button\", @{\"props\":@{\"name\": \"Page language change Select\", \"category\": \"settings\"@}@}'>\n                                 <div id=\"page-lang-select\" class=\"choices__list\">\n                                    <div data-value=\"en-US\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"English\")</div>\n                                    <div data-value=\"de-DE\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"German\")</div>\n                                    <div data-value=\"hu\"    class=\"choices__item 
choices__item--choice choices__item--selectable\">@data.gettext(\"Hungarian\")</div>\n                                    <div data-value=\"ja-JP\" class=\"choices__item choices__item--choice choices__item--disabled\">@data.gettext(\"Japanese\")</div>\n                                    <div data-value=\"es-ES\" class=\"choices__item choices__item--choice choices__item--disabled\">@data.gettext(\"Spanish\")</div>\n                                    <div data-value=\"fr-FR\" class=\"choices__item choices__item--choice choices__item--disabled\">@data.gettext(\"French\")</div>\n                                    <div data-value=\"nl-NL\" class=\"choices__item choices__item--choice choices__item--disabled\">@data.gettext(\"Dutch\")</div>\n                                    <div data-value=\"sv-SE\" class=\"choices__item choices__item--choice choices__item--disabled\">@data.gettext(\"Swedish\")</div>\n                                    <div data-value=\"ru\"    class=\"choices__item choices__item--choice choices__item--disabled\">@data.gettext(\"Russian\")</div>\n                                    <div data-value=\"sl-SI\" class=\"choices__item choices__item--choice choices__item--disabled\">@data.gettext(\"Slovenian\")</div>\n                                 </div>\n                              </div>\n                           </div>\n                        </div>\n                     </div>\n                   </div>\n                 </section>\n\n                 <!-- Page 2 -->\n                 <section class=\"mdl-layout__tab-panel\" id=\"scroll-tab-2\">\n                     <div class=\"page-content\">\n                        <div class=\"d-flex flex-column\">\n                           <div class=\"inner-header\">@data.gettext(\"General\")</div>\n                           <div class=\"settings-entry no-gap\">\n                              <span class=\"inner-title\">@data.gettext(\"Always show english results\"):</span>\n                         
     <label for=\"show_eng_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                 <input id=\"show_eng_settings\" type=\"checkbox\" class=\"mdl-checkbox__input\" onchange=\"Settings.alterSearch('alwaysShowEnglish', event.target.checked, true)\">\n                              </label>\n                           </div>\n                           <div class=\"settings-entry sub\" id=\"eng_on_top_parent\">\n                              <span class=\"inner-title\">@data.gettext(\"Show english results on top\"):</span>\n                              <label for=\"show_eng_on_top_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                 <input id=\"show_eng_on_top_settings\" type=\"checkbox\" class=\"mdl-checkbox__input\" onchange=\"Settings.alterSearch('showEnglishOnTop', event.target.checked)\">\n                              </label>\n                           </div>\n                           <div class=\"settings-entry\">\n                              <span class=\"inner-title\">@data.gettext(\"Focus search bar on load\"):</span>\n                              <label for=\"focus_search_bar_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                 <input id=\"focus_search_bar_settings\" type=\"checkbox\" class=\"mdl-checkbox__input\" onchange=\"Settings.alterSearch('focusSearchbar', event.target.checked, true)\">\n                              </label>\n                           </div>\n                           <div class=\"settings-entry sub\" id=\"select_searchbar_content_parent\">\n                              <span class=\"inner-title\">@data.gettext(\"Select input on load\"):</span>\n                              <label for=\"select_searchbar_content_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                 <input id=\"select_searchbar_content_settings\" 
type=\"checkbox\" class=\"mdl-checkbox__input\" onchange=\"Settings.alterSearch('selectSearchbarContent', event.target.checked)\">\n                              </label>\n                           </div>\n                           <div class=\"settings-entry txt-input\">\n                              <span id=\"rpr\" class=\"inner-title txt-input\">@data.gettext(\"Results per page\"):</span>\n                              <form action=\"#\" onsubmit=\"return false\">\n                                 <div class=\"mdl-textfield mdl-js-textfield\">\n                                   <input class=\"mdl-textfield__input\" type=\"text\" pattern=\"\\b(0*(?:[1-9][0-9]?|100))\\b\" id=\"items_per_page_input\" onblur=\"Settings.alterSearch('itemsPerPage', event.target.value)\">\n                                   <label class=\"mdl-textfield__label\" for=\"items_per_page_input\">@data.gettext(\"Number...\")</label>\n                                   <span class=\"mdl-textfield__error\">@data.gettext(\"Input has to be in range of 1 and 100!\")</span>\n                                 </div>\n                               </form>\n                               <div class=\"mdl-tooltip\" for=\"rpr\">\n                                 @data.gettext(\"max amount of names/words/sentences shown per page\")\n                               </div>\n                           </div>\n\n                           <div class=\"inner-header\">@data.gettext(\"Sentences\")</div>\n                           <div class=\"settings-entry\">\n                              <span class=\"inner-title\">@data.gettext(\"Show Furigana\"):</span>\n                              <label for=\"show_sentence_furigana_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                 <input checked id=\"show_sentence_furigana_settings\" type=\"checkbox\" class=\"mdl-checkbox__input\" onchange=\"Settings.alterSearch('showFurigana', event.target.checked)\">\n      
                        </label>\n                           </div>\n\n                           <div class=\"inner-header\">@data.gettext(\"Words\")</div>\n                           <div class=\"settings-entry\">\n                              <span class=\"inner-title\">@data.gettext(\"Show example sentences\"):</span>\n                              <label for=\"show_example_sentences_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                 <input checked id=\"show_example_sentences_settings\" type=\"checkbox\" class=\"mdl-checkbox__input\" onchange=\"Settings.alterSearch('showExampleSentences', event.target.checked)\">\n                              </label>\n                           </div>\n                        \n                           <div class=\"inner-header\">@data.gettext(\"Kanji\")</div>\n                           <div class=\"settings-entry no-gap\">\n                              <span id=\"krp\" class=\"inner-title txt-input\">@data.gettext(\"Items per page\"):</span>\n                              <form action=\"#\" onsubmit=\"return false\">\n                                 <div class=\"mdl-textfield mdl-js-textfield\">\n                                   <input class=\"mdl-textfield__input\" type=\"text\" pattern=\"\\b(0*(?:[1-9][0-9]?|100))\\b\" id=\"kanji_per_page_input\" onblur=\"Settings.alterSearch('kanjiPerPage', event.target.value)\">\n                                   <label class=\"mdl-textfield__label\" for=\"kanji_per_page_input\">@data.gettext(\"Number...\")</label>\n                                   <span class=\"mdl-textfield__error\">@data.gettext(\"Input has to be in range of 1 and 100!\")</span>\n                                 </div>\n                               </form>\n                               <div class=\"mdl-tooltip\" for=\"krp\">\n                                 @data.gettext(\"max amount of kanji shown per page\")\n                               
</div>\n                           </div>\n                        </div>\n                     </div>\n                 </section>\n\n                 <!-- Page 3 -->\n                 <section class=\"mdl-layout__tab-panel\" id=\"scroll-tab-3\">\n                     <div class=\"page-content\">\n                        <div class=\"d-flex flex-column\">\n                           <div class=\"settings-entry\">\n                              <span class=\"inner-title display\">@data.gettext(\"Use dark mode\"):</span>\n                              <label for=\"use_dark_mode_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                 <input checked id=\"use_dark_mode_settings\" type=\"checkbox\" class=\"mdl-checkbox__input p\" data-p='\"button\", @{\"props\":@{\"name\": \"Dark Mode\", \"category\": \"settings\"@}@}' onchange=\"setTheme(event.target.checked ? 'dark' : 'light')\">\n                              </label>\n                           </div>\n                           <div class=\"settings-entry\">\n                              <span class=\"inner-title display\">@data.gettext(\"Show kanji on load\"):</span>\n                              <label for=\"show_kanji_on_load_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                 <input checked id=\"show_kanji_on_load_settings\" type=\"checkbox\" class=\"mdl-checkbox__input p\" data-p='\"button\", @{\"props\":@{\"name\": \"Show Kanji on load\", \"category\": \"settings\"@}@}' onchange=\"Settings.alterDisplay('showKanjiOnLoad', event.target.checked)\">\n                              </label>\n                           </div>\n                           <div class=\"settings-entry\">\n                              <span class=\"inner-title display\">@data.gettext(\"Show kanji numbers\"):</span>\n                              <label for=\"show_kanji_numbers_settings\" class=\"mdl-checkbox mdl-js-checkbox 
mdl-js-ripple-effect\">\n                                 <input checked id=\"show_kanji_numbers_settings\" type=\"checkbox\" class=\"mdl-checkbox__input p\" data-p='\"button\", @{\"props\":@{\"name\": \"Show Kanji numbers\", \"category\": \"settings\"@}@}' onchange=\"Settings.alterDisplay('showKanjiNumbers', event.target.checked); if(JotoTools.getCurrentSearchType() == 1) location.reload();\">\n                              </label>\n                           </div>\n                           <div class=\"settings-entry\">\n                              <span class=\"inner-title\">@data.gettext(\"Default kanji animation speed\"):</span>\n                              <div class=\"slidercontainer settings\">\n                                 <input id=\"show_anim_speed_settings\" type=\"range\" min=\"0.05\" max=\"2\" value=\"1\" step=\"0.05\" class=\"slider speedSlider settings\" oninput=\"Settings.alterDisplay('kanjiAnimationSpeed', event.target.value); OverlaySettings.updateSliders();\">\n                                 <span id=\"show_anim_speed_settings_slider\">2</span>\n                              </div>\n                           </div>\n                        </div>\n                     </div>\n                 </section>\n\n                 <!-- Page 4 -->\n                 <section class=\"mdl-layout__tab-panel\" id=\"scroll-tab-4\">\n                     <div class=\"page-content\">\n                        <div class=\"d-flex flex-column\">\n                            <div class=\"settings-entry\">\n                                <span class=\"inner-title\">@data.gettext(\"Enable Quick-Copy\"):</span>\n                                <label for=\"dbl_click_copy_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                    <input checked id=\"dbl_click_copy_settings\" type=\"checkbox\" class=\"mdl-checkbox__input\" onchange=\"Settings.alterOther('enableDoubleClickCopy', event.target.checked)\">\n   
                             </label>\n                            </div>\n                            <div class=\"settings-entry\">\n                                <span class=\"inner-title\">@data.gettext(\"Share usage statistics\"):</span>\n                                <label for=\"tracking_settings\" class=\"mdl-checkbox mdl-js-checkbox mdl-js-ripple-effect\">\n                                    <input checked id=\"tracking_settings\" type=\"checkbox\" class=\"mdl-checkbox__input\" onchange=\"Settings.onTrackingAcceptChange(event.target.checked)\">\n                                </label>\n                            </div>\n                            <div class=\"settings-entry ex\">\n                                <label>@data.gettext(\"STATISTICS_EXPLANATION\")</label>\n                            </div>\n                        </div>\n                     </div>\n                 </section>\n               </div>\n             </div>\n          </div>\n        </div>\n    </div>\n </div>\n"
  },
  {
    "path": "lib/frontend/templates/overlays/page_overlays.rs.html",
    "content": "@use super::page::{image_crop_html, loading_html, settings_html};\n\n@use crate::BaseData;\n@(data: &BaseData)\n\n@:image_crop_html(data)\n@:loading_html(data)\n@:settings_html(data)"
  },
  {
    "path": "lib/frontend/templates/overlays/search_overlays.rs.html",
    "content": "@use super::searchbar::{image_input_html, radicals_html, speech_html, suggestions_html};\n\n@use crate::BaseData;\n@(data: &BaseData)\n\n@:image_input_html(data)\n@:radicals_html(data)\n@:speech_html(data)\n@:suggestions_html(data)"
  },
  {
    "path": "lib/frontend/templates/overlays/searchbar/image_input.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n<div class=\"overlay image hidden\">\n    <span class=\"x-button noselect\" onclick=\"toggleImageSearchOverlay()\">x</span>\n    <div class=\"d-flex flex-column\">\n        <span>@data.gettext(\"Enter a URL or upload your image directly and Jotoba will try to search for japanese words contained in the picture.\")</span>\n        <div class=\"d-flex flex-row image-upload-parent\">\n             <input id=\"imgUploadUrl\" name=\"image-search\" class=\"image-search-input\" type=\"text\" value=\"\" placeholder='@data.gettext(\"Enter Image URL...\")' tabindex=\"1\" autocapitalize=\"off\" autocomplete=\"off\" data-autoload=\"false\" data-effective-keyword=\"\">\n             <input id=\"imgUploadFile\" type=\"file\" class=\"image-search-upload\" accept=\".png,.jpg,.jpeg\" onchange=\"imgSearchFileSelected()\">\n             <span class=\"imgUploadSvg\" onclick=\"imgUploadAltClick()\"></span>\n             <button class=\"image-search-upload-btn\"  onclick=\"openImageCropOverlay()\">@data.gettext(\"Search\")</button>\n        </div>\n    </div>\n</div>"
  },
  {
    "path": "lib/frontend/templates/overlays/searchbar/radicals.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n<div class=\"overlay radical noselect hidden\">\n    <span class=\"x-button noselect\" onclick=\"toggleRadicalOverlay()\">x</span>\n\n    <div class=\"rad-page-toggle\">\n        <span id=\"r-t0\" onclick=\"openRadicalPage(0);\" class=\"selected\">1&2</span>\n        <span id=\"r-t1\" onclick=\"openRadicalPage(1);\">3</span>\n        <span id=\"r-t2\" onclick=\"openRadicalPage(2);\">4</span>\n        <span id=\"r-t3\" onclick=\"openRadicalPage(3);\">5</span>\n        <span id=\"r-t4\" onclick=\"openRadicalPage(4);\">6</span>\n        <span id=\"r-t5\" onclick=\"openRadicalPage(5);\">7</span>\n        <span id=\"r-t6\" onclick=\"openRadicalPage(6);\">8</span>\n        <span id=\"r-t7\" onclick=\"openRadicalPage(7);\">9</span>\n        <span id=\"r-t8\" onclick=\"openRadicalPage(8);\">10</span>\n        <span id=\"r-t9\" onclick=\"openRadicalPage(9);\">> 10</span>\n        <span id=\"r-tc\" onclick=\"openRadicalPage(10);\"><div class=\"searchSvg\"></div></span>\n    </div>\n\n    <div class=\"rad-kanji-wrapper\">\n        <div class=\"rad-wrapper\">\n            <span class=\"rad-kanji-title\">@data.gettext(\"Select Radicals\")</span>\n            <div class=\"d-flex flex-row wrap rad-picker\">\n                <!-- Filled by JS -->\n            </div>\n        </div>\n        <div class=\"kanji-wrapper\">\n            <span class=\"rad-kanji-title\">@data.gettext(\"Select Kanji\")</span>\n            <div class=\"d-flex flex-row wrap rad-results\">\n                <!-- Filled by JS -->\n            </div>\n        </div>\n        <div class=\"rad-suggestion-wrapper\">\n            <div id=\"suggestion-container-rad\" class=\"d-flex flex-row\">\n\n            </div>\n        </div>\n        <div class=\"rad-page-footer\">\n            <div class=\"kanji-search-wrapper\">\n                <input type=\"text\" id=\"kanji-search\" name=\"kanjiSearch\" placeholder='@data.gettext(\"Search Radicals...\")'>\n     
           <span class=\"searchSvg\"></span>\n                <span class=\"undoSvg\" onclick=\"resetRadPicker()\"></span>\n                <button onclick=\"onSearchStart()\" class=\"btn-search\">\n                    <div class=\"flex-center\">\n                       <div>@data.gettext(\"Search\")</div>\n                       <div class=\"searchSvg index\"></div>\n                    </div>\n                 </button>\n            </div>\n        </div>\n    </div>\n    <div id=\"rad-end-point\"></div>\n</div>"
  },
  {
    "path": "lib/frontend/templates/overlays/searchbar/speech.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n<div class=\"overlay speech hidden\">\n    <span class=\"x-button noselect\" onclick=\"toggleSpeakOverlay()\">x</span>\n    <div class=\"d-flex flex-column\">\n        <span>@data.gettext(\"Current language\"): <b id=\"currentSpeechLang\">@data.gettext(\"English\")</b></span>\n        <span>@data.gettext(\"Currently listening\"): <b id=\"currentlyListening\">@data.gettext(\"No\")</b></span>\n        <br />\n        <span>@data.gettext(\"To change your language, select one of the following\"):</span>\n        <div class=\"d-flex flex-row wrap\">\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"jap\")'>@data.gettext(\"Japanese\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"ger\")'>@data.gettext(\"German\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"eng\")'>@data.gettext(\"English\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"rus\")'>@data.gettext(\"Russian\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"spa\")'>@data.gettext(\"Spanish\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"swe\")'>@data.gettext(\"Swedish\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"fre\")'>@data.gettext(\"French\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"dut\")'>@data.gettext(\"Dutch\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"hun\")'>@data.gettext(\"Hungarian\")</span>\n            <span class=\"clickable right-padding-10\" onclick='setRecognitionLang(\"svl\")'>@data.gettext(\"Slovenian\")</span>\n        </div>\n    </div>\n</div>"
  },
  {
    "path": "lib/frontend/templates/overlays/searchbar/suggestions.rs.html",
    "content": "@use crate::BaseData;\n@(_data: &BaseData)\n\n<div class=\"overlay suggestion noselect hidden\">\n    <div id=\"suggestion-container\" class=\"d-flex flex-column\"></div>\n</div>"
  },
  {
    "path": "lib/frontend/templates/pages/about.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/aboutPage.css\">\n\n\n<br>\n<div class=\"d-flex center\">\n\t<article>\n\t\t<!-- About us -->\n\t\t<div class=\"small-12 medium-6 columns small-centered\">\n\t\t\t<h2>@data.gettext(\"About\")</h2>\n\t\t\t<p>@data.gettext(\"Jotoba is a multilingual Japanese dictionary. It is easy to find translations for words or kanji, see example sentences and the way names can be written.\")</p>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"Jotoba is open source. Check out our\") <a class=\"clickable\" href=\"https://github.com/WeDontPanic/Jotoba\">@data.gettext(\"Github page\")</a> @data.gettext(\"if you want to contribute or host Jotoba yourself.\")\n\t\t\t\t<br>\n\t\t\t\t@data.gettext(\"Check out our\") <a class=\"clickable\" href=\"https://trello.com/b/nmG0xgaW/jotoba\">@data.gettext(\"Trello Board\")</a> @data.gettext(\"aswell if you are interested in upcoming features and what we are currently working on!\")\n\t\t\t</p>\n\t\t</div>\n\t\t<!-- About our Data Sources -->\n\t\t<div class=\"small-12 medium-6 columns small-centered\">\n\t\t\t<h2>@data.gettext(\"Data Sources and Inspiration\")</h2>\n\t\t\t<p>@data.gettext(\"Of course this project wouldn't have been possible without the help of some great data sources.\") <br>@data.gettext(\"Many thanks to every one of them for providing such a variety of data people can use to learn the japanese language.\")</p>\n\t\t\t<span class=\"about-title\">@data.gettext(\"Joto-kun\")</span>\n\t\t\t<div class=\"d-flex flex-row\">\n\t\t\t\t<p>\n\t\t\t\t\t@data.gettext(\"Joto-kun was created by a good friend of ours who is truly a wizard when it comes down to design!\")\n\t\t\t\t</p>\n\t\t\t\t<img class=\"joto-wizard\" src=\"/variable_assets/@data.asset_hash/assets/jotokun/JotoStaff.svg\" >\n\t\t\t</div>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"https://jisho.org\" 
>@data.gettext(\"Jisho\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"Jisho, created by Kim Ahlström, Miwa Ahlström and Andrew Plummer is a pretty and powerful english-japanese dictionary.\")<br>@data.gettext(\"We took inspiration in their work and design to improve on their concept and offer it to a wider variety of people.\")\n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"http://www.edrdg.org/wiki/index.php/JMdict-EDICT_Dictionary_Project\" >@data.gettext(\"Words\")</a> <span class=\"about-title\">&amp;</span>\n\t\t\t<a class=\"clickable title no-align\" href=\"http://www.edrdg.org/wiki/index.php/KANJIDIC_Project\">@data.gettext(\"Kanji\")</a> <span class=\"about-title\">&amp;</span>\n\t\t\t<a class=\"clickable title no-align\" href=\"http://www.edrdg.org/enamdict/enamdict_doc.html\" >@data.gettext(\"Names\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"Words (except sound effects), Kanji and Names available on this site are publicly provided and maintained by\") <br>\n\t\t\t\t<a class=\"clickable no-align\" href=\"http://www.edrdg.org/\">EDRDG</a> (Electronic Dictionary Research and Development Group) @data.gettext(\"and available under the license\") <br>\n\t\t\t\t<a class=\"clickable no-align\" href=\"https://creativecommons.org/licenses/by-sa/3.0/\">Creative Commons Attribution-ShareAlike Licence (V3.0).</a> <br>\n\t\t\t\t@data.gettext(\"Additionally, the RADKFILE by Jin Breen is used to link Radicals to Kanji.\") \n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"https://github.com/tofugu/japanese-vocabulary-pronunciation-audio\" >@data.gettext(\"Audio Files\") #1</a>\n\t\t\t<span class=\"about-title\">@data.gettext(\"and\")</span>\n\t\t\t<a class=\"clickable title no-align\" href=\"https://kanjialive.com/\" >@data.gettext(\"Audio Files\") #2</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"The audio files #1 were graciously made public by\") <a class=\"clickable no-align\" 
href=\"https://www.wanikani.com/\">@data.gettext(\"WaniKani\")</a> <span class=\"highlight\">&amp;</span> <a class=\"clickable no-align\" href=\"https://www.tofugu.com/\">@data.gettext(\"Tofugo\")</a> \n\t\t\t\t@data.gettext(\"and uploaded to Github under the CC-BY-4.0 licence.\")<br>\n\t\t\t\t@data.gettext(\"The audio files #2 are provided by the\") <a class=\"clickable no-align\" href=\"https://kanjialive.com/\">@data.gettext(\"Kanji alive project\")</a> @data.gettext(\"and are also available under the CC-BY-4.0 license.\")<br>\n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"https://www.japanpowered.com/anime-articles/manga-sound-effect-guide\">@data.gettext(\"Manga Sound Effects\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"The data about Sound Effects is graciously provided by Chris Kincaid and is used as additional data in the word search.\")\n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"https://tatoeba.org/\">@data.gettext(\"Sentences\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"Sentences are provided by Tatoeba under the Creative Commons CC 1.0 and 2.0 licences.\")\n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"http://kanjivg.tagaini.net/\" >@data.gettext(\"Kanji Animations\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"The raw data used for kanji animations is publicly provided by KanjiVG, a project by Ulrich Apel.\") <br>@data.gettext(\"The conversion into images and animated SVG is done by a ruby script which was made by\") <a class=\"clickable no-align\" href=\"https://github.com/Kimtaro/kanjivg2svg\">@data.gettext(\"Kimtaro\")</a> @data.gettext(\"and altered by\") <a class=\"clickable no-align\" href=\"https://github.com/Yukaru-san/kanjivg2svg\">@data.gettext(\"Yukáru\")</a>.\n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"http://www.tanos.co.uk/jlpt/sharing/\" >@data.gettext(\"JLPT Data\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"Data about JLPT 
proficiencies are by provided by Jonathan Waller.\") <br> @data.gettext(\"There is also some non-free data available on his website, so check it out if you are interested.\")\n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"https://clrd.ninjal.ac.jp/unidic/en/about_unidic_en.html\" >@data.gettext(\"Word tokenization\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"Word tokenization is done using UniDic, by the UniDic Consortium and used for Japanese morphological analysis implementations.\")\n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"https://github.com/mifunetoshiro/kanjium\" >@data.gettext(\"Radicals\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"Data about Radicals used in specific Kanji are provided by Kanjium.\") <br> @data.gettext(\"On the project's Github Page you can find lots of data about Kanji.\")\n\t\t\t</p>\n\t\t\t<hr>\n\t\t\t<a class=\"clickable title no-align\" href=\"https://clrd.ninjal.ac.jp/unidic/en/\" >@data.gettext(\"Pitch accents\")</a>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"Pitch accent data has been extracted from UniDic.\")\n\t\t\t</p>\n\t\t</div>\n\t</article>\n</div>     \n"
  },
  {
    "path": "lib/frontend/templates/pages/info.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/infoPage.css\">\n<script defer src=\"/variable_assets/@data.asset_hash/assets/js/page/infoPage.js\"></script>\n\n<div class=\"d-flex center\">\n\t<article>\n\n\t\t<!-- Shortcuts -->\n\t\t<div id=\"shortcutInfo\" class=\"small-12 medium-6 columns small-centered\">\n\t\t\t<h2>@data.gettext(\"Shortcuts\")</h2>\n\t\t\t<p>@data.gettext(\"To improve the quality of life on Jotoba, we offer some shortcuts to quickly navigate the page:\")</p>\n\t\t\t\n         <!-- Everywhere accessable shortcuts -->\n        \t<b>@data.gettext(\"Everywhere\")</b>\n\t\t \t<div class=\"table\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>w | s | n | k</span>\n\t\t\t\t\t<span>@data.gettext(\"Quickly change between words | sentences | names | kanji tabs\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>/</span>\n\t\t\t\t\t<span>@data.gettext(\"Focus the search bar\")</span>\n\t\t\t\t</div>\n\t\t\t</div>\n\n         <!-- Shortcuts when focussed on search bar -->\n\t\t\t<b>@data.gettext(\"Focussed search bar\")</b>\n\t\t\t<div class=\"table\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>↑ | ↓</span>\n\t\t\t\t\t<span>@data.gettext(\"Iterate suggestions up | down\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>[Tab]</span>\n\t\t\t\t\t<span>@data.gettext(\"Iterate suggestions down\")</span>\n\t\t\t\t</div>\n\t\t\t</div>\n\n         <!-- Shortcuts used only by the [Words] search -->\n\t\t\t<b>@data.gettext(\"[Words] search\")</b>\n\t\t\t<div class=\"table\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>p</span>\n\t\t\t\t\t<span>@data.gettext(\"Play the first possible audio\")</span>\n\t\t\t\t</div>\n\t\t\t</div>\n\n\t\t<!-- Shortcuts used only by the [Words] search -->\n\t\t\t<b>@data.gettext(\"[Kanji] search\")</b>\t\n\t\t\t<div class=\"table\">\n\t\t\t\t<div 
class=\"row\">\n\t\t\t\t\t<span>c</span>\n\t\t\t\t\t<span>@data.gettext(\"Show / Collapse compounds\")</span>\n\t\t\t\t</div>\n\t\t\t</div>\n\n\t\t</div>\n\n\t\t<!-- Hashtags -->\n\t\t<div class=\"small-12 medium-6 columns small-centered help-cat\">\n\t\t\t<h2>@data.gettext(\"Hashtags\")</h2>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"To specify what kind of results your search should offer, you can use shortcuts.\") <br> @data.gettext(\"Hashtags should be written at end end of your input like this:\")\n\t\t\t\t<a class=\"clickable no-align\" href=\"/search/start%20%23noun?t=0\">start #noun</a>\n\t\t\t</p>\n\n         <!-- Word search hashtags --> \n\t\t\t<b>@data.gettext(\"Available Hashtags for [Words] search\")</b>+\n\n\t\t\t<div class=\"table\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#noun</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for nouns\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#verb</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for verbs\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#transitive</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for transitive verbs\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#intransitive</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for intransitive verbs\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#adverb</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for adverbs\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#auxilary</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for auxilary verbs\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#adjective</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for adjectives\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#pronoun</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for pronouns\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div 
class=\"row\">\n\t\t\t\t\t<span>#conjungation</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for conjugations\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#prefix</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for prefixes\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#suffix</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for suffixes\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#particle</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for japanese particles\")</span>\n\t\t\t\t</div>\n                                <div class=\"row\">\n\t\t\t\t\t<span>#Irregular-Ichidan</span>\n\t\t\t\t\t<span>@data.gettext(\"Lists iru/eru ending verbs which are conjugated as godan verbs\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#sfx</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for sfx words [comic sounds]\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#counter</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for words used for counting\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#expression</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for expressions\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#interjection</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for words used as interjections\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#numeric</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for numeric words\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#abbreviation</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for abbreviations\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#katakana</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for katakana words\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#unclassified</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for words that don't fit in any 
category\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#N [5-1]</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for words included in the specific JLPT level\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#word</span>\n\t\t\t\t\t<span>@data.gettext(\"Search in the [words] category\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#sentence</span>\n\t\t\t\t\t<span>@data.gettext(\"Search in the [sentences] category\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#name</span>\n\t\t\t\t\t<span>@data.gettext(\"Search in the [name] category\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#kanji</span>\n\t\t\t\t\t<span>@data.gettext(\"Search in the [kanji] category\")</span>\n\t\t\t\t</div>\n\t\t\t</div>\t\n\n         <!-- Sentence search hashtags --> \n\t\t\t<b>@data.gettext(\"Available Hashtags for [Sentence] search\")</b>\n\t\t\t<div class=\"table\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#N [1-5]</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for sentences included in the specific JLPT level\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#hidden</span>\n\t\t\t\t\t<span>@data.gettext(\"Hide translations by default to translate them yourself and check if its correct\")</span>\n\t\t\t\t</div>\n\t\t\t</div>\n\n         <!-- Kanji search hashtags --> \n\t\t\t<b>@data.gettext(\"Available Hashtags for [Kanji] search\")</b>\n\t\t\t<div class=\"table\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>#Genki [3-23]</span>\n\t\t\t\t\t<span>@data.gettext(\"Search for kanji included in the specific Genki chapter\")</span>\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t</div>\n\n\t\t<!-- Radical Search info -->\n        <div class=\"small-12 medium-6 columns small-centered help-cat\">\n\t\t\t<h2>@data.gettext(\"Radical search\")</h2>\n\t\t\t<p>\n\t\t\t\t@data.gettext(\"The radical picker allows searching for radicals to make the process of 
picking radicals even faster. The supported inputs are as following:\")\n\t\t\t</p>\n\t\t\t<div class=\"table\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>Kanji</span>\n\t\t\t\t\t<span>@data.gettext(\"Results in all radicals used to build given kanji characters\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>Kana</span>\n\t\t\t\t\t<span>@data.gettext(\"Searches in words for the given query and returns in result-matching radicals\")</span>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<span>Romaji</span>\n\t\t\t\t\t<span>@data.gettext(\"Tries to find the given query in radicals names, otherwise does a word search and returns the result's kanji\")</span>\n\t\t\t\t</div>\n\t\t\t</div>\n\n        </div>\n\n\t</article>\n</div>\n"
  },
  {
    "path": "lib/frontend/templates/pages/kanji.rs.html",
    "content": "@use search::kanji::result::Item;\n@use crate::BaseData;\n@use super::search_help;\n@use crate::templ_utils::*;\n@use crate::templates::overlays::page::{decomposition_graph_html};\n\n@(data: &BaseData, kanji: Vec<Item>)\n\n@if kanji.is_empty() {\n  @:search_help(&data, data.gettext(\"kanji\").as_str())\n}\n\n<script defer src=\"/variable_assets/@data.asset_hash/assets/js/lib/d3.js\"></script>\n<script defer src=\"/variable_assets/@data.asset_hash/assets/js/page/kanjiPage.js\"></script>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/multiPage/kanji.css\">\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/kanjiPage.css\">\n\n<!-- Decomposition graph -->\n@:decomposition_graph_html(data)\n\n<!-- Inner Main Body -->\n@for (kpos, k_item) in kanji.iter().enumerate() {\n  @if kpos > 0 {\n    <br>\n  }\n  \n  <div class=\"d-flex kanji-entry-parent\">\n    \n      <!-- Left side: Kanji -->\n      <div class=\"kanji-entry left detail d-flex flex-column\">\n         <span class=\"kanji-preview x-large\"  @if k_item.has_compositions { onclick='generateTreeDiagram(\"@k_item.kanji.literal\")' }>\n           @k_item.kanji.literal\n         </span>\n         <div class=\"kanji-preview-info\">\n           <div class=\"notes stroke\"><b>@data.ngettext_fmt(\"{} stroke\", k_item.kanji.stroke_count as u64, &[k_item.kanji.stroke_count])</b> </div>\n            <div class=\"rad-parts-parent\">\n                <div class=\"notes rad\"><b>@data.gettext(\"Radical\"):</b>\n                    <br>\n                    <a class=\"clickable no-align\" href=\"/search/@k_item.kanji.radical.literal?t=1\">@k_item.get_radical()</a>\n                    @if k_item.get_rad_len() > 7 {\n                    <br>\n                    }\n                    @if let Some(ref translations) = k_item.kanji.radical.translations {\n                        (@translations.join(\", \"))\n      
              }\n                </div>\n                @if !k_item.kanji.parts.is_empty() {\n                <div class=\"notes parts\"><b>@data.ngettext(\"Part\", k_item.get_parts_count() as u64):</b>\n                    <br>\n                    @for (pos, part) in k_item.kanji.parts.iter().enumerate() {\n                        @if pos > 0 {\n                            <span class=\"tags no-margin fat\">,</span>\n                        }\n                    <a class=\"clickable no-align\" href=\"/search/@part?t=1\">@part</a>\n                    }\n                </div>\n                }\n            </div>\n         </div>\n      </div>\n      <!-- Right side: Info -->\n      <div class=\"kanji-entry right detail\">\n         <!-- First Part -->\n         <div class=\"kanji-entry d-flex flex-row\">\n            <!-- Inner Left Side -->\n            <div class=\"kanji-preview-left\">\n               <div class=\"translation big\">\n                   @k_item.kanji.meanings.join(\", \")\n               </div>\n                @if !k_item.kanji.kunyomi.is_empty() {\n                 <div class=\"d-flex flex-row wrap\">\n                    <div class=\"tags no-margin fat right-padding-10 noselect\">@data.gettext(\"Kun\"):</div>\n                      @for (pos, kun) in k_item.kanji.kunyomi.iter().enumerate() {\n                        @if pos > 0 {\n                          <div class=\"tags no-margin fat right-padding-10\">,</div>\n                        }\n                        <a class=\"clickable no-align fat\" href=\"/search/@k_item.kanji.literal @kun\">@kun</a>\n                      }\n                 </div>\n                }\n                @if !k_item.kanji.onyomi.is_empty() {\n                 <div class=\"d-flex flex-row wrap\">\n                    <div class=\"tags no-margin fat right-padding-20 noselect\">@data.gettext(\"On\"):</div>\n                      @for (pos, on) in k_item.kanji.onyomi.iter().enumerate() {\n                     
   @if pos > 0 {\n                          <div class=\"tags no-margin fat right-padding-10\">,</div>\n                        }\n                        <a class=\"clickable no-align fat\" href=\"/search/@k_item.kanji.literal @on\">@on</a>\n                      }\n                 </div>\n               }\n            </div>\n            <!-- Inner Right Side -->\n            <div class=\"kanji-preview-right\">\n                @if let Some(grade) = k_item.kanji.grade {\n                  <div class=\"notes\">@data.gettext_fmt(\"Taught in {} grade\", &[grade])</div>\n                }\n                @if let Some(n_lvl) = k_item.kanji.jlpt {\n                 <div class=\"notes\">@data.gettext(\"JLPT level\") <b>N@n_lvl</b></div>\n                }\n                @if let Some(frequency) = k_item.kanji.frequency {\n                 <div class=\"notes\"><b>@frequency</b> @data.gettext(\"of 2500 most used kanji in newspapers\")</div>\n                }\n            </div>\n        </div>\n        <!-- Alternative Readings Part -->\n        <div>\n          @if !k_item.kanji.nanori.is_empty() {\n            <div class=\"notes d-flex flex-row wrap\"><b class=\"right-padding-10\">@data.gettext(\"Japanese names\"):</b> \n              @for (pos, n) in k_item.kanji.nanori.iter().enumerate() {\n                @if pos > 0 {\n                  <div class=\"tags no-margin slim right-padding-10\">,</div>\n                }\n                <a class=\"clickable no-align\" href=\"/search/@k_item.kanji.literal @n?t=3\">@n</a>\n              }\n            </div>\n          }\n          @if !k_item.kanji.similar_kanji.is_empty() {\n            <div class=\"notes d-flex flex-row wrap\"><b class=\"right-padding-10\">@data.gettext(\"Similar Kanji\"):</b> \n              @for (pos, n) in k_item.kanji.similar_kanji.iter().enumerate() {\n                @if pos > 0 {\n                  <div class=\"tags no-margin slim right-padding-10\">,</div>\n                }\n                
<a class=\"clickable no-align\" href=\"/search/@n?t=1\">@n</a>\n              }\n            </div>\n          }\n          @if !k_item.kanji.chinese.is_empty() {\n            <div class=\"notes\"><b>@data.gettext(\"Chinese reading\"):</b> @k_item.kanji.chinese.join(\", \")</div>\n          }\n          @if let Some(ref korean) = k_item.get_korean() {\n            <div class=\"notes\"><b>@data.gettext(\"Korean reading\"):</b>&nbsp;&nbsp; @korean.join(\", \")</div>\n          }\n          @if !k_item.kanji.vietnamese.is_empty() {\n            <div class=\"notes\"><b>@data.gettext(\"Vietnamese reading\"):</b>&nbsp;&nbsp; @k_item.kanji.vietnamese.join(\", \")</div>\n          }\n        </div>\n\n         <!-- Second Part -->\n         @if k_item.kanji.has_stroke_frames(data.assets_path()) {\n           <hr/>\n           <div class=\"stroke-container\">\n              <img src=\"@k_item.kanji.get_stroke_frames_url()\" type=\"image/svg+xml\" class=\"kanji-img\">\n              @unescaped_string(k_item.get_frames(data.assets_path()).unwrap_or_default())\n           </div>\n         }\n\n         @if k_item.kanji.has_animation_file(data.assets_path()) {\n         \n          <div class=\"animation-container d-flex flex-row\">\n\n            <div class=\"animation-controller d-flex flex-column\">\n              <div>\n                <div class=\"d-flex flex-row speed-tag\">\n                  <div>Animation Speed:</div>\n                  <div id=\"@(k_item.kanji.literal)_speed\">100%</div>\n                </div>\n                <input data-kanji=\"@(k_item.kanji.literal)\" type=\"range\" min=\"0.05\" max=\"2\" value=\"1\" step=\"0.05\" class=\"slider speedSlider\"/>\n              </div>\n              <div class=\"animation-group d-flex\">\n                <button class=\"l ripple\" onclick='doAnimationStep_onClick(\"@(k_item.kanji.literal)\", -1)'>\n                  <img src=\"/assets/svg/ui/backwards.svg\" />\n                </button>\n\n                <button 
id=\"@(k_item.kanji.literal)_play\" onclick='doOrPauseAnimation(\"@(k_item.kanji.literal)\")' class=\"m ripple\" data-state=\"play\">\n                  <span> \n                    <img draggable=\"false\" src=\"/assets/svg/ui/play.svg\" />\n                  </span>\n                  <span class=\"hidden\">\n                    <img draggable=\"false\" src=\"/assets/svg/ui/pause.svg\" />\n                  </span>\n                </button>\n                \n                <button class=\"r ripple\" onclick='doAnimationStep_onClick(\"@(k_item.kanji.literal)\", 1)'>\n                  <img draggable=\"false\" src=\"/assets/svg/ui/forwards.svg\" />\n                </button>\n              </div>\n            </div>            \n\n            <div id=\"@(k_item.kanji.literal)_svg\" class=\"anim-container\" onmouseover='toggleNumbers(\"@(k_item.kanji.literal)\", true)' onmouseleave='toggleNumbers(\"@(k_item.kanji.literal)\", false)'>\n              @unescaped_string(k_item.get_animation(data.assets_path()).unwrap_or_default())\n\n              <div id=\"@(k_item.kanji.literal)_reset\" class=\"clickable reset-btn\" onclick='undoAnimation(\"@(k_item.kanji.literal)\")'>\n                <img draggable=\"false\" src=\"/assets/svg/ui/revert.svg\" />\n              </div>\n            </div>\n           </div>\n          }\n        \n          @if k_item.kanji.has_compounds() {\n            <div class=\"compounds-dropdown-parent\">\n              @if data.kanji_copounds_collapsed() {\n                <hr class=\"compounds-dropdown closed\"/>\n              }else{\n                <hr class=\"compounds-dropdown\"/>\n              }\n              <div class=\"compounds-click-area\" onclick=\"toggleCompounds(event)\"></div>\n            </div>\n          }else{\n            <hr/>\n          }\n\n            <!-- Reading Compounds -->\n            @if data.kanji_copounds_collapsed() {\n             <div class=\"compounds-parent d-flex flex-row hidden\">\n            
}else{\n             <div class=\"compounds-parent d-flex flex-row\">\n            }\n\n            <!-- On Reading -->\n            @if let Some(ref ons) = k_item.on_dicts {\n              <div class=\"on-reading\">\n                 <h3 class=\"noselect\">\n                    @data.gettext(\"On reading compounds\")\n                 </h3>\n                 <div class=\"d-flex flex-column\">\n                   @for on in ons.iter() {\n                      <span>\n                        @if let Some(ref kanji_reading) = on.reading.kanji {\n                          <a class=\"clickable green\" href=\"/search/@kanji_reading.reading\">@kanji_reading.reading</a> 【@on.reading.kana.reading】@on.glosses_pretty()\n                        }\n                      </span>\n                    }\n                 </div>\n              </div>\n            }\n            <!-- Kun Reading -->\n            @if let Some(ref kuns) = k_item.kun_dicts {\n              <div class=\"kun-reading\">\n                <h3 class=\"noselect\">\n                   @data.gettext(\"Kun reading compounds\")\n                </h3>\n                <div class=\"d-flex flex-column\">\n                  @for kun in kuns.iter() {\n                    <span>\n                      @if let Some(ref kanji_reading) = kun.reading.kanji {\n                        <a class=\"clickable green\" href=\"/search/@kanji_reading.reading\">@kanji_reading.reading</a> 【@kun.reading.kana.reading】@kun.glosses_pretty()\n                      }\n                    </span>\n                  }\n                </div>\n              </div>\n            }\n         </div>\n      </div>\n   </div>\n}\n"
  },
  {
    "path": "lib/frontend/templates/pages/names.rs.html",
    "content": "@use types::jotoba::names::Name;\n@use super::search_help;\n@use crate::templ_utils::get_types_humanized;\n@use crate::BaseData;\n\n@(data: &BaseData, names: Vec<&Name>)\n\n  <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/multiPage/kanji.css\">\n  <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/namePage.css\">\n\n<div class=\"d-flex center\">\n  <div class=\"main-info d-flex flex-column\">\n\n    @if names.is_empty() {\n      @:search_help(&data, data.gettext(\"names\").as_str())\n    }\n\n    @for name in names.iter() {\n      <div class=\"list-entry\">\n         <div class=\"d-flex flex-row\">\n            @if let Some(ref kanji) = name.kanji {\n              <div class=\"kanji-preview small\">\n                 @kanji\n              </div>\n            }\n            <div class=\"kanji-preview small\">\n              @if name.kanji.is_some() {\n               【@name.kana】\n              } else {\n                @name.kana\n              }\n            </div>\n         </div>\n         <div class=\"d-flex flex-row\">\n            <div class=\"row-tag-entry\">\n               <div class=\"tags\">@data.gettext(\"Full name\")</div>\n               <div class=\"notes\">@name.transcription</div>\n            </div>\n\n            @if name.name_type.is_some() {\n              @if let Some(gender) = name.get_gender() {\n                <div class=\"row-tag-entry\">\n                  <div class=\"tags\">@data.gettext(\"Sex\")</div>\n                   <div class=\"notes\">@data.gettext(gender)</div>\n                </div>\n              } \n\n              @if name.has_non_gender_tags() {\n                <div class=\"row-tag-entry\">\n                  <div class=\"tags\">@data.gettext(\"Name origin\")</div>\n                   <div class=\"notes\">@get_types_humanized(name, data.dict, data.get_lang())</div>\n                </div>\n              
}\n            }\n         </div>\n         <hr/>\n      </div>\n    }\n\n  </div>\n</div>\n"
  },
  {
    "path": "lib/frontend/templates/pages/news.rs.html",
    "content": "@use crate::BaseData;\n@use news::NewsEntry;\n@type News = Vec<NewsEntry>;\n@(data: &BaseData, news: News)\n\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/newsPage.css\">\n<link rel=\"stylesheet\" type=\"text/css\" href=\"variable_assets/@data.asset_hash/assets/css/page/multiPage/markdown.css\">\n\n<div id=\"news-list\">\n</div>\n\n<script>\n  const news = [\n      @for news_entry in news.into_iter().rev() {\n        @{  \n          creation_time: '@news_entry.creation_time',\n          title: '@news_entry.title',\n          html: '@news_entry.long.replace(\"\\n\", \"\")'\n        @},\n      }\n  ];\n</script>\n<script src=\"/variable_assets/@data.asset_hash/assets/js/page/newsPage.js\"></script>\n"
  },
  {
    "path": "lib/frontend/templates/pages/search_help.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData, title: &str)\n\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/helpPage.css\">\n\n<h3 class=\"noselect word-title\">@data.gettext_fmt(\"No {} found\", &[title])</h3>\n\n<div class=\"d-flex center\">\n  <div class=\"small-12 medium-6 columns small-centered\">\n\n    @if let Some(search_help) = data.get_search_help() {\n\n      @if !search_help.other_langs.is_empty() {\n        <p class=\"fat\">@data.gettext(\"Found in multiple other languages\")</p>\n        @for (lang, l_code) in search_help.iter_langs() {\n          <b> - @data.gettext(lang)</b>\n          \n          <div class=\"indented\">\n            <a class=\"clickable\" draggable=\"false\" href=\"/search/@l_code: @data.site.as_search_result().unwrap().query.get_query_encoded()\">Temporarily search</a>\n          </div>\n          \n          <div class=\"indented\">\n            <a class=\"clickable\" draggable=\"false\" href=\"#\">Change to language</a>\n          </div>\n        }\n      }\n\n      <p class=\"fat\">@data.gettext(\"Found in other search types\")</p>\n\n      @for (query_type, guess) in search_help.iter_items() {\n        <div class=\"indented\">\n          <a class=\"clickable\" draggable=\"false\" href=\"#\" onclick=\"changeSearchType(null, @query_type.get_type_id())\">@guess.format() @data.gettext(\"in\") @query_type.get_translated(data.dict, Some(data.get_lang())) </a>\n        </div>\n      }\n    }\n\n  <article style=\"height: 700px;\">\n    <div class=\"small-12 medium-6 columns small-centered\">\n        <p class=\"fat\">@data.gettext(\"Search Help\")</p>\n        <ul>\n           <li>@data.gettext(\"Your default search language might not fit your input\")</li>\n           <li>@data.gettext(\"Check your search for typos\")</li>\n           <li>@data.gettext(\"Use more generic search terms\")</li>\n           <li>@data.gettext(\"Try finding your search in a different 
category using\") <span class=\"highlight\"> w / s / n / k </span></li>\n           <li>@data.gettext(\"Your search request might not be included in our database yet\")</li>\n        </ul>\n\n        <p>\n           @data.gettext(\"If you think your search should be contained in our database, submit an issue on\") <a class=\"clickable\" href='https://github.com/WeDontPanic/Jotoba'>Github</a>.\n           <br>\n           @data.gettext(\"Also check our\") <a class=\"clickable\" href='https://trello.com/b/nmG0xgaW/jotoba'>Trello Board</a> @data.gettext(\"since we might be working on it!\")\n        </p>\n        <img class=\"help-joto\" src=\"/variable_assets/@data.asset_hash/assets/jotokun/JotoBook.svg\">\n    </div>\n  </article>\n</div>\n\n"
  },
  {
    "path": "lib/frontend/templates/pages/sentences.rs.html",
    "content": "@use search::sentence::result::{Sentence, ResData};\n@use search::executor::search_result::SearchResult;\n@use super::search_help;\n\n@use crate::templates::functional::{render_sentence_html};\n\n@use crate::BaseData;\n@(data: &BaseData, sentences: SearchResult<Sentence, ResData>)\n\n  <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/multiPage/kanji.css\">\n  <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/multiPage/kana.css\">\n  <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/sentencePage.css\">\n  <script defer src=\"/variable_assets/@data.asset_hash/assets/js/page/sentencePage.js\"></script>\n\n<!-- Inner Main Body -->\n<div class=\"d-flex center\">\n  <div class=\"main-info d-flex flex-column\">\n\n    @if sentences.items.is_empty() {\n      @:search_help(&data, data.gettext(\"sentences\").as_str())\n    }\n\n    @for sentence in sentences.items {\n      <div class=\"list-entry sentence\">\n\n        <!-- Share Icon -->\n        <div class=\"sentence-share\">\n          <div class=\"shareSvg\" onclick='JotoTools.copyTextAndEcho(JotoTools.pathToUrl(\"/direct/2/@sentence.id\"), \"QOL_LINK_COPIED\");'></div>\n          <div class=\"searchSvg\" onclick='Util.loadUrl(JotoTools.createUrl(\"@sentence.content\"))'></div>\n        </div>\n\n        <!-- Sentence -->\n        <div class=\"d-flex flex-row wrap sentence\">\n          @:render_sentence_html(sentence.furigana_pairs(), \"small\", data.user_settings.sentence_furigana)\n        </div>\n\n         <!-- Translation -->\n        <div class=\"sentence-translation original\" style=\"@if sentences.other_data.hidden { display: none; }\">\n          @sentence.translation\n        </div>\n        @if let Some(ref english) = sentence.get_english() {\n          <hr class=\"lang-separator\" style=\"@if sentences.other_data.hidden { display: none; }\"/>\n 
         <div class=\"sentence-translation\" style=\"@if sentences.other_data.hidden { display: none; }\">\n            @english\n          </div>\n        }\n\n        <!-- Toggle Btn -->\n        @if sentences.other_data.hidden {\n          <div class=\"sentence-toggle hide hidden noselect\" onclick='toggleTranslation(this)'>[@data.gettext(\"hide\")]</div>\n          <div class=\"sentence-toggle show noselect\" onclick='toggleTranslation(this)'>[@data.gettext(\"show\")]</div>\n        }\n      </div>\n     <hr class=\"res-separator sentence\" />\n    }\n\n  </div>\n</div>\n\n"
  },
  {
    "path": "lib/frontend/templates/pages/words.rs.html",
    "content": "@use search::word::result::{selected, AddResData};\n@use types::jotoba::words::Word;\n@use types::jotoba::language::Language;\n@use search::executor::search_result::SearchResult;\n@use search::query::Query;\n@use jp_utils::furi::parse::FuriParser;\n@use crate::templ_utils::*;\n@use super::{search_help};\n@use crate::templates::functional::{render_sentence_html};\n@use crate::templates::overlays::info::*;\n\n@use crate::BaseData;\n@(data: &BaseData, query: &Query, result: SearchResult<Word, AddResData>)\n\n<link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/tools/ripple.css\">\n\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/multiPage/kanji.css\">\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/multiPage/kana.css\">\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/wordPage.css\">\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/wordExtensions/sentenceReader.css\">\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/page/wordExtensions/searchAnnotation.css\">\n\n<script defer src=\"/variable_assets/@data.asset_hash/assets/js/page/wordPage.js\"></script>\n\n  @if let Some(ref inflection) = result.inflection {\n    <div class=\"search-annotation\">\n\n      <!-- Search Inflections -->\n      <div class=\"search-inflection\">\n        <span>\n          @data.ngt_search_links(\"{} could be an inflection of {}, with this form:\", 1,\n          &[&result.other_data.raw_query, &inflection.lexeme], inflection.inflections.len() as u64)\n          <br>\n          <span class=\"forms\">\n            @for inflection in inflection.inflections.iter() {\n              <li>@data.gettext(*inflection)</li>\n            }\n          
</span>\n        </span>\n      </div>\n\n      @if result.has_sentence() {\n        <br>\n      }\n\n    </div>\n  }\n\n  @if let Some(sentence_parts) = result.sentence_parts() {\n    <div class=\"search-annotation\">\n      <!-- Sentence Info -->\n      <div id=\"sr\" class=\"d-flex wrap\">\n        @for part in sentence_parts.iter() {\n          @if let Some(furigana) = part.furigana() {\n            <a id=\"p@part.pos()\" href=\"/search/@query.get_query_encoded()?i=@part.pos()\"\n              class=\"@part.word_class_lower().unwrap_or_default() sentence-part @selected(part.pos(), result.sentence_index())\">\n                @:render_sentence_html(FuriParser::new(furigana).to_vec().unwrap(), \"\", true)\n              </a>\n          } else {\n            <!-- TODO find a better way to keep this consistent. Implement with reader_sentence.rs.html -->\n            <a id=\"p@part.pos()\" href=\"/search/@query.get_query_encoded()?i=@part.pos()\"\n              class=\"@part.word_class_lower().unwrap_or_default() sentence-part @selected(part.pos(), result.sentence_index())\">\n              <div class=\"furigana-kanji-container\">\n                <ruby>\n                  <span class=\"kanji-preview\">@part.get_inflected()</span>\n                  <rt class=\"furigana-preview\">&nbsp;</rt>\n                </ruby>\n              </div>\n            </a>\n          }\n        }\n      </div>\n\n      <!-- Sentence Tooltips -->\n      @for part in sentence_parts.iter() {\n        @if let Some(ref info) = part.word_class() {\n          <div class=\"mdl-tooltip\" for=\"p@part.pos()\">\n            @data.gettext(*info)\n          </div>\n        }\n      }\n    </div>\n    <br>\n  }\n   \n  <!-- Inner Main Body -->\n  <div id=\"content-container\" class=\"d-flex left\">\n    <div class=\"main-info d-flex flex-column\">\n\n      @*\n      @if let Some(new_lang) = result.changed_lang {\n        @data.gettext_fmt(\"Temporarily switched language to {}\", 
&[data.gettext(new_lang)])\n      }*@\n\n      @if !result.is_empty() {\n        <h3 class=\"noselect word-title\">@data.gettext(\"Words\")</h3>\n      } else {\n        @:search_help(&data, data.gettext(\"words\").as_str())\n      }\n\n      <!-- Word, Sentence, Name Templates here -->\n      @for word in result.items.iter() {\n          @if word.get_reading().len() > 3 {\n            <div class=\"d-flex flex-row wrap\"> \n              @if let Some(s_pairs) = word.get_furigana() {\n                @:render_sentence_html(s_pairs, \"\", true)\n              } else {\n                <div class=\"inline-kana-preview\">\n                  @word.get_reading().reading\n                </div>\n              }\n            </div>\n          }\n\n          <div class=\"d-flex center\">\n            <!-- Left side: Writing -->\n            <div class=\"kanji-entry left fixed d-flex flex-column\">\n\n              @if word.get_reading().len() <= 3 {\n                <div class=\"d-flex flex-row\"> \n                  @if let Some(s_pairs) = word.get_furigana() {\n                    @:render_sentence_html(s_pairs, \"\", true)\n                  } else {\n                    <div class=\"inline-kana-preview\">\n                      @word.get_reading().reading\n                    </div>\n                  }\n                </div>\n              }\n\n              @if word.is_common() {\n                <div class=\"word-frequency common\">\n                  @data.gettext(\"common word\")\n                </div>\n              }\n              @if let Some(lvl) =  word.jlpt_lvl {\n                <div class=\"word-frequency\">\n                  @data.gettext_fmt(\"JLPT N{}\", &[lvl])\n                </div>\n              }\n              @if let Some(audio) = word.audio_file(data.config.server.get_audio_files()) {\n                <span class=\"clickable audioBtn p\" data-p='\"button\", @{\"props\":@{\"name\": \"Play audio\", \"category\": \"util\"@}@}' 
data=\"/audio/@audio\">@data.gettext(\"Play audio\")\n                  <audio preload=\"none\">\n                    <source src=\"/audio/@audio\" type=\"audio/mp3\">\n                  </audio>\n                </span>\n              }\n            </div>\n\n            <div class=\"kanji-entry right\">\n              <div class=\"entry-min-height-@word.get_word_tag_count()\">\n                <!-- Right side: Info -->\n\n                \n                <!-- Tooltip -->\n                  <div id=\"m@word.sequence\" class=\"word-tooltip tooltipSvg d-flex flex-column noselect p\" data-p='\"button\", @{\"props\":@{\"name\": \"3-dot\", \"category\": \"popup\"@}@}'></div>\n\n                  <ul id=\"info-dropdown\" class=\"mdl-menu mdl-menu--bottom-right mdl-js-menu mdl-js-ripple-effect\" for=\"m@word.sequence\">\n                    @if word.get_inflections().is_some() {\n                      <li id=\"conjugationBtn\" class=\"info-entry noselect p\" data-p='\"button\", @{\"props\":@{\"name\": \"Show Conjugations\", \"category\": \"3-dot\"@}@}' data-toggle=\"modal\" data-target=\"#conj@word.sequence\" class=\"mdl-menu__item\">\n                        <div class=\"conjugationSvg\"></div> \n                        <span class=\"text\">@data.gettext(\"Show Conjugations\")</span>\n                      </li>\n                      <li disabled class=\"mdl-menu__item\"><hr></li>\n                    }\n                    \n                    @if let Some(ref collocations) = word.collocations {\n                      <li id=\"collocationBtn\" class=\"info-entry noselect p\" data-p='\"button\", @{\"props\":@{\"name\": \"Show Collocations\", \"category\": \"3-dot\"@}@}' data-toggle=\"modal\" data-target=\"#coll@word.sequence\" class=\"mdl-menu__item\">\n                        <div class=\"conjugationSvg\"></div>\n                        <span class=\"text\">@data.ngettext(\"Show collocation\", collocations.len() as u64)</span>\n                      </li>\n          
            <li disabled class=\"mdl-menu__item dot-menu\"><hr></li>\n                    }\n\n                    <!-- TODO Implement if statement here + Translate -->\n                    @* @if \n                      <li id=\"jdefBtn\" class=\"info-entry noselect p\" data-p='\"button\", @{\"props\":@{\"name\": \"Show Japanese definitions\", \"category\": \"3-dot\"@}@}' data-toggle=\"modal\" data-target=\"#jdef@word.sequence\" class=\"mdl-menu__item\">\n                        <div class=\"conjugationSvg\"></div>\n                        <span class=\"text\">@data.gettext(\"Japanese definitions\")</span>\n                      </li>\n                      <li disabled class=\"mdl-menu__item dot-menu\"><hr></li>\n                    @if_end *@\n\n                    @if let Some(intransitive) = get_intransitive_counterpart(&word) {\n                      <li class=\"info-entry noselect p\" data-p='\"button\", @{\"props\":@{\"name\": \"Show Intransitive\", \"category\": \"3-dot\"@}@}' class=\"mdl-menu__item\"> \n                        <div class=\"transitivitySvg\"></div>\n                        <span class=\"text\">\n                          <a class=\"no-highlight\" draggable=\"false\" href=\"/search/@intransitive.get_reading().reading\">@data.gettext(\"Intransitive word\")</a>\n                        </span>\n                      </li>\n                      <li disabled class=\"mdl-menu__item dot-menu\"><hr></li>\n                    }\n\n                    @if let Some(transitive) = get_transitive_counterpart(&word) {\n                      <li class=\"info-entry noselect p\" data-p='\"button\", @{\"props\":@{\"name\": \"Show Transitive\", \"category\": \"3-dot\"@}@}' class=\"mdl-menu__item\">\n                        <div class=\"transitivitySvg\"></div>\n                        <span class=\"text\">\n                          <a class=\"no-highlight\" draggable=\"false\" href=\"/search/@transitive.get_reading().reading\">@data.gettext(\"Transitive 
word\")</a>\n                        </span> \n                      </li>\n                      <li disabled class=\"mdl-menu__item dot-menu\"><hr></li>\n                    }\n\n                    @if word.has_sentence(data.user_settings.user_lang) || (data.user_settings.show_english && word.has_sentence(Language::English)) {\n                      <li class=\"info-entry noselect p\" data-p='\"button\", @{\"props\":@{\"name\": \"Sentence Search\", \"category\": \"3-dot\"@}@}' class=\"mdl-menu__item\">\n                        <div class=\"sentenceSvg\"></div>\n                        <span class=\"text\">\n                          <a class=\"no-highlight\" draggable=\"false\" href=\"/search/@word.get_reading().reading?t=2\">@data.gettext(\"Sentence search\")</a>\n                        </span> \n                      </li>\n                    }\n\n                    @if let Some(audio) = word.audio_file(data.config.server.get_audio_files()) {\n                      <li disabled class=\"mdl-menu__item p\" data-p='\"button\", @{\"props\":@{\"name\": \"Download audio\", \"category\": \"3-dot\"@}@}'><hr></li>\n                      <li id=\"AudioDlBtn\" class=\"info-entry noselect\" class=\"mdl-menu__item\">\n                        <div class=\"downloadSvg\"></div>\n                        <span class=\"text\">\n                          <a class=\"no-highlight\" draggable=\"false\" href=\"/audio/@audio\">@data.gettext(\"Download audio\")</a>\n                        </span> \n                      </li>\n                    }\n\n                    @if data.config.is_debug() {\n                      <li disabled class=\"mdl-menu__item\"><hr></li>\n                      <li class=\"info-entry noselect\" class=\"mdl-menu__item\">\n                        <span class=\"text\" onclick='JotoTools.copyTextAndEcho(@word.sequence, \"Copied sequence\");'>Seq: @word.sequence</span> \n                      </li>\n                    }\n\n                    <li disabled 
class=\"mdl-menu__item\"><hr></li>\n                    <li class=\"info-entry noselect p\" data-p='\"button\", @{\"props\":@{\"name\": \"Direct reference\", \"category\": \"3-dot\"@}@}'>\n                      <div class=\"linkSvg\"></div>\n                      <span class=\"text\">\n                        <a class=\"no-highlight\" draggable=\"false\" href=\"/direct/0/@word.sequence\">@data.gettext(\"Direct reference\")</a>\n                      </span>\n                      <div class=\"extra\">\n                        <div class=\"copySvg\" onclick='JotoTools.copyTextAndEcho(JotoTools.pathToUrl(\"/direct/0/@word.sequence\"), \"QOL_LINK_COPIED\");'></div>\n                      </div>\n                    </li>\n\n                  </ul>\n\n                @for (spos, senses_compound) in word.get_senses_orderd(query.settings.english_on_top, query.settings.user_lang).iter().enumerate() { \n                  @for (pos, sense) in senses_compound.iter().enumerate() {\n                    @if !sense.glosses.is_empty() {\n                      <!-- Sense entry -->\n                      <div class=\"list-entry\">\n                        <div class=\"tags\">\n                          @sense.get_parts_of_speech(&data.dict, data.get_lang())\n                        </div>\n                        <div class=\"definition-wrapper d-flex flex-row\">\n                          <div class=\"entry-count\">\n                            @(pos+1).\n                          </div>\n                          <div class=\"translation\">\n                            @sense.get_glosses()\n                          </div>\n                        </div>\n                        @if let Some((info, xref, antonym, dialect, gairaigo)) = sense.get_infos(&data.dict, data.get_lang()) {\n                          <div class=\"tags no-margin\">\n                            @if let Some(ref gro) = gairaigo {\n                              @gro\n                            }\n             
               @if let Some(ref info) = info {\n                              @if gairaigo.is_some() {\n                                .\n                              }\n                              @info\n                            }\n                            @if let Some(ref dialect) = dialect {\n                              @data.gettext_custom(*dialect)\n                            }\n                            @if let Some(ref antonym) = antonym {\n                              @data.gt_search_link(\"Antonym of {}\", antonym) \n                            }\n                            @if let Some(ref xref) = xref {\n                              @if antonym.is_some() || dialect.is_some() {\n                                .\n                              }\n                              @data.gt_search_link(\"See also {}\", xref) \n                            }\n                          </div>\n                        }\n\n                        @if query.settings.show_example_sentences {\n                          @if let Some((furi, translation)) = ext_sentence(sense, &query.settings.user_lang) {\n                            <div class=\"d-flex flex-row\">\n                              <div class=\"tags no-margin example-sentence collapsed\"> \n                                <div class=\"d-flex flex-row wrap\">               \n                                  @:render_sentence_html(furi, \"\", true)\n                                </div>\n                              </div>\n                              <div class=\"expander\">\n\n                              </div>\n                            </div>\n                            <div class=\"tags no-margin\">\n                              @translation\n                            </div>\n                          }\n                        }\n                      </div>\n                    }\n                  }\n\n                  @if spos == 0 && 
!word.get_senses_with_en()[1].is_empty() && !senses_compound.is_empty() {\n                    <hr class=\"lang-separator\"/>\n                  }\n                }\n              \n                @if let Some(pitch) = word.get_first_pitch() {\n                  <div class=\"tags\">@data.gettext(\"Pitch accent\")</div>\n                  <div class=\"pitch\">\n                    @for (classes, kana) in pitch.render() {\n                      <span class=\"pitch @classes\">@kana</span>\n                    }\n                  </div>\n                }\n\n                <!-- Additional Informations -->\n                @if !word.reading.alternative.is_empty() {\n                  <div class=\"tags\">@data.gettext(\"Other forms\")</div>\n                  <div class=\"notes\">@word.alt_readings_beautified()</div>\n                }\n              </div>\n\n              <!-- Collocation Modal -->\n              @if word.has_collocations() {\n                @:collocations_html(&data, &word)\n              }\n\n              <!-- Inflections Modal -->\n              @if let Some(inflections) = word.get_inflections() {\n                @:inflections_html(&data, &word, &inflections)\n              }\n\n              <!-- JP definitions Modal -->\n              <!-- TODO Implement + Translate -->\n              @* @if @@_get_me_some_html!\n                @:definitions_jp(&data, &word)\n              @if_end *@\n\n              <hr class=\"res-separator\" />\n            </div>\n          </div>\n      }\n      \n    </div>\n    <div id=\"secondaryInfo\" class=\"secondary-info @if !has_kanji(&result) { hidden }\">\n      <h3 class=\"noselect\">@data.gettext(\"Kanji\")</h3>\n      <!-- Kanji Templates here -->\n      @for kanji in word_kanji(&result) {\n        <div class=\"kanji-parent\">\n          <!-- Tags -->\n          <div class=\"tags\">\n            @data.ngettext_fmt(\"{} stroke\", kanji.stroke_count as u64, &[kanji.stroke_count]).\n            @if let 
Some(jlpt_lvl) = kanji.jlpt {\n              @data.gettext_fmt(\"JLPT N{}\", &[jlpt_lvl])}@*\n            *@@if let Some(grade) = kanji.grade {@if kanji.jlpt.is_some() {. }\n              @data.gettext_fmt(\"Taught in {} grade\", &[grade])\n            }\n          </div>\n\n          <!-- Kanji and translation -->\n          <div class=\"kanji-entry left d-flex flex-row\">\n            <a class=\"kanji-preview large black\" href=\"/search/@kanji.literal?t=1\">\n              @kanji.literal\n            </a>\n            <div class=\"translation kanji\">\n              @kanji.meanings.join(\", \")\n            </div>\n          </div>\n\n          <!-- Kun readings -->\n          @if !kanji.kunyomi.is_empty() {\n            <div class=\"tags no-margin\">@data.gettext(\"Kun\")</div>\n            <div class=\"d-flex flex-row wrap noselect\">\n              @for (pos, reading) in kanji.kunyomi.iter().enumerate() {\n                @if pos > 0 {\n                  <span class=\"tags no-margin fat right-padding-10\">,</span>\n                }\n                <a class=\"clickable no-align green\" href=\"/search/@kanji.literal @reading\">@reading</a>\n              }\n            </div>\n          }\n\n          <!-- On readings -->\n          @if !kanji.onyomi.is_empty() {\n            <div class=\"tags no-margin\">@data.gettext(\"On\")</div>\n            <div class=\"d-flex flex-row wrap noselect\">\n              @for (pos, reading) in kanji.onyomi.iter().enumerate() {\n                @if pos > 0 {\n                  <span class=\"tags no-margin fat right-padding-10\">,</span>\n                }\n                <a class=\"clickable no-align green\" href=\"/search/@kanji.literal @reading\">@reading</a>\n              }\n            </div>\n          }\n\n          <hr />\n        </div>\n      }\n    </div>\n  </div>\n"
  },
  {
    "path": "lib/frontend/templates/subtemplates/footer.rs.html",
    "content": "@use super::{paginator};\n@use crate::BaseData;\n@use resources::GIT_HASH;\n@(data: &BaseData, show_ref: bool)\n\n<footer>\n\n   @:paginator(data)\n\n    @if show_ref {\n    <div class=\"ref-row\">\n        <div class=\"discordSvg\" onclick='if (window.plausible) plausible(\"ref_opened\", @{props: @{target: \"Discord\"@}@}); window.open(\"https://discord.gg/ysSkFFxmjr\", \"_blank\");'></div>\n        <div class=\"donation\">\n            <div class=\"donationSvg\" onclick='if (window.plausible) plausible(\"ref_opened\", @{props: @{target: \"DonationIcon\"@}@}); window.open(\"https://paypal.me/yukaru1\", \"_blank\");'></div>\n            <span class=\"tooltip\">Donations will always be shared between the developers!</span>\n        </div>\n        <div class=\"githubSvg\" onclick='if (window.plausible) plausible(\"ref_opened\", @{props: @{target: \"Github\"@}@}); window.open(\"https://github.com/WeDontPanic/Jotoba\", \"_blank\");'></div>\n    </div>\n    }\n\n   <div class=\"center-text\">\n\n        <hr class=\"footer-hr\">\n        <span>\n            @data.gettext(\"Jotoba wouldn't be able to exist without the help of many open-source data sources.\") <br> @data.gettext(\"Check out the\") <a class=\"clickable no-align green\" href=\"/about\">@data.gettext(\"About Page\")</a> @data.gettext(\"for a list of all contributors in this project.\")\n        </span>\n        @if data.config.is_debug() {\n        <br><br> Git hash: @(GIT_HASH)\n        }\n      \n    </div>\n\n </footer>\n"
  },
  {
    "path": "lib/frontend/templates/subtemplates/head.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n<head>\n    <!-- Meta Info -->\n    <title>@data.gettext(\"Jotoba\")</title>\n    <link rel=\"shortcut icon\" type=\"image/png\" href=\"/variable_assets/@data.asset_hash/assets/jotokun/favicon.png\">\n    <meta http-equiv=\"Content-type\" content=\"text/html; charset=utf-8\">\n    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n    <meta name=\"description\" content=\"Jotoba is a powerful and free Japanese dictionary supporting words, kanji, sentences, and many different languages.\">\n    <link rel=\"search\" type=\"application/opensearchdescription+xml\" title=\"Jotoba\" href=\"/variable_assets/@data.asset_hash/assets/settings/opensearch.xml\">\n    <link rel=\"manifest\" href=\"/variable_assets/@data.asset_hash/assets/settings/manifest.json\">\n    \n    <!-- OG Tags -->\n  @if let Some(tags) = data.get_og_tags() {\n    @tags.render_unescaped()\n  }\n\n    <!-- Fonts -->\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/fonts/fonts.css\">\n\n    <!-- External Stylesheets -->\n    <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/lib/bootstrap.min.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/lib/material.min.css\">\n\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/lib/alertify.min.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/lib/alertifyThemes/alertify_btstrap.min.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/lib/imagecrop.min.css\">\n\n    <!-- Internal 
Stylesheets -->\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/tools/alerts.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/overlay/overlayBase.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/overlay/imgUploadOverlay.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/overlay/suggestionOverlay.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/overlay/settingsOverlay.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/overlay/radicalOverlay.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/overlay/footerOverlay.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/overlay/croppingOverlay.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/tools/ripple.css\">\n\n    <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/main.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/search/searchRow.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/search/choices.css\">\n    \n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" 
onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/page/footer.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" media=\"print\" onload=\"this.media='all'\" href=\"/variable_assets/@data.asset_hash/assets/css/tools/pagination.css\">\n    <link rel=\"stylesheet\" type=\"text/css\" href=\"/variable_assets/@data.asset_hash/assets/css/mobile.css\">\n\n    <!-- External JS -->\n    <script src=\"/variable_assets/@data.asset_hash/assets/js/lib/jquery.min.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/lib/bootstrap.min.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/lib/material.min.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/lib/alertify.min.js\"></script>\n    <script src =\"/variable_assets/@data.asset_hash/assets/js/lib/jc.js\"></script>\n    <script defer src=\"/variable_assets/@data.asset_hash/assets/js/lib/jotobaChoices.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/lib/imagecrop.min.js\"></script>\n    \n    <!-- Internal JS -->\n    <script src=\"/variable_assets/@data.asset_hash/assets/js/tools/utils.js\"></script>\n    <script src=\"/variable_assets/@data.asset_hash/assets/js/tools/theme.js\"></script>\n    <script src=\"/variable_assets/@data.asset_hash/assets/js/tools/jotoTools.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/tools/utils2.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/locales/collection.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/tools/ripple.js\"></script>\n    <script defer src=\"/variable_assets/@data.asset_hash/assets/js/qol.js\"></script>\n    <script src=\"/variable_assets/@data.asset_hash/assets/js/search/api.js\"></script>\n    <script src=\"/variable_assets/@data.asset_hash/assets/js/search/search.js\"></script>\n    <script async 
src=\"/variable_assets/@data.asset_hash/assets/js/search/shared.js\"></script>\n    <script src= \"/variable_assets/@data.asset_hash/assets/js/search/suggestions.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/search/eventHandler.js\"></script>\n    <script async src= \"/variable_assets/@data.asset_hash/assets/js/search/overlay/suggestionOverlay.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/search/overlay/speechSearch.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/search/overlay/radicalSearch.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/search/overlay/imageSearch.js\"></script>\n    <script async src=\"/variable_assets/@data.asset_hash/assets/js/mobile.js\"></script>\n    <script defer src=\"/variable_assets/@data.asset_hash/assets/js/page/overlay/settings_overlay.js\"></script>\n    <script defer src=\"/variable_assets/@data.asset_hash/assets/js/page/overlay/settings.js\"></script>\n    <script defer src=\"/variable_assets/@data.asset_hash/assets/js/lib/loadAnalytics.js\"></script>\n</head>\n"
  },
  {
    "path": "lib/frontend/templates/subtemplates/input_dropdown.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n<div class=\"index-btn-container\">\n    <div class=\"input-field first-wrap\">\n       <div class=\"input-select\">\n          <div class=\"choices main noselect\" data-type=\"select-one\" tabindex=\"1\">\n             <div class=\"choices__inner\">\n                <select id=\"search-type\" data-trigger=\"\" name=\"t\" data-onchange=\"changeSearchType\" class=\"hidden\">\n                   <option value=\"@data.get_search_site_id()\">@data.get_search_site_name()</option>\n                </select>\n                <div class=\"choices__list choices__list--single\">\n                   <div class=\"choices__item choices__item--selectable index\">@data.get_search_site_name()</div>\n                </div>\n             </div>\n             <div class=\"choices__list choices__list--dropdown index\">\n                <div class=\"choices__list\">\n                   <div data-value=\"0\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Words\")</div>\n                   <div data-value=\"1\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Kanji\")</div>\n                   <div data-value=\"2\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Sentences\")</div>\n                   <div data-value=\"3\" class=\"choices__item choices__item--choice choices__item--selectable\">@data.gettext(\"Names\")</div>\n                </div>\n             </div>\n          </div>\n       </div>\n    </div>\n </div>\n"
  },
  {
    "path": "lib/frontend/templates/subtemplates/main_body.rs.html",
    "content": "@use crate::templates::pages::{words_html, kanji_html, names_html, sentences_html, about_html, info_html, news_html};\n\n@use crate::{BaseData, ResultData, Site};\n@(data: &BaseData)\n\n<!-- Main Body -->\n<div id=\"page-container\">\n   <div class=\"d-flex center\">\n      <div class=\"main-container\">\n         <br>\n\n         @match data.site.clone() {\n            Site::SearchResult(search_result) => {\n               @match search_result.result {\n                  ResultData::Word(result) => {\n                     @:words_html(&data, &search_result.query, result)\n                  }\n                  ResultData::KanjiInfo(result) => {\n                     @:kanji_html(&data, result)\n                  }\n                  ResultData::Name(result) => {\n                     @:names_html(&data, result)\n                  }\n                  ResultData::Sentence(result) => {\n                     @:sentences_html(&data, result)\n                  }\n               }\n            }\n            Site::Index => {\n               @*@:index_html(&data)*@\n            }\n            Site::About => {\n               @:about_html(&data)\n            }\n            Site::InfoPage => {\n               @:info_html(&data)\n            }\n            Site::News(news) => {\n               @:news_html(&data, news)\n            }\n         }\n\n      </div>\n   </div>\n</div>\n"
  },
  {
    "path": "lib/frontend/templates/subtemplates/paginator.rs.html",
    "content": "@use crate::BaseData;\n@(data: &BaseData)\n\n@if let Some(pagination) = data.pagination {\n  <nav>\n     <ul class=\"pagination noselect\">\n        <li target-page=\"1\" class=\"pagination-item @if pagination.is_first() { disabled }\">\n           <button class=\"pagination-circle ripple\">@data.gettext(\"First\")</button>\n        </li>\n        <li target-page=\"@(pagination.curr_page - 1)\" class=\"pagination-item @if pagination.is_first() { disabled }\">\n           <button class=\"pagination-circle ripple\">&laquo;</button>\n        </li>\n\n        @for button in pagination.gen_page_buttons() {\n        <li target-page=\"@button.page_nr\" class=\"pagination-item\">\n           <button class=\"pagination-circle ripple @if button.active { active }\">@button.page_nr</button>\n        </li>\n        }\n\n        <li target-page=\"@(pagination.curr_page + 1)\" class=\"pagination-item @if pagination.is_last() { disabled }\">\n           <button class=\"pagination-circle ripple\">&raquo;</button>\n        </li>\n        <li target-page=\"@pagination.get_last()\" class=\"pagination-item @if pagination.is_last() { disabled }\">\n           <button class=\"pagination-circle ripple\" last-page=\"@pagination.get_last()\">@data.gettext(\"Last\")</button>\n        </li>\n     </ul>\n  </nav>\n}\n"
  },
  {
    "path": "lib/indexes/Cargo.toml",
    "content": "[package]\nname = \"indexes\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\ntypes = { path = \"../types\" }\nautocompletion = { git = \"https://github.com/WeDontPanic/AutoCompletionFramework\" }\n#autocompletion = { path = \"../../../AutoCompletionFramework\" }\nbktree = { git = \"https://github.com/JojiiOfficial/bktree\" }\nserde = { version = \"1.0.171\", features = [\"derive\"] }\nbyteorder = \"1.4.3\"\nbitflags = { git = \"https://github.com/JojiiOfficial/BitFlags\" }\nonce_cell = { version = \"1.18.0\", default-features = false }\nbincode = \"1.3.3\"\nlog = \"0.4.19\"\nrayon = { version = \"1.7.0\", optional = true }\nqp-trie = { git = \"https://github.com/sdleffler/qp-trie-rs\", features = [\n  \"serde\",\n] }\n#ngindex = { path = \"../../../ngindex\" }\nngindex = { git = \"https://github.com/JojiiOfficial/ngindex\" }\n#vsm = { path = \"../../../vsm\"}\nvsm = { git = \"https://github.com/JojiiOfficial/VSM\"}\nindex_framework = { git = \"https://github.com/WeDontPanic/index_framework\"}\nngram-tools = { git = \"https://github.com/JojiiOfficial/ngram-tools\"}\n#sparse_vec = { path = \"../../../sparse_vec\"}\nsparse_vec = { git = \"https://github.com/JojiiOfficial/SparseVec\"}\nnum-traits = \"0.2.15\"\n\n[dev-dependencies]\ntest-case = '*'\n\n[features]\ndefault = []\nparallel = [\"rayon\"]\n"
  },
  {
    "path": "lib/indexes/src/hashtag.rs",
    "content": "use ngindex::{\n    index_framework::retrieve::retriever::{ngram::NGramRetriever, Retriever},\n    NgramIndex,\n};\nuse qp_trie::{wrapper::BString, Trie};\nuse serde::{Deserialize, Serialize};\nuse types::jotoba::{indexes::hashtag::RawHashtag, search::SearchTarget};\n\n/// Index for hashtag auto completion\n#[derive(Deserialize, Serialize)]\npub struct HashTagIndex {\n    tags: Vec<RawHashtag>,\n    pub index: NgramIndex<2, u32>,\n    trie: Trie<BString, u32>,\n}\n\nimpl HashTagIndex {\n    /// Create a new HashTagIndex\n    pub fn new(tags: Vec<RawHashtag>, index: NgramIndex<2, u32>, trie: Trie<BString, u32>) -> Self {\n        Self { tags, index, trie }\n    }\n\n    #[inline]\n    pub fn get(&self, pos: usize) -> Option<&RawHashtag> {\n        self.tags.get(pos)\n    }\n\n    #[inline]\n    pub fn get_filtered(&self, pos: usize, s_targets: &[SearchTarget]) -> Option<&RawHashtag> {\n        let tag = self.get(pos)?;\n        if s_targets.iter().any(|i| tag.s_targets.contains(i)) || s_targets.is_empty() {\n            return Some(tag);\n        }\n        None\n    }\n\n    #[inline]\n    pub fn trie_search(&self, query: &str, s_targets: &[SearchTarget]) -> Vec<&RawHashtag> {\n        let id = self.trie.subtrie_str(&query.to_lowercase());\n\n        let mut out = vec![];\n        for (_, id) in id.iter() {\n            if let Some(v) = self.get_filtered(*id as usize, s_targets) {\n                out.push(v);\n            }\n        }\n\n        out\n    }\n\n    pub fn ngram_search(&self, query: &str, s_targets: &[SearchTarget]) -> Vec<(&RawHashtag, f32)> {\n        let mut posts: Vec<_> = s_targets.iter().map(|i| i.get_type_id() as u32).collect();\n        if posts.is_empty() {\n            posts = vec![0, 1, 2, 3];\n        }\n\n        let retr = self\n            .index\n            .retriever_for(query)\n            .in_postings(posts)\n            .unique()\n            .get::<NGramRetriever<'_, 2, _, _, _>>();\n\n        let q = 
retr.q_term_ids().to_vec();\n\n        let mut out = retr\n            .filter_map(|i| {\n                let item = self.get(*i.item() as usize)?;\n                let dice = i.terms().dice_weighted(&q, 0.5);\n                Some((item, dice))\n            })\n            .filter(|i| i.1 > 0.2)\n            .collect::<Vec<_>>();\n        out.sort_by(|a, b| a.1.total_cmp(&b.1).reverse());\n        out\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/kanji/mod.rs",
    "content": "pub mod reading;\npub mod reading_freq;\n"
  },
  {
    "path": "lib/indexes/src/kanji/reading.rs",
    "content": "use index_framework::backend::memory::presets::Simple;\n\n// Index shortcuts\npub type Index = Simple<String, u32>;\n"
  },
  {
    "path": "lib/indexes/src/kanji/reading_freq/k_freq_item.rs",
    "content": "use super::reading::ReadingFreq;\nuse serde::{Deserialize, Serialize};\n\n/// All reading data for a single kanji\n#[derive(Serialize, Deserialize, Debug)]\npub struct KFreqItem {\n    pub readings: Vec<ReadingFreq>,\n    pub total: usize,\n}\n\nimpl KFreqItem {\n    /// Creates a new Kanji frequency item with the provided readings\n    pub fn new(readings: Vec<String>) -> Self {\n        let readings = readings\n            .into_iter()\n            .map(|i| ReadingFreq::new(i))\n            .collect::<Vec<_>>();\n        Self { readings, total: 0 }\n    }\n\n    /// Get the total amount of counted readings for a kanji\n    #[inline]\n    pub fn total(&self) -> usize {\n        self.total\n    }\n\n    /// Increase the total value of counted readings for a kanji\n    #[inline]\n    pub fn inc_total(&mut self, add: usize) {\n        self.total += add\n    }\n\n    /// Returns `true` if the kanji readings are completely empty\n    pub fn is_empty(&self) -> bool {\n        self.readings.is_empty() || (self.readings.iter().all(|i| i.is_empty()) && self.total == 0)\n    }\n\n    /// Gets all reading freq items that match the given matcher\n    #[inline]\n    pub fn get_readings<'a, F: Fn(&str) -> bool>(\n        &'a self,\n        r: F,\n    ) -> impl Iterator<Item = &ReadingFreq> {\n        self.readings.iter().filter(move |i| r(&i.reading))\n    }\n\n    /// Gets a reading freq item with the given string\n    #[inline]\n    pub fn get_reading<S: AsRef<str>>(&self, s: S) -> Option<&ReadingFreq> {\n        self.readings.iter().find(|i| i.reading == s.as_ref())\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/kanji/reading_freq/mod.rs",
    "content": "pub mod k_freq_item;\npub mod reading;\n\nuse self::k_freq_item::KFreqItem;\nuse serde::{Deserialize, Serialize};\nuse std::collections::HashMap;\nuse types::jotoba::kanji::Kanji;\n\n/// An index that can hold kanji along with their various readings which this lovely language\n/// 'supports'. Each reading entry has a frequency assigned how often it occurrs for the given\n/// Kanji.\n#[derive(Serialize, Deserialize)]\npub struct FrequencyIndex {\n    pub data: HashMap<char, KFreqItem>,\n}\n\nimpl FrequencyIndex {\n    /// Create a new FrequencyIndex with a given set of kanji that will be supported\n    pub fn new(all_kanji: &[Kanji]) -> FrequencyIndex {\n        let mut data = HashMap::new();\n\n        for kanji in all_kanji {\n            let mut readings = vec![];\n\n            let on = kanji.onyomi.clone();\n            let kun = kanji.kunyomi.clone();\n\n            for reading in on.into_iter().chain(kun.into_iter()) {\n                readings.push(reading);\n            }\n\n            data.insert(kanji.literal, KFreqItem::new(readings));\n        }\n\n        FrequencyIndex { data }\n    }\n\n    /// Inserts a new reading for the given kanji. All readings of the kanji for those `matches`\n    /// returns `true` will be incremented\n    pub fn add_reading<F>(&mut self, kanji_lit: char, matches: F) -> bool\n    where\n        F: Fn(&str) -> bool,\n    {\n        let entry = match self.data.get_mut(&kanji_lit) {\n            Some(s) => s,\n            None => return false,\n        };\n\n        let c = entry\n            .readings\n            .iter_mut()\n            .filter(|i| matches(&i.reading))\n            .map(|i| i.count += 1)\n            .count();\n\n        if c == 0 {\n            return false;\n        }\n\n        // We're passing one reading. 
If there are multiple entries for one single entry,\n        // they're treated equally, so we're counting up all matches but only counting one\n        // total\n        entry.inc_total(1);\n\n        true\n    }\n\n    /// Removes all empty items from the index\n    pub fn clear(&mut self) {\n        self.data.retain(|_, v| !v.is_empty());\n    }\n\n    /// Returns a FreqData for the kanji `c`\n    #[inline]\n    pub fn get(&self, c: char) -> Option<&KFreqItem> {\n        self.data.get(&c)\n    }\n\n    /// Returns the normalized frequency for `reading`\n    #[inline]\n    pub fn norm_reading_freq(&self, kanji: char, reading: &str) -> Option<f32> {\n        self.norm_reading_freq_th(kanji, reading, 200)\n    }\n\n    /// Returns the normalized frequency for `reading`\n    #[inline]\n    pub fn norm_reading_freq_th(&self, kanji: char, reading: &str, th: usize) -> Option<f32> {\n        let freq_data = self.data.get(&kanji)?;\n        let read_freq = freq_data.get_reading(reading)?.count;\n        if freq_data.total() < th {\n            return None;\n        }\n        Some(read_freq as f32 / freq_data.total() as f32)\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/kanji/reading_freq/reading.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n/// Reading and its frequency\n#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Ord, Eq)]\npub struct ReadingFreq {\n    pub reading: String,\n    pub count: u32,\n}\n\nimpl ReadingFreq {\n    /// Creates a new Reading\n    #[inline]\n    pub fn new(reading: String) -> Self {\n        Self { reading, count: 0 }\n    }\n\n    /// Increment the reading\n    #[inline]\n    pub fn inc(&mut self, c: u32) {\n        self.count += c;\n    }\n\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.count == 0\n    }\n}\n\nimpl PartialOrd for ReadingFreq {\n    #[inline]\n    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n        self.count.partial_cmp(&other.count)\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/lib.rs",
    "content": "pub mod hashtag;\npub mod kanji;\npub mod names;\npub mod ng_freq;\npub mod radical;\npub mod regex;\npub mod sentences;\npub mod storage;\npub mod term_freq;\npub mod words;\n\npub use storage::{get, suggestions::get_suggestions};\n"
  },
  {
    "path": "lib/indexes/src/names.rs",
    "content": "pub const FOREIGN_NGRAM: usize = 3;\npub type ForeignIndex = ngindex::NgramIndex<FOREIGN_NGRAM, u32>;\n\npub const NATIVE_NGRAM: usize = 3;\npub type NativeIndex = ngindex::NgramIndex<NATIVE_NGRAM, u32>;\n"
  },
  {
    "path": "lib/indexes/src/ng_freq.rs",
    "content": "use ngram_tools::iter::wordgrams::Wordgrams;\nuse serde::{Deserialize, Serialize};\nuse sparse_vec::{SpVec32, VecExt};\n\nuse crate::term_freq::{TermFreqIndex, VecBuilder};\n\n/// Wrapper around Term frequency index counting ngrams of terms instead of the terms intelf.\n#[derive(Serialize, Deserialize)]\npub struct NgFreqIndex {\n    n: usize,\n    index: TermFreqIndex,\n}\n\nimpl NgFreqIndex {\n    pub fn new(n: usize) -> Self {\n        let index = TermFreqIndex::new();\n        Self { n, index }\n    }\n\n    /// Returns the amount of indexed terms\n    #[inline]\n    pub fn len(&self) -> usize {\n        self.index.len()\n    }\n\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.len() == 0\n    }\n\n    pub fn compress(&mut self, threshold: usize) {\n        self.index.compress(threshold)\n    }\n\n    pub fn insert(&mut self, gloss: &str) {\n        if gloss.trim().is_empty() {\n            return;\n        }\n        let padded = self.get_padded(gloss);\n        let n = Self::n_for(gloss, self.n);\n        let ngrams = Wordgrams::new(&padded, n);\n\n        for ngram in ngrams {\n            self.index.insert(ngram.to_string());\n        }\n    }\n\n    #[inline]\n    pub fn build_vec_cntx<A: AsRef<str>>(&self, builder: &mut VecBuilder, inp: A) -> SpVec32 {\n        self.build_custom_vec_cntx(builder, inp, |freq, tot| (tot / freq).log2())\n    }\n\n    #[inline]\n    pub fn build_vec<A: AsRef<str>>(&self, inp: A) -> SpVec32 {\n        self.build_custom_vec(inp, |freq, tot| (tot / freq).log2())\n    }\n\n    #[inline]\n    pub fn vec_builder(&self) -> VecBuilder {\n        VecBuilder::new(&self.index)\n    }\n\n    pub fn build_custom_vec<A, F>(&self, inp: A, inv_freq: F) -> SpVec32\n    where\n        A: AsRef<str>,\n        F: Fn(f32, f32) -> f32,\n    {\n        if inp.as_ref().trim().is_empty() {\n            return SpVec32::default();\n        }\n\n        let inp = inp.as_ref();\n        let padded = 
self.get_padded(inp);\n        let n = Self::n_for(inp, self.n);\n\n        let mut no_hit_counter = 0;\n        let ng_ids: Vec<_> = Wordgrams::new(&padded, n)\n            .map(|i| {\n                let id = self.index.t_ids.get(i).copied().unwrap_or_else(|| {\n                    no_hit_counter += 1;\n                    self.index.total as u32 + no_hit_counter\n                });\n\n                //let freq = self.index.inv_freq_oov(i);\n                let t_freq = self.index.freq_by_id(id).unwrap_or(1) as f32;\n                let weight = (inv_freq)(t_freq, self.index.total as f32);\n                (id, weight)\n            })\n            .collect();\n\n        SpVec32::create_new_raw(ng_ids)\n    }\n\n    pub fn build_custom_vec_cntx<A, F>(\n        &self,\n        builder: &mut VecBuilder,\n        inp: A,\n        inv_freq: F,\n    ) -> SpVec32\n    where\n        A: AsRef<str>,\n        F: Fn(f32, f32) -> f32,\n    {\n        if inp.as_ref().trim().is_empty() {\n            return SpVec32::default();\n        }\n\n        let inp = inp.as_ref();\n        let padded = self.get_padded(inp);\n        let n = Self::n_for(inp, self.n);\n\n        let ng_ids: Vec<_> = Wordgrams::new(&padded, n)\n            .map(|i| {\n                let id = builder.get_or_insert_id(i);\n\n                let t_freq = self.index.freq_by_id(id).unwrap_or(1) as f32;\n                let weight = (inv_freq)(t_freq, self.index.total as f32);\n\n                (id, weight)\n            })\n            .collect();\n\n        SpVec32::create_new_raw(ng_ids)\n    }\n\n    #[inline]\n    fn n_for(inp: &str, n: usize) -> usize {\n        n.min(inp.len())\n    }\n\n    #[inline]\n    fn get_padded(&self, inp: &str) -> String {\n        let n = Self::n_for(inp, self.n);\n        ngram_tools::padding(inp, n - 1)\n    }\n}\n\n// TODO: Put this function into some lib (maybe sparse vector?)\n#[inline]\npub fn term_dist(a: &SpVec32, b: &SpVec32) -> f32 {\n    if a.is_empty() || 
b.is_empty() {\n        return 0.0;\n    }\n\n    let both = a\n        .intersect_iter(b)\n        .map(|(_, a_w, b_w)| a_w + b_w)\n        .sum::<f32>();\n\n    let sum = a.weights().chain(b.weights()).sum::<f32>();\n\n    both / sum\n}\n\n#[cfg(test)]\nmod test {\n    use test_case::test_case;\n\n    use super::*;\n\n    #[test_case(\"musik\", 1)]\n    #[test_case(\"musik\", 2)]\n    #[test_case(\"musik\", 3)]\n    #[test_case(\"ki\", 1)]\n    #[test_case(\"ki\", 2)]\n    #[test_case(\"ki\", 3)]\n    fn test_single(term: &str, n: usize) {\n        let mut ngindex = NgFreqIndex::new(n);\n        ngindex.insert(term);\n\n        let music_vec = ngindex.build_vec(term);\n        let term_len = term.len();\n\n        // Check proper length of vectors\n        let pad_len = n.saturating_sub(1);\n        let tot_len = pad_len * 2 + term_len;\n        if term_len < n {\n            assert_eq!(music_vec.dim_count(), tot_len - n);\n        } else {\n            assert_eq!(music_vec.dim_count(), tot_len - n + 1);\n        }\n    }\n\n    #[test]\n    fn test_freq() {\n        let mut ngindex = NgFreqIndex::new(2);\n        ngindex.insert(\"huhu\");\n\n        let freq = ngindex.index.freq(\"hu\");\n        assert_eq!(freq, Some(2));\n    }\n\n    #[test]\n    fn test_sim() {\n        let mut ngindex = NgFreqIndex::new(3);\n        ngindex.insert(\"freund\");\n        ngindex.insert(\"hund\");\n        ngindex.insert(\"kunde\");\n        ngindex.insert(\"bund\");\n\n        let kund = ngindex.build_vec(\"kund\");\n\n        let kunde = ngindex.build_vec(\"kunde\");\n        let hund = ngindex.build_vec(\"hund\");\n\n        let sim_kund_kunde = term_dist(&kund, &kunde);\n        let sim_kund_hund = term_dist(&kund, &hund);\n\n        assert!(sim_kund_kunde > sim_kund_hund);\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/radical.rs",
    "content": "use std::collections::HashMap;\n\nuse bktree::BkTree;\nuse serde::{Deserialize, Serialize};\nuse types::jotoba::kanji::radical::SearchRadicalInfo;\n\n/// Radicals indexed by its meanings\n#[derive(Serialize, Deserialize)]\npub struct RadicalIndex {\n    pub meaning_map: HashMap<String, Vec<SearchRadicalInfo>>,\n    pub term_tree: BkTree<String>,\n}\n\nimpl RadicalIndex {\n    pub fn new(\n        meaning_map: HashMap<String, Vec<SearchRadicalInfo>>,\n        term_tree: BkTree<String>,\n    ) -> Self {\n        Self {\n            meaning_map,\n            term_tree,\n        }\n    }\n\n    /// Returns `true` if the index contains `term`\n    #[inline(always)]\n    pub fn has_term(&self, term: &str) -> bool {\n        self.meaning_map.contains_key(term)\n    }\n\n    /// Returns `SearchRadicalInfo` from the index by its term or `None` if term is not found\n    #[inline(always)]\n    pub fn get(&self, term: &str) -> Option<&Vec<SearchRadicalInfo>> {\n        self.meaning_map.get(term)\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/regex.rs",
    "content": "use serde::{Deserialize, Serialize};\nuse std::collections::{hash_map::Iter, HashMap, HashSet};\n\n/// Index to allow fast and efficient regex search queries.\n#[derive(Serialize, Deserialize)]\npub struct RegexSearchIndex {\n    data: HashMap<char, HashSet<u32>>,\n}\n\nimpl RegexSearchIndex {\n    /// Creates a new empty Index\n    #[inline]\n    pub fn new() -> Self {\n        RegexSearchIndex {\n            data: HashMap::new(),\n        }\n    }\n\n    /// Returns an iterator over all items in the index\n    #[inline]\n    pub fn iter(&self) -> Iter<char, HashSet<u32>> {\n        self.data.iter()\n    }\n\n    /// Returns a HashSet with all words (seq_ids) that contain the given character\n    #[inline(always)]\n    pub fn get_words_with(&self, character: char) -> Option<&HashSet<u32>> {\n        self.data.get(&character)\n    }\n\n    /// Adds a new term to the index\n    #[inline]\n    pub fn add_term(&mut self, term: &str, seq_id: u32) {\n        for c in term.chars() {\n            self.data.entry(c).or_default().insert(seq_id);\n        }\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/sentences.rs",
    "content": "use vsm::presets::VSMIndexSimple;\n\n// Shortcut for type of index\npub type NativeIndex = VSMIndexSimple<u32>;\npub type ForeignIndex = VSMIndexSimple<u32>;\n"
  },
  {
    "path": "lib/indexes/src/storage/kanji.rs",
    "content": "use crate::kanji::reading_freq::FrequencyIndex;\nuse std::{error::Error, fs::File, io::BufReader, path::Path};\n\npub const K_READINGS_FREQ_FILE: &str = \"kreading_freq_index\";\n\n/// Store for name indexes\npub struct KanjiStore {\n    kread_frequency: FrequencyIndex,\n}\n\nimpl KanjiStore {\n    pub fn new(kread_frequency: FrequencyIndex) -> Self {\n        Self { kread_frequency }\n    }\n\n    #[inline(always)]\n    pub fn reading_freq(&self) -> &FrequencyIndex {\n        &self.kread_frequency\n    }\n}\n\npub(crate) fn load<P: AsRef<Path>>(path: P) -> Result<KanjiStore, Box<dyn Error + Send + Sync>> {\n    let kread_file = Path::new(path.as_ref()).join(K_READINGS_FREQ_FILE);\n    let kread_frequency: FrequencyIndex =\n        bincode::deserialize_from(BufReader::new(File::open(kread_file)?))?;\n    Ok(KanjiStore::new(kread_frequency))\n}\n"
  },
  {
    "path": "lib/indexes/src/storage/mod.rs",
    "content": "pub mod kanji;\npub mod name;\npub mod radical;\npub mod sentence;\npub mod suggestions;\npub(crate) mod utils;\npub mod word;\n\nuse once_cell::sync::OnceCell;\nuse std::{error::Error, path::Path};\nuse {\n    kanji::KanjiStore, name::NameStore, radical::RadicalStore, sentence::SentenceStore,\n    word::WordStore,\n};\n\n/// In-memory store for all indexes\npub(crate) static INDEX_STORE: OnceCell<IndexStore> = OnceCell::new();\n\n/// Store for all indexes\npub struct IndexStore {\n    word: WordStore,\n    sentence: SentenceStore,\n    name: NameStore,\n    radical: RadicalStore,\n    kanji: KanjiStore,\n}\n\nimpl IndexStore {\n    #[inline(always)]\n    pub fn word(&self) -> &WordStore {\n        &self.word\n    }\n\n    #[inline(always)]\n    pub fn sentence(&self) -> &SentenceStore {\n        &self.sentence\n    }\n\n    #[inline(always)]\n    pub fn name(&self) -> &NameStore {\n        &self.name\n    }\n\n    #[inline(always)]\n    pub fn radical(&self) -> &RadicalStore {\n        &self.radical\n    }\n\n    #[inline(always)]\n    pub fn kanji(&self) -> &KanjiStore {\n        &self.kanji\n    }\n\n    /// Returns `true` if all indexes are properly loaded\n    pub fn check(&self) -> bool {\n        self.word.check() && self.sentence.check() && self.name.check() && self.radical.check()\n    }\n}\n\n/// Returns an IndexStore which can be used to retrieve all indexes\n#[inline(always)]\npub fn get() -> &'static IndexStore {\n    unsafe { INDEX_STORE.get_unchecked() }\n}\n\n/// Loads all indexes\npub fn load<P: AsRef<Path>>(index_folder: P) -> Result<bool, Box<dyn Error + Send + Sync>> {\n    if is_loaded() {\n        return Ok(true);\n    }\n\n    let store = load_raw(index_folder)?;\n\n    if !store.check() {\n        return Ok(false);\n    }\n\n    INDEX_STORE.set(store).ok();\n\n    Ok(true)\n}\n\npub fn is_loaded() -> bool {\n    INDEX_STORE.get().is_some()\n}\n\n/// Needed for tests only\npub fn wait() {\n    INDEX_STORE.wait();\n}\n\npub fn 
load_raw<P: AsRef<Path>>(\n    index_folder: P,\n) -> Result<IndexStore, Box<dyn Error + Send + Sync>> {\n    log::debug!(\"Loading word index\");\n    let word = word::load(index_folder.as_ref())?;\n\n    log::debug!(\"Loading sentence index\");\n    let sentence = sentence::load(index_folder.as_ref())?;\n\n    log::debug!(\"Loading name index\");\n    let name = name::load(index_folder.as_ref())?;\n\n    log::debug!(\"Loading radical index\");\n    let radical = radical::load(index_folder.as_ref())?;\n\n    log::debug!(\"Loading kanji reading frequency index\");\n    let kanji = kanji::load(index_folder.as_ref())?;\n\n    Ok(IndexStore {\n        word,\n        sentence,\n        name,\n        radical,\n        kanji,\n    })\n}\n"
  },
  {
    "path": "lib/indexes/src/storage/name.rs",
    "content": "use super::utils;\nuse crate::names::{ForeignIndex, NativeIndex};\nuse std::{error::Error, path::Path};\n\npub const FOREIGN_FILE: &str = \"name_foreign_index\";\npub const NATIVE_FILE: &str = \"name_jp_index\";\n\n/// Store for name indexes\npub struct NameStore {\n    foreign: ForeignIndex,\n    native: NativeIndex,\n}\n\nimpl NameStore {\n    pub(crate) fn new(foreign: ForeignIndex, native: NativeIndex) -> Self {\n        Self { foreign, native }\n    }\n\n    #[inline(always)]\n    pub fn foreign(&self) -> &ForeignIndex {\n        &self.foreign\n    }\n\n    #[inline(always)]\n    pub fn native(&self) -> &NativeIndex {\n        &self.native\n    }\n\n    pub(crate) fn check(&self) -> bool {\n        true\n    }\n}\n\npub(crate) fn load<P: AsRef<Path>>(path: P) -> Result<NameStore, Box<dyn Error + Send + Sync>> {\n    let foreign = utils::deser_file(path.as_ref(), FOREIGN_FILE)?;\n    let native = utils::deser_file(path.as_ref(), NATIVE_FILE)?;\n    Ok(NameStore::new(foreign, native))\n}\n"
  },
  {
    "path": "lib/indexes/src/storage/radical.rs",
    "content": "use super::utils;\nuse crate::radical::RadicalIndex;\nuse std::{error::Error, path::Path};\n\npub const RAD_INDEX_FILE: &str = \"radical_index\";\n\n/// Store for radical indexes\npub struct RadicalStore {\n    rad_index: RadicalIndex,\n}\n\nimpl RadicalStore {\n    pub(crate) fn new(rad_index: RadicalIndex) -> Self {\n        Self { rad_index }\n    }\n\n    /// Returns the meaning index for radicals\n    #[inline]\n    pub fn meaning_index(&self) -> &RadicalIndex {\n        &self.rad_index\n    }\n\n    /// Returns true if data is valid\n    pub(crate) fn check(&self) -> bool {\n        !self.rad_index.meaning_map.is_empty()\n    }\n}\n\npub(crate) fn load<P: AsRef<Path>>(path: P) -> Result<RadicalStore, Box<dyn Error + Send + Sync>> {\n    let index = utils::deser_file(path, RAD_INDEX_FILE)?;\n    let store = RadicalStore::new(index);\n    Ok(store)\n}\n"
  },
  {
    "path": "lib/indexes/src/storage/sentence.rs",
    "content": "use super::utils;\nuse crate::sentences::{ForeignIndex, NativeIndex};\nuse std::{error::Error, path::Path};\n\npub const NATIVE_FILE: &str = \"sentences_jp_index\";\npub const FOREIGN_FILE: &str = \"sentences_fg_index\";\n\n/// Store for sentence indexes\npub struct SentenceStore {\n    native: NativeIndex,\n    foreign: ForeignIndex,\n}\n\nimpl SentenceStore {\n    pub(crate) fn new(native: NativeIndex, foreign: ForeignIndex) -> Self {\n        Self { foreign, native }\n    }\n\n    /// Returns the foreign index for the given language or `None` if not loaded\n    #[inline(always)]\n    pub fn foreign(&self) -> &ForeignIndex {\n        &self.foreign\n    }\n\n    /// Returns the japanese sentence index\n    #[inline(always)]\n    pub fn native(&self) -> &NativeIndex {\n        &self.native\n    }\n\n    pub(crate) fn check(&self) -> bool {\n        true\n    }\n}\n\npub(crate) fn load<P: AsRef<Path>>(path: P) -> Result<SentenceStore, Box<dyn Error + Send + Sync>> {\n    let native = utils::deser_file(path.as_ref(), NATIVE_FILE)?;\n    let foreign = utils::deser_file(path.as_ref(), FOREIGN_FILE)?;\n    Ok(SentenceStore::new(native, foreign))\n}\n"
  },
  {
    "path": "lib/indexes/src/storage/suggestions.rs",
    "content": "use crate::hashtag::HashTagIndex;\n\nuse super::utils;\nuse autocompletion::index::{basic::BasicIndex, japanese::JapaneseIndex};\nuse once_cell::sync::OnceCell;\nuse serde::{Deserialize, Serialize};\nuse std::{collections::HashMap, error::Error, path::Path};\nuse types::jotoba::language::Language;\n\npub const K_MEANING_NGRAM: usize = 3;\n\npub const FG_WORDS_NGRAM: usize = 3;\npub const JP_WORDS_NGRAM: usize = 2;\n\npub const FG_NAMES_NGRAM: usize = 3;\npub const JP_NAMES_NGRAM: usize = 2;\n\npub const SUGGESTION_FILE: &str = \"suggestions\";\n\n/// In-memory store for all suggestion indexes\npub(crate) static SUGGESTION_STORE: OnceCell<SuggestionStorage> = OnceCell::new();\n\n/// Contains all suggestion index data\n#[derive(Serialize, Deserialize)]\npub struct SuggestionStorage {\n    jp_words: JapaneseIndex<JP_WORDS_NGRAM>,\n    foreign_words: HashMap<Language, BasicIndex<FG_WORDS_NGRAM>>,\n\n    kanji_meanings: JapaneseIndex<K_MEANING_NGRAM>,\n\n    names_native: JapaneseIndex<JP_NAMES_NGRAM>,\n    names_foreign: BasicIndex<FG_NAMES_NGRAM>,\n\n    hashtag: HashTagIndex,\n}\n\nimpl SuggestionStorage {\n    pub fn new(\n        jp_words: JapaneseIndex,\n        foreign_words: HashMap<Language, BasicIndex<FG_WORDS_NGRAM>>,\n        kanji_meanings: JapaneseIndex<K_MEANING_NGRAM>,\n        names_native: JapaneseIndex<JP_NAMES_NGRAM>,\n        names_foreign: BasicIndex<FG_NAMES_NGRAM>,\n        hashtag: HashTagIndex,\n    ) -> Self {\n        Self {\n            jp_words,\n            foreign_words,\n            kanji_meanings,\n            names_native,\n            names_foreign,\n            hashtag,\n        }\n    }\n\n    #[inline]\n    pub fn jp_words(&self) -> &JapaneseIndex {\n        &self.jp_words\n    }\n\n    #[inline]\n    pub fn foreign_words(&self, language: Language) -> Option<&BasicIndex<FG_WORDS_NGRAM>> {\n        self.foreign_words.get(&language)\n    }\n\n    #[inline]\n    pub fn kanji_meanings(&self) -> 
&JapaneseIndex<K_MEANING_NGRAM> {\n        &self.kanji_meanings\n    }\n\n    #[inline]\n    pub fn names_native(&self) -> &JapaneseIndex<JP_NAMES_NGRAM> {\n        &self.names_native\n    }\n\n    #[inline]\n    pub fn names_foreign(&self) -> &BasicIndex<FG_NAMES_NGRAM> {\n        &self.names_foreign\n    }\n\n    #[inline]\n    pub fn hashtags(&self) -> &HashTagIndex {\n        &self.hashtag\n    }\n\n    pub fn check(&self) -> bool {\n        utils::check_lang_map(&self.foreign_words)\n    }\n}\n\npub fn load_raw<P: AsRef<Path>>(\n    file: P,\n) -> Result<SuggestionStorage, Box<dyn Error + Send + Sync>> {\n    utils::deser_file(file, \"\")\n}\n\npub fn load<P: AsRef<Path>>(path: P) -> Result<bool, Box<dyn Error + Sync + Send>> {\n    let store = load_raw(path)?;\n    Ok(SUGGESTION_STORE.set(store).is_ok())\n}\n\n#[inline]\npub fn get_suggestions() -> &'static SuggestionStorage {\n    unsafe { SUGGESTION_STORE.get_unchecked() }\n}\n"
  },
  {
    "path": "lib/indexes/src/storage/utils.rs",
    "content": "use serde::de::DeserializeOwned;\nuse std::{\n    collections::HashMap,\n    error::Error,\n    fs::File,\n    io::{BufReader, Read},\n    path::Path,\n};\nuse types::jotoba::language::Language;\n\n/// Deserializes a file from `path` with `name`\npub fn deser_file<O: DeserializeOwned, P: AsRef<Path>>(\n    path: P,\n    name: &str,\n) -> Result<O, Box<dyn Error + Send + Sync>> {\n    let path = if name.is_empty() {\n        path.as_ref().to_path_buf()\n    } else {\n        path.as_ref().join(name)\n    };\n    Ok(fast_deser(path)?)\n}\n\npub fn load_by_language<O, F, P: AsRef<Path>>(\n    path: P,\n    prefix: &str,\n    load: F,\n) -> Result<HashMap<Language, O>, Box<dyn Error + Send + Sync>>\nwhere\n    F: Fn(&Path) -> Result<Option<(Language, O)>, Box<dyn Error + Sync + Send>>,\n{\n    let mut map = HashMap::with_capacity(10);\n\n    // All index files in index source folder\n    let files = std::fs::read_dir(path)?.map(|res| res.map(|e| e.path()));\n\n    for file in files {\n        let file = file?;\n\n        let file_name = file.file_name().and_then(|i| i.to_str()).unwrap();\n        if !file_name.starts_with(prefix) {\n            continue;\n        }\n\n        match load(file.as_ref())? {\n            Some((lang, deser)) => {\n                map.insert(lang, deser);\n            }\n            None => (),\n        };\n    }\n\n    Ok(map)\n}\n\n/* pub fn lang_from_file<F: AsRef<Path>>(file: F, prefix: &str) -> Option<Language> {\n    let file_name = file.as_ref().file_name()?.to_str()?.to_string();\n    let lang_str = file_name.strip_prefix(prefix).unwrap();\n    Language::from_str(lang_str).ok()\n} */\n\n/// Returns true if `map` has an entry for all language keys\npub fn check_lang_map<T>(map: &HashMap<Language, T>) -> bool {\n    Language::iter_word().all(|i| map.contains_key(&i))\n}\n\n// A bit faster. 
Who cares about memory consumption anyways\nfn fast_deser<O: DeserializeOwned, P: AsRef<Path>>(\n    file_path: P,\n) -> Result<O, Box<dyn Error + Sync + Send>> {\n    let file = File::open(file_path)?;\n    let len = file.metadata()?.len();\n    let mut buf = vec![0u8; len as usize];\n    let mut reader = BufReader::new(file);\n    reader.read_exact(&mut buf)?;\n    Ok(bincode::deserialize(&buf)?)\n}\n"
  },
  {
    "path": "lib/indexes/src/storage/word.rs",
    "content": "use super::utils;\nuse crate::{\n    kanji,\n    regex::RegexSearchIndex,\n    words::{ForeignIndex, NativeIndex},\n};\nuse log::debug;\nuse std::{collections::HashMap, error::Error, path::Path, str::FromStr};\nuse types::jotoba::language::Language;\n\npub const FOREIGN_PREFIX: &str = \"word_index_\";\npub const NATIVE_FILE: &str = \"jp_index\";\npub const REGEX_FILE: &str = \"regex_index\";\npub const KANJI_READING_INDEX: &str = \"word_kr_index\";\n\n/// Store for words\npub struct WordStore {\n    foreign: HashMap<Language, ForeignIndex>,\n    native: NativeIndex,\n\n    regex: RegexSearchIndex,\n\n    k_reading: kanji::reading::Index,\n}\n\nimpl WordStore {\n    pub(crate) fn new(\n        foreign: HashMap<Language, ForeignIndex>,\n        native: NativeIndex,\n        regex: RegexSearchIndex,\n        k_reading: kanji::reading::Index,\n    ) -> Self {\n        Self {\n            foreign,\n            native,\n            regex,\n            k_reading,\n        }\n    }\n\n    /// Returns the foreign index for the given language\n    #[inline]\n    pub fn foreign(&self, language: Language) -> Option<&ForeignIndex> {\n        self.foreign.get(&language)\n    }\n\n    #[inline]\n    pub fn regex(&self) -> &RegexSearchIndex {\n        &self.regex\n    }\n\n    #[inline]\n    pub fn k_reading(&self) -> &kanji::reading::Index {\n        &self.k_reading\n    }\n\n    #[inline]\n    pub fn native(&self) -> &NativeIndex {\n        &self.native\n    }\n\n    pub(crate) fn check(&self) -> bool {\n        utils::check_lang_map(&self.foreign)\n    }\n}\n\n#[cfg(not(feature = \"parallel\"))]\npub(crate) fn load<P: AsRef<Path>>(path: P) -> Result<WordStore, Box<dyn Error + Sync + Send>> {\n    let start = std::time::Instant::now();\n    let foreign = load_foreign(path.as_ref())?;\n    let native = utils::deser_file(path.as_ref(), NATIVE_FILE)?;\n    let regex = utils::deser_file(path.as_ref(), REGEX_FILE)?;\n    let k_reading = 
utils::deser_file(path.as_ref(), KANJI_READING_INDEX)?;\n    debug!(\"Loading indexes sync took: {:?}\", start.elapsed());\n    Ok(WordStore::new(foreign, native, regex, k_reading))\n}\n\n#[cfg(feature = \"parallel\")]\npub(crate) fn load<P: AsRef<Path> + Send + Sync>(\n    path: P,\n) -> Result<WordStore, Box<dyn Error + Send + Sync>> {\n    let start = std::time::Instant::now();\n    let mut foreign = None;\n    let mut native = None;\n    let mut regex: Option<Result<RegexSearchIndex, Box<dyn Error + Send + Sync>>> = None;\n    let mut k_reading = None;\n    rayon::scope(|s| {\n        s.spawn(|_| {\n            foreign = Some(load_foreign(path.as_ref()));\n        });\n        s.spawn(|_| {\n            native = Some(utils::deser_file(path.as_ref(), NATIVE_FILE));\n        });\n        s.spawn(|_| {\n            regex = Some(utils::deser_file(path.as_ref(), REGEX_FILE));\n        });\n        s.spawn(|_| {\n            k_reading = Some(utils::deser_file(path.as_ref(), KANJI_READING_INDEX));\n        });\n    });\n    let foreign = foreign.unwrap()?;\n    let native = native.unwrap()?;\n    let regex = regex.unwrap()?;\n    let k_reading = k_reading.unwrap()?;\n    debug!(\"Loading indexes parallel took: {:?}\", start.elapsed());\n    Ok(WordStore::new(foreign, native, regex, k_reading))\n}\n\nfn load_foreign<P: AsRef<Path>>(\n    path: P,\n) -> Result<HashMap<Language, ForeignIndex>, Box<dyn Error + Send + Sync>> {\n    utils::load_by_language(path, FOREIGN_PREFIX, |p| {\n        //let index = ForeignIndex::open(p)?;\n        let index: ForeignIndex = utils::deser_file(p, \"\").unwrap();\n        let file_name = p\n            .file_name()\n            .unwrap()\n            .to_str()\n            .unwrap()\n            .strip_prefix(FOREIGN_PREFIX)\n            .unwrap();\n        let lang = Language::from_str(file_name).unwrap();\n        //let lang = index.get_metadata().language;\n        Ok(Some((lang, index)))\n    })\n}\n"
  },
  {
    "path": "lib/indexes/src/term_freq.rs",
    "content": "use serde::{Deserialize, Serialize};\nuse std::collections::HashMap;\n\n/// Index to index frequencies of terms\n#[derive(Serialize, Deserialize)]\npub struct TermFreqIndex {\n    pub(crate) freqs: HashMap<u32, u32>,\n    pub(crate) t_ids: HashMap<String, u32>,\n    pub(crate) total: usize,\n}\n\nimpl TermFreqIndex {\n    pub fn new() -> Self {\n        Self {\n            freqs: HashMap::new(),\n            t_ids: HashMap::new(),\n            total: 0,\n        }\n    }\n\n    /// Returns the amount of indexed terms\n    #[inline]\n    pub fn len(&self) -> usize {\n        self.freqs.len()\n    }\n\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.len() == 0\n    }\n\n    /// Insert a new term into the index or increases the\n    /// frequency value of an existing term\n    pub fn insert(&mut self, term: String) {\n        self.total += 1;\n\n        let freq = self.t_ids.get(&term).and_then(|id| self.freqs.get_mut(&id));\n        if let Some(freq) = freq {\n            *freq += 1;\n            return;\n        }\n\n        let new_id = self.t_ids.len() as u32;\n        self.t_ids.insert(term, new_id);\n        self.freqs.insert(new_id, 1);\n    }\n\n    // Remove all terms with frequency `threshold` and treat out of dict\n    // ngrams as frequency = `1` to reduce memory usage.\n    pub fn compress(&mut self, threshold: usize) {\n        self.t_ids.retain(|_, id| {\n            let freq = *self.freqs.get(id).unwrap();\n            if freq < threshold as u32 {\n                self.freqs.remove(id).unwrap();\n                return false;\n            }\n            true\n        });\n    }\n\n    #[inline]\n    pub fn vec_builder(&self) -> VecBuilder {\n        VecBuilder::new(self)\n    }\n\n    #[inline]\n    pub fn get_id(&self, term: &str) -> Option<u32> {\n        self.t_ids.get(term).copied()\n    }\n\n    #[inline]\n    pub fn freq(&self, term: &str) -> Option<u32> {\n        let id = self.get_id(term)?;\n        
self.freq_by_id(id)\n    }\n\n    #[inline]\n    pub fn freq_by_id(&self, id: u32) -> Option<u32> {\n        self.freqs.get(&id).copied()\n    }\n\n    /// Inverted frequency. Out-of-vocab terms return `None`\n    #[inline]\n    pub fn inv_freq(&self, term: &str) -> Option<f32> {\n        let freq = self.freq(term)? as f32;\n        let total = self.total as f32;\n        Some((total / freq).log2())\n    }\n\n    /// Inverted frequency but out-of-vocab terms are treated as freq=1\n    #[inline]\n    pub fn inv_freq_oov(&self, term: &str) -> f32 {\n        let freq = self.freq(term).unwrap_or(1) as f32;\n        let total = self.total as f32;\n        (total / freq).log2()\n    }\n}\n\n/// Helper for building correct term frequency vectors\npub struct VecBuilder<'index> {\n    index: &'index TermFreqIndex,\n    new_terms: HashMap<String, u32>,\n}\n\nimpl<'index> VecBuilder<'index> {\n    #[inline]\n    pub(crate) fn new(index: &'index TermFreqIndex) -> Self {\n        Self {\n            index,\n            new_terms: HashMap::new(),\n        }\n    }\n\n    /// Retrieves the ID of a term or creates a new one and returns it\n    #[inline]\n    pub fn get_or_insert_id<S: AsRef<str>>(&mut self, term: S) -> u32 {\n        let term = term.as_ref();\n        // Try indexed ID\n        self.index.t_ids.get(term).copied().unwrap_or_else(|| {\n            // Try newly created term ID\n            self.new_terms.get(term).copied().unwrap_or_else(|| {\n                // Insert new ID\n                let new_id = (self.new_terms.len() + self.index.t_ids.len()) as u32;\n                self.new_terms.insert(term.to_string(), new_id);\n                new_id\n            })\n        })\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/words/foreign.rs",
    "content": "use crate::ng_freq::NgFreqIndex;\nuse serde::{Deserialize, Serialize};\nuse std::ops::Deref;\nuse vsm::presets::VSMIndexSimple;\n\n/// N-gram with for string similarity (NgFreqIndex) index\npub type WordVecIndex = VSMIndexSimple<u32>;\npub const NG_FREQ_N: usize = 3;\n\n#[derive(Serialize, Deserialize)]\npub struct ForeignIndex {\n    pub vsm_index: WordVecIndex,\n    pub ng_index: NgFreqIndex,\n}\n\nimpl ForeignIndex {\n    pub fn new(vsm_index: WordVecIndex, ng_index: NgFreqIndex) -> Self {\n        Self {\n            vsm_index,\n            ng_index,\n        }\n    }\n\n    #[inline]\n    pub fn vsm_index(&self) -> &WordVecIndex {\n        &self.vsm_index\n    }\n\n    #[inline]\n    pub fn ng_index(&self) -> &NgFreqIndex {\n        &self.ng_index\n    }\n}\n\nimpl Deref for ForeignIndex {\n    type Target = WordVecIndex;\n\n    #[inline]\n    fn deref(&self) -> &Self::Target {\n        self.vsm_index()\n    }\n}\n"
  },
  {
    "path": "lib/indexes/src/words/mod.rs",
    "content": "pub mod foreign;\npub mod native;\n\n// Shortcut for types of index\n\npub type ForeignIndex = foreign::ForeignIndex;\npub type NativeIndex = native::NativeIndex;\n"
  },
  {
    "path": "lib/indexes/src/words/native.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse crate::ng_freq::NgFreqIndex;\nuse std::ops::Deref;\n\npub const N: usize = 3;\npub type WordVecIndex = ngindex::NgramIndex<N, u32>;\n\n/// Japanese word index\n#[derive(Serialize, Deserialize)]\npub struct NativeIndex {\n    /// Japanese Word index\n    pub index: WordVecIndex,\n    /// Ng-Term frequency index\n    pub tf_index: NgFreqIndex,\n}\n\nimpl NativeIndex {\n    pub fn new(vsm_index: WordVecIndex, ng_index: NgFreqIndex) -> Self {\n        Self {\n            index: vsm_index,\n            tf_index: ng_index,\n        }\n    }\n\n    #[inline]\n    pub fn index(&self) -> &WordVecIndex {\n        &self.index\n    }\n\n    #[inline]\n    pub fn tf_index(&self) -> &NgFreqIndex {\n        &self.tf_index\n    }\n}\n\nimpl Deref for NativeIndex {\n    type Target = WordVecIndex;\n\n    #[inline]\n    fn deref(&self) -> &Self::Target {\n        self.index()\n    }\n}\n"
  },
  {
    "path": "lib/japanese/Cargo.toml",
    "content": "[package]\nname = \"japanese\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nitertools = \"0.11.0\"\n# wana_kana = { git = \"https://github.com/WeDontPanic/wana_kana_rust\" }\nwana_kana = { git = \"https://github.com/PSeitz/wana_kana_rust\" }\njp_utils = { git = \"https://github.com/JojiiOfficial/jp_utils\", features = [\"furigana\"] }\nonce_cell = \"*\"\n\n[dev-dependencies]\ntest-case = \"3.1.0\"\nresources = { path = \"../resources\" }\njapanese = { path = \"../japanese\" }\n"
  },
  {
    "path": "lib/japanese/src/furigana/generate/mod.rs",
    "content": "pub mod traits;\n\npub use traits::ReadingRetrieve;\n\nuse super::map_readings;\nuse crate::ToKanaExt;\nuse itertools::Itertools;\nuse jp_utils::{\n    furi::{parse::FuriParser, segment::AsSegment},\n    reading::ReadingRef,\n    JapaneseExt,\n};\nuse std::collections::HashSet;\n\n/// Generates furigana readings for the given `kanji` input based on the provided `kana` reading and\n/// kanji readings which are being passed using `retrieve`. In case a reading can't be correctly\n/// identified, the full kanji<->kana furigana block is getting returned instead of an error.\npub fn checked<R: ReadingRetrieve>(retrieve: R, kanji: &str, kana: &str) -> String {\n    let unchecked_furi = match unchecked(retrieve, kanji, kana) {\n        Some(u) => u,\n        // None => return single_block(kanji, kana),\n        None => return ReadingRef::new_with_kanji(kana, kanji).encode().into(),\n    };\n\n    let furi_parsed = FuriParser::new(&unchecked_furi)\n        .map(|i| i.unwrap().get_kana_reading())\n        .join(\"\");\n\n    // check if built correctly\n    check(&furi_parsed, kana)\n        .then(|| unchecked_furi)\n        // if not correct use one block for all\n        // .unwrap_or_else(|| single_block(kanji, kana))\n        .unwrap_or_else(|| ReadingRef::new_with_kanji(kana, kanji).encode().into())\n}\n\nfn check(gen: &str, kana: &str) -> bool {\n    let gen = gen\n        .chars()\n        .filter(|c| !c.is_symbol())\n        .collect::<String>()\n        .to_hiragana();\n    let kana = kana\n        .chars()\n        .filter(|c| !c.is_symbol())\n        .collect::<String>()\n        .to_hiragana();\n    gen == kana\n}\n\n/// Generates furigana readings for the given `kanji` input based on the provided `kana` reading and\n/// kanji readings which are being passed using `retrieve`\npub fn unchecked<R: ReadingRetrieve>(retrieve: R, kanji: &str, kana: &str) -> Option<String> {\n    let kanji_mappings = map_readings(kanji, kana)?;\n    
Some(gen_iter(retrieve, kanji, kanji_mappings).join(\"\"))\n}\n\n/// Returns an iterator over all encoded furigana parts\npub fn gen_iter<'a, R>(\n    retrieve: R,\n    kanji_text: &'a str,\n    readings: Vec<(String, String)>,\n) -> impl Iterator<Item = String> + 'a\nwhere\n    R: ReadingRetrieve + 'a,\n{\n    let mut text_parts = jp_utils::tokenize::by_alphabet(kanji_text, true);\n    let mut furi = readings.into_iter();\n    std::iter::from_fn(move || {\n        let curr_part = text_parts.next()?;\n\n        // No need to encode kana parts\n        if !curr_part.is_kanji() {\n            return Some(curr_part.to_string());\n        }\n\n        let (kanji, reading) = furi.next()?;\n        if let Some(readings) = assign_readings(&retrieve, &kanji, &reading) {\n            if readings.len() != kanji.chars().count() {\n                // return Some(single_block(kanji, reading));\n                return Some(ReadingRef::new_with_kanji(&reading, &kanji).encode().into());\n            }\n\n            let reading = readings.into_iter().map(|i| i.1).join(\"|\");\n            // return Some(single_block(kanji, reading));\n            return Some(ReadingRef::new_with_kanji(&reading, &kanji).encode().into());\n        }\n\n        // Some(single_block(kanji, reading))\n        return Some(ReadingRef::new_with_kanji(&reading, &kanji).encode().into());\n    })\n}\n\n/// Takes a kanji(compound) and the assigned kana reading and returns (hopefully) a list of the\n/// provided kanji with the\npub fn assign_readings<R: ReadingRetrieve>(\n    retrieve: R,\n    kanji: &str,\n    kana: &str,\n) -> Option<Vec<(String, String)>> {\n    let kanji_len = kanji.real_len();\n    let kana_len = kana.real_len();\n\n    // If both have len of 2 the readings are obv\n    if kanji_len == kana_len {\n        return Some(\n            kanji\n                .chars()\n                .zip(kana.chars())\n                .map(|(kanji, kana)| (kanji.to_string(), kana.to_string()))\n               
 .collect(),\n        );\n    }\n\n    let kanji_lits = get_kanji_literals(kanji);\n    if kanji_lits.len() == 1 {\n        return Some(vec![(kanji.to_owned(), kana.to_owned())]);\n    }\n\n    let kanji_readings = kanji_lits\n        .iter()\n        .map(|i| (*i, format_readings(retrieve.all(*i))))\n        .collect::<Vec<_>>();\n\n    if kanji_readings.is_empty() {\n        return None;\n    }\n\n    find_kanji_combo(kanji_readings, kana)\n}\n\n/// Find the exact readings of a kanji literal within a kanji compound\nfn find_kanji_combo(\n    readings_map: Vec<(char, HashSet<String>)>,\n    kana: &str,\n) -> Option<Vec<(String, String)>> {\n    let mut routes: Vec<(usize, Vec<String>, &str)> = vec![(0, vec![], kana)];\n\n    for (pos, (_, readings)) in readings_map.iter().enumerate() {\n        let route_pos = pos + 1;\n        let last_routes = routes\n            .clone()\n            .into_iter()\n            .filter(|i| i.0 == pos)\n            .collect_vec();\n\n        if last_routes.is_empty() {\n            return None;\n        }\n\n        for route in last_routes.iter() {\n            let pref = find_prefix(&readings, route.2);\n            for pref in pref {\n                let mut curr_route_readings = route.1.clone();\n                curr_route_readings.push(pref.clone());\n                let new_route = (\n                    route_pos,\n                    curr_route_readings,\n                    &route.2[pref.bytes().len()..],\n                );\n                routes.push(new_route);\n            }\n        }\n    }\n\n    let valid_routes = routes\n        .iter()\n        .filter(|i| i.2.is_empty())\n        .cloned()\n        .collect_vec();\n\n    let valid_routes = if valid_routes.is_empty() && !routes.is_empty() {\n        let lasti = routes.last().as_ref().unwrap().2.to_owned();\n        let mut last = routes.last().unwrap().to_owned();\n        let last_count = routes\n            .iter()\n            .filter(|i| i.0 + 1 == 
readings_map.len())\n            .count();\n\n        // If only one last kanji reading is missing, just apply the kana char\n        if last.1.len() + 1 == readings_map.len() && last_count == 1 {\n            last.1.push(lasti);\n            // Check if this is really the same as the kana reading\n            if last.1.clone().join(\"\") == kana {\n                vec![last]\n            } else {\n                valid_routes\n            }\n        } else {\n            valid_routes\n        }\n    } else {\n        valid_routes\n    };\n\n    // No or multiple routes found should be treated as invalid\n    if valid_routes.is_empty() || valid_routes.len() > 1 {\n        return None;\n    }\n\n    let route = valid_routes[0].1.clone();\n\n    Some(\n        readings_map\n            .into_iter()\n            .map(|i| i.0.to_string())\n            .zip(route)\n            .collect_vec(),\n    )\n}\n\nfn get_kanji_literals(inp: &str) -> Vec<char> {\n    inp.chars().filter(|i| i.is_kanji()).collect()\n}\n\nfn find_prefix(prefixe: &HashSet<String>, text: &str) -> Vec<String> {\n    prefixe\n        .iter()\n        .filter(|i| text.to_hiragana().starts_with(&i.to_hiragana()))\n        .cloned()\n        .collect_vec()\n}\n\nfn format_readings(r: Vec<String>) -> HashSet<String> {\n    r.into_iter()\n        .map(|i| i.replace(\"-\", \"\"))\n        .map(|i| {\n            if i.contains('.') {\n                // On kun readigs, replace everything after the '.'\n                let fmt1 = i.split('.').next().unwrap().to_owned().to_hiragana();\n                let fmt2 = i.replace('.', \"\").to_hiragana();\n                vec![fmt1, fmt2]\n            } else {\n                vec![i.to_hiragana()]\n            }\n        })\n        .flatten()\n        .collect()\n}\n"
  },
  {
    "path": "lib/japanese/src/furigana/generate/traits.rs",
    "content": "pub trait ReadingRetrieve {\n    fn onyomi(&self, lit: char) -> Vec<String>;\n    fn kunyomi(&self, lit: char) -> Vec<String>;\n\n    fn all(&self, lit: char) -> Vec<String> {\n        self.kunyomi(lit)\n            .into_iter()\n            .chain(self.onyomi(lit).into_iter())\n            .collect()\n    }\n}\n\nimpl<T: ReadingRetrieve> ReadingRetrieve for &T {\n    fn onyomi(&self, lit: char) -> Vec<String> {\n        (*self).onyomi(lit)\n    }\n\n    fn kunyomi(&self, lit: char) -> Vec<String> {\n        (*self).kunyomi(lit)\n    }\n}\n"
  },
  {
    "path": "lib/japanese/src/furigana/mod.rs",
    "content": "pub mod generate;\nmod tests;\n\nuse itertools::Itertools;\nuse jp_utils::JapaneseExt;\n\nuse crate::ToKanaExt;\n\n/// Generates all kanji readins from a kanji and kana string an returns them (kanji, kana)\nfn map_readings(kanji: &str, kana: &str) -> Option<Vec<(String, String)>> {\n    let kana = kana.chars().filter(|s| !s.is_symbol()).collect::<Vec<_>>();\n    let mut kana_pos = strip_until_kanji(kanji.chars());\n    let mut kanji_iter = kanji.chars().filter(|i| !i.is_symbol()).skip(kana_pos);\n\n    let mut result: Vec<(String, String)> = Vec::new();\n\n    let mut curr_kanji = Vec::new();\n    loop {\n        if kana_pos >= kana.len() {\n            break;\n        }\n\n        // Kana from current position to end\n        let curr_kana = &kana[kana_pos..];\n\n        let kk = kanji_iter.clone().collect_vec();\n\n        // Get all chars until next kanji\n        let (part_kana, part_kanji) = to_next_kanji(&mut kanji_iter);\n\n        // If last part is kanji only take rest of kana reading\n        if part_kana.is_empty() {\n            result.push((part_kanji.iter().collect(), curr_kana.iter().collect()));\n            break;\n        }\n\n        // Current kanji buff\n        curr_kanji.clear();\n        let mut counter = 1;\n        let found = loop {\n            if kana_pos >= kana.len() {\n                break false;\n            }\n            curr_kanji.push(kana[kana_pos]);\n            kana_pos += 1;\n\n            // Require at least as much kana characters as kanji characters\n            if counter < part_kanji.len() {\n                counter += 1;\n                continue;\n            }\n\n            if starts_with(\n                curr_kana,\n                &curr_kanji,\n                &part_kana,\n                !has_kanji_after(&kk, part_kanji.len() + part_kana.len()),\n            ) {\n                break true;\n            }\n\n            if curr_kanji.len() >= curr_kana.len() || kana_pos >= kana.len() {\n         
       break false;\n            }\n            counter += 1;\n        };\n\n        if !found {\n            // Error\n            return None;\n        }\n\n        result.push((\n            char_arr_to_string(&part_kanji),\n            char_arr_to_string(&curr_kanji),\n        ));\n\n        for _ in 0..(part_kana.len() + part_kanji.len()) {\n            kanji_iter.next();\n        }\n\n        kana_pos += part_kana.len();\n    }\n\n    Some(result)\n}\n\n/// Returns true if there are kanji elements within arr after the given offset\nfn has_kanji_after<T>(arr: &[T], offset: usize) -> bool\nwhere\n    T: JapaneseExt,\n{\n    if offset >= arr.len() {\n        return false;\n    }\n\n    arr[offset..]\n        .iter()\n        .any(|i| i.is_kanji() || i.is_roman_letter())\n}\n\n/// Checks whether 'arr' starts with a*b or not\nfn starts_with<T>(arr: &[T], a: &[T], b: &[T], last: bool) -> bool\nwhere\n    T: PartialEq + ToKanaExt + JapaneseExt,\n{\n    if last {\n        if a.len() + b.len() != arr.len() {\n            return false;\n        }\n    } else if a.len() + b.len() > arr.len() {\n        return false;\n    }\n\n    for (pos, item) in a.iter().enumerate() {\n        if arr[pos].to_hiragana() != *item.to_hiragana() {\n            return false;\n        }\n    }\n\n    for (pos, item) in b.iter().enumerate() {\n        if arr[pos + a.len()].to_hiragana() != *item.to_hiragana() {\n            return false;\n        }\n    }\n\n    true\n}\n\n/// Helper method to collect all items in a\n/// Vec<char> into a newly allocated String\n#[inline]\nfn char_arr_to_string(vec: &[char]) -> String {\n    vec.iter().collect()\n}\n\n/// Returns all Kanji and kana elements until a new kanji(compound) is reached\nfn to_next_kanji<T>(kanji_iter: &mut T) -> (Vec<char>, Vec<char>)\nwhere\n    T: Iterator<Item = char> + Clone,\n{\n    let mut kanji_iter = kanji_iter.clone();\n    let kanji = kanji_iter\n        .take_while_ref(|i| i.is_kanji() || i.is_symbol() || 
i.is_roman_letter())\n        .collect::<Vec<_>>();\n    let kana = kanji_iter\n        .take_while_ref(|i| i.is_kana())\n        .collect::<Vec<_>>();\n    (kana, kanji)\n}\n\n/// Truncates everything from a kanji_iterator until a kanji element has reached and returns the\n/// amount of trimmed characters\nfn strip_until_kanji<T>(mut kanji_iter: T) -> usize\nwhere\n    T: Iterator<Item = char>,\n{\n    let mut i = 0;\n    loop {\n        if kanji_iter\n            .next()\n            .map(|i| i.is_kanji() || i.is_symbol() || i.is_roman_letter())\n            .unwrap_or(true)\n        {\n            break i;\n        }\n\n        i += 1;\n    }\n}\n"
  },
  {
    "path": "lib/japanese/src/furigana/tests.rs",
    "content": "#[cfg(test)]\nmod tests {\n    use crate::furigana::map_readings;\n    //use resources::LAZY_STORAGE;\n    use test_case::test_case;\n\n    #[test_case(\"\", \"\", &vec![]; \"Empty\")]\n    //#[test_case(\"音楽が好き\", \"おんがくがすき\", &[(\"音楽\",\"おんがく\"),(\"好\",\"す\")]; \"Simple 1\")] // TODO: fix this one lol\n    #[test_case(\"音楽は好き\", \"おんがくはすき\", &[(\"音楽\",\"おんがく\"),(\"好\",\"す\")]; \"Simple 1\")]\n    #[test_case(\"お金を払いたくない\", \"おかねをはらいたくない\", &[(\"金\",\"かね\"),(\"払\",\"はら\")]; \"Simple 2\")]\n    #[test_case(\"おかねをはらいたくない\", \"おかねをはらいたくない\", &[]; \"Kana only\")]\n    #[test_case(\"漢字\", \"かんじ\", &[(\"漢字\",\"かんじ\")]; \"Kanji only\")]\n    #[test_case(\"水気\",\"みずけ\",&[(\"水気\",\"みずけ\")]; \"Kanji only 2\")]\n    #[test_case(\"いつも眠い感じがします\", \"いつもねむいかんじがします\", &[(\"眠\",\"ねむ\"),(\"感\",\"かん\")]; \"Simple 3\")]\n    #[test_case(\"今日もとても眠い\", \"きょうもとてもねむい\", &[(\"今日\",\"きょう\"),(\"眠\",\"ねむ\")]; \"Simple 4\")]\n    #[test_case(\"５日\", \"いつか\", &[(\"５日\",\"いつか\")]; \"With roman letter\")]\n    #[test_case(\"かば、夕べに\",\"かばゆうべに\",&[(\"夕\",\"ゆう\")]; \"Special char\")]\n    fn test_map_readings(kanji: &str, kana: &str, expected: &[(&str, &str)]) {\n        let parsed = map_readings(kanji, kana).unwrap();\n        let parsed = parsed\n            .iter()\n            .map(|i| (i.0.as_str(), i.1.as_str()))\n            .collect::<Vec<_>>();\n        assert_eq!(parsed, expected);\n    }\n}\n"
  },
  {
    "path": "lib/japanese/src/guessing.rs",
    "content": "use jp_utils::JapaneseExt;\n\nuse crate::ToKanaExt;\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    #[test]\n    fn test_true() {\n        test(\"shinjitakunakatta\", true);\n        test(\"ongakunante\", true);\n        test(\"shukudai\", true);\n        test(\"akogaredake\", true);\n        test(\"daijoubudesuyo\", true);\n        test(\"denshaninotteru\", true);\n        test(\"sonoshukudaiwokanseishimashitawa\", true);\n        test(\"korewanagaibunshodayone\", true);\n        test(\"atarashiibunwokangaenai\", true);\n        test(\"atarashiibunwokangaenakute\", true);\n        test(\"shinjitai\", true);\n        test(\"ongaku\", true);\n        test(\"sore wa ongaku desu yo\", true);\n        test(\"kirishima\", true);\n        test(\"deine oma\", true);\n        test(\"kyotou\", true);\n        test(\"onsen\", true);\n        test(\"onsei\", true);\n        test(\"otagai\", true);\n        test(\"kansei\", true);\n        test(\"kanpeki\", true);\n        test(\"fuben\", true);\n        test(\"kansetsu\", true);\n        test(\"chokusetsu\", true);\n    }\n\n    #[test]\n    fn test_false() {\n        test(\"kind\", false);\n        test(\"jinjc\", false);\n        test(\"gx\", false);\n        test(\"kochen macht spaß\", false);\n        test(\"kinderarbeit\", false);\n        test(\"hausaufgaben sind toll\", false);\n        test(\"I can't think of proper sentences lol\", false);\n        test(\"Mir fallen keine weiteren sätze ein lol\", false);\n        test(\"this is a laptop\", false);\n    }\n\n    fn test(inp: &str, assert: bool) {\n        if could_be_romaji(inp) != assert {\n            panic!(\"{:?} should be {}\", inp, assert);\n        }\n    }\n}\n\n/// Returns `true` if input could be romanized japanese text\n///\n/// Example: \"sore wa ongaku desu yo\" -> true\n/// Example: \"this is ugly\" -> false\npub fn could_be_romaji(inp: &str) -> bool {\n    is_romaji_repl(inp).is_some()\n}\n\npub fn is_romaji_repl(inp: &str) -> 
Option<String> {\n    let mut inp = inp.to_string();\n    let to_replace = &['.', '(', ')', '、', '。', '「', '」', ' ', '\\'', '\"'];\n    for to_repl in to_replace {\n        inp = inp.replace(*to_repl, \"\");\n    }\n    inp.to_hiragana().is_japanese().then(|| inp)\n}\n"
  },
  {
    "path": "lib/japanese/src/lib.rs",
    "content": "pub mod furigana;\npub mod guessing;\npub mod radicals;\n\npub trait ToKanaExt {\n    fn to_hiragana(&self) -> String;\n    fn to_katakana(&self) -> String;\n}\n\nimpl ToKanaExt for char {\n    #[inline]\n    fn to_hiragana(&self) -> String {\n        wana_kana::ConvertJapanese::to_hiragana(self.to_string().as_ref())\n    }\n\n    #[inline]\n    fn to_katakana(&self) -> String {\n        wana_kana::ConvertJapanese::to_katakana(self.to_string().as_ref())\n    }\n}\n\nimpl ToKanaExt for String {\n    #[inline]\n    fn to_hiragana(&self) -> String {\n        wana_kana::ConvertJapanese::to_hiragana(self.as_ref())\n    }\n\n    #[inline]\n    fn to_katakana(&self) -> String {\n        wana_kana::ConvertJapanese::to_katakana(self.as_ref())\n    }\n}\n\nimpl ToKanaExt for &str {\n    #[inline]\n    fn to_hiragana(&self) -> String {\n        wana_kana::ConvertJapanese::to_hiragana(self.as_ref())\n    }\n\n    #[inline]\n    fn to_katakana(&self) -> String {\n        wana_kana::ConvertJapanese::to_katakana(self.as_ref())\n    }\n}\n\npub fn to_kk_fmt(inp: &str) -> String {\n    let inp = inp.to_lowercase();\n    let i = inp.replace(\"nn\", \"ン\");\n    wana_kana::ConvertJapanese::to_katakana(i.as_str())\n}\n\npub fn to_hira_fmt(inp: &str) -> String {\n    let inp = inp.to_lowercase();\n    let i = inp.replace(\"nn\", \"ん\");\n    wana_kana::ConvertJapanese::to_hiragana(i.as_str())\n}\n\n/// Returns `true` if `romaji` is a prefix of `hira` where romaji is romaji text and `hira` is text written in hiragana\n#[inline]\npub fn romaji_prefix(romaji: &str, hira: &str) -> bool {\n    wana_kana::ConvertJapanese::to_romaji(hira)\n        .to_lowercase()\n        .starts_with(&romaji.to_lowercase())\n}\n"
  },
  {
    "path": "lib/japanese/src/radicals.rs",
    "content": "use once_cell::sync::Lazy;\nuse std::collections::HashMap;\n\n/// Maps radicals to their stroke counts.\nstatic RAD_STROKE_MAP: Lazy<HashMap<char, u32>> = Lazy::new(|| {\n    let mut map: HashMap<char, u32> = HashMap::default();\n    for rads in RADICALS.iter() {\n        for rad in rads.1 {\n            map.insert(rad.chars().next().unwrap(), rads.0);\n        }\n    }\n    map\n});\n\npub const RADICALS: &[(u32, &[&str]); 15] = &[\n    (1, &[\"一\", \"｜\", \"丶\", \"ノ\", \"乙\", \"亅\"]),\n    (\n        2,\n        &[\n            \"二\", \"亠\", \"人\", \"⺅\", \"𠆢\", \"儿\", \"入\", \"ハ\", \"丷\", \"冂\", \"冖\", \"冫\", \"几\", \"凵\",\n            \"刀\", \"⺉\", \"力\", \"勹\", \"匕\", \"匚\", \"十\", \"卜\", \"卩\", \"厂\", \"厶\", \"又\", \"マ\", \"九\",\n            \"ユ\", \"乃\", \"𠂉\",\n        ],\n    ),\n    (\n        3,\n        &[\n            \"⻌\", \"口\", \"囗\", \"土\", \"士\", \"夂\", \"夕\", \"大\", \"女\", \"子\", \"宀\", \"寸\", \"小\", \"⺌\",\n            \"尢\", \"尸\", \"屮\", \"山\", \"川\", \"巛\", \"工\", \"已\", \"巾\", \"干\", \"幺\", \"广\", \"廴\", \"廾\",\n            \"弋\", \"弓\", \"ヨ\", \"彑\", \"彡\", \"彳\", \"⺖\", \"⺘\", \"⺡\", \"⺨\", \"⺾\", \"⻏\", \"⻖\", \"也\",\n            \"亡\", \"及\", \"久\",\n        ],\n    ),\n    (\n        4,\n        &[\n            \"⺹\", \"心\", \"戈\", \"戸\", \"手\", \"支\", \"攵\", \"文\", \"斗\", \"斤\", \"方\", \"无\", \"日\", \"曰\",\n            \"月\", \"木\", \"欠\", \"止\", \"歹\", \"殳\", \"比\", \"毛\", \"氏\", \"气\", \"水\", \"火\", \"⺣\", \"爪\",\n            \"父\", \"爻\", \"爿\", \"片\", \"牛\", \"犬\", \"⺭\", \"王\", \"元\", \"井\", \"勿\", \"尤\", \"五\", \"屯\",\n            \"巴\", \"毋\",\n        ],\n    ),\n    (\n        5,\n        &[\n            \"玄\", \"瓦\", \"甘\", \"生\", \"用\", \"田\", \"疋\", \"疒\", \"癶\", \"白\", \"皮\", \"皿\", \"目\", \"矛\",\n            \"矢\", \"石\", \"示\", \"禸\", \"禾\", \"穴\", \"立\", \"⻂\", \"世\", \"巨\", \"冊\", \"母\", \"⺲\", \"牙\",\n        ],\n    ),\n    (\n        6,\n        &[\n            \"瓜\", \"竹\", \"米\", \"糸\", \"缶\", 
\"羊\", \"羽\", \"而\", \"耒\", \"耳\", \"聿\", \"肉\", \"自\", \"至\",\n            \"臼\", \"舌\", \"舟\", \"艮\", \"色\", \"虍\", \"虫\", \"血\", \"行\", \"衣\", \"西\",\n        ],\n    ),\n    (\n        7,\n        &[\n            \"臣\", \"見\", \"角\", \"言\", \"谷\", \"豆\", \"豕\", \"豸\", \"貝\", \"赤\", \"走\", \"足\", \"身\", \"車\",\n            \"辛\", \"辰\", \"酉\", \"釆\", \"里\", \"舛\", \"麦\",\n        ],\n    ),\n    (\n        8,\n        &[\n            \"金\", \"長\", \"門\", \"隶\", \"隹\", \"雨\", \"青\", \"非\", \"奄\", \"岡\", \"免\", \"斉\",\n        ],\n    ),\n    (\n        9,\n        &[\n            \"面\", \"革\", \"韭\", \"音\", \"頁\", \"風\", \"飛\", \"食\", \"首\", \"香\", \"品\",\n        ],\n    ),\n    (\n        10,\n        &[\"馬\", \"骨\", \"高\", \"髟\", \"鬥\", \"鬯\", \"鬲\", \"鬼\", \"竜\", \"韋\"],\n    ),\n    (11, &[\"魚\", \"鳥\", \"鹵\", \"鹿\", \"麻\", \"亀\", \"啇\", \"黄\", \"黒\"]),\n    (12, &[\"黍\", \"黹\", \"無\", \"歯\"]),\n    (13, &[\"黽\", \"鼎\", \"鼓\", \"鼠\"]),\n    (14, &[\"鼻\", \"齊\"]),\n    (17, &[\"龠\"]),\n];\n\n/// Returns true if `lit` is a radical\n#[inline]\npub fn is_radical(lit: char) -> bool {\n    RADICALS\n        .iter()\n        .any(|i| i.1.iter().any(|j| j.chars().next().unwrap() == lit))\n}\n\n/// Returns a radical literal with its stroke count if found\n#[inline]\npub fn get_stroke_count(lit: char) -> Option<u32> {\n    RAD_STROKE_MAP.get(&lit).copied()\n}\n"
  },
  {
    "path": "lib/localization/Cargo.toml",
    "content": "[package]\nname = \"localization\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\ndyn-fmt = \"0.3.0\"\ngettext = \"0.4.0\"\nlog = \"0.4.19\"\nstrum = \"0.25.0\"\nstrum_macros = \"0.25.1\"\n"
  },
  {
    "path": "lib/localization/src/error.rs",
    "content": "#[derive(Debug)]\npub enum Error {\n    Gettext(gettext::Error),\n    Io(std::io::Error),\n    DefaultNotFound,\n}\n\nimpl From<gettext::Error> for Error {\n    fn from(err: gettext::Error) -> Self {\n        Self::Gettext(err)\n    }\n}\n\nimpl From<std::io::Error> for Error {\n    fn from(err: std::io::Error) -> Self {\n        Self::Io(err)\n    }\n}\n"
  },
  {
    "path": "lib/localization/src/language.rs",
    "content": "use strum_macros::{AsRefStr, Display, EnumString};\n\nuse crate::traits::Translatable;\n\n/// Supported languages for translation\n#[derive(Copy, Clone, AsRefStr, EnumString, Display, Eq, PartialEq, Hash, Debug)]\n#[repr(u8)]\npub enum Language {\n    #[strum(serialize = \"en\", serialize = \"en-US\")]\n    English,\n    #[strum(serialize = \"de\", serialize = \"de-DE\")]\n    German,\n    #[strum(serialize = \"ru\")]\n    Russain,\n    #[strum(serialize = \"sp\", serialize = \"es-ES\")]\n    Spanish,\n    #[strum(serialize = \"sw\", serialize = \"sv-SE\")]\n    Swedish,\n    #[strum(serialize = \"fr\", serialize = \"fr-FR\")]\n    French,\n    #[strum(serialize = \"nl\", serialize = \"nl-NL\")]\n    Dutch,\n    #[strum(serialize = \"hu\")]\n    Hungarian,\n    #[strum(serialize = \"sv\", serialize = \"sl-SL\", serialize = \"svl\")]\n    Slovenian,\n    #[strum(serialize = \"jp\", serialize = \"ja-JP\")]\n    Japanese,\n}\n\nimpl Default for Language {\n    #[inline]\n    fn default() -> Self {\n        Self::English\n    }\n}\n\nimpl Translatable for Language {\n    #[inline]\n    fn get_id(&self) -> &'static str {\n        match self {\n            Language::English => \"English\",\n            Language::German => \"German\",\n            Language::Russain => \"Russian\",\n            Language::Spanish => \"Spanish\",\n            Language::Swedish => \"Swedish\",\n            Language::French => \"French\",\n            Language::Dutch => \"Dutch\",\n            Language::Hungarian => \"Hungarian\",\n            Language::Slovenian => \"Slovenian\",\n            Language::Japanese => \"Japanese\",\n        }\n    }\n}\n"
  },
  {
    "path": "lib/localization/src/lib.rs",
    "content": "pub mod error;\npub mod language;\npub mod traits;\n\nuse std::{collections::HashMap, fmt::Display, fs::File, str::FromStr};\n\nuse error::Error;\nuse gettext::Catalog;\n\nuse language::Language;\nuse log::{debug, error};\n\n/// A Dictionary of multiple catalogs assigned to its languages. Requires at least one cataloge\n/// for the defined [`default_lang`]\npub struct TranslationDict {\n    catalogs: HashMap<Language, Catalog>,\n    default_lang: Language,\n}\n\nimpl TranslationDict {\n    /// Creates a new [`TranslationDict`] value with the catalogs available in [`path`]. Parses the\n    /// file names based into their representing [`Language`].\n    pub fn new(path: &str, default_lang: Language) -> Result<TranslationDict, Error> {\n        let mut catalogs = HashMap::new();\n\n        debug!(\"Loading locales from: {}\", path);\n\n        // Initialize catalogs\n        for file in std::fs::read_dir(path)? {\n            let file = file?;\n            let file_path = file.path();\n            let stem = file_path.file_stem().unwrap().to_str().unwrap();\n\n            // Ignore non .mo files\n            if file_path\n                .extension()\n                .and_then(|ext| ext.to_str())\n                .and_then(|ext| (ext.ends_with(\"mo\")).then(|| 1))\n                .is_none()\n            {\n                continue;\n            }\n\n            if let Ok(language) = Language::from_str(stem) {\n                let catalog = Catalog::parse(File::open(file_path)?)?;\n                catalogs.insert(language, catalog);\n\n                debug!(\"Loaded locale: {:?}\", language);\n            } else {\n                error!(\"Unknown language: {}\", stem);\n            }\n        }\n\n        // Check if `default_lang` is included\n        if catalogs.get(&default_lang).is_none() {\n            return Err(Error::DefaultNotFound);\n        }\n\n        Ok(TranslationDict {\n            catalogs,\n            default_lang,\n        })\n    
}\n\n    /// Returns the singular translation of `msg_id` from the given catalog\n    /// or `msg_id` itself if a translation does not exist.\n    pub fn gettext<'a>(&'a self, msg_id: &'a str, language: Option<Language>) -> &'a str {\n        self.get_catalog(language).gettext(msg_id)\n    }\n\n    /// Returns the plural translation of `msg_id` from the given catalog\n    /// with the correct plural form for the number `n` of objects.\n    /// Returns msg_id if a translation does not exist and `n == 1`,\n    /// msg_id_plural otherwise.\n    pub fn ngettext<'a>(\n        &'a self,\n        msg_id: &'a str,\n        msg_id_plural: &'a str,\n        n: u64,\n        language: Option<Language>,\n    ) -> &'a str {\n        self.get_catalog(language)\n            .ngettext(msg_id, msg_id_plural, n)\n    }\n\n    /// Returns the singular translation of `msg_id`\n    /// in the context `msg_context`\n    /// or `msg_id` itself if a translation does not exist.\n    pub fn pgettext<'a>(\n        &'a self,\n        msg_context: &'a str,\n        msg_id: &'a str,\n        language: Option<Language>,\n    ) -> &'a str {\n        self.get_catalog(language).pgettext(msg_context, msg_id)\n    }\n\n    /// Returns the plural translation of `msg_id` in the context `msg_context`\n    /// with the correct plural form for the number `n` of objects.\n    /// Returns msg_id if a translation does not exist and `n == 1`,\n    /// msg_id_plural otherwise.\n    pub fn npgettext<'a>(\n        &'a self,\n        msg_context: &'a str,\n        msg_id: &'a str,\n        msg_id_plural: &'a str,\n        n: u64,\n        language: Option<Language>,\n    ) -> &'a str {\n        self.get_catalog(language)\n            .npgettext(msg_context, msg_id, msg_id_plural, n)\n    }\n\n    /// Returns the singular translation of `msg_id` from the given catalog\n    /// or `msg_id` itself if a translation does not exist.\n    pub fn gettext_fmt<T: Display + Sized + Clone>(\n        &self,\n        msg_id: 
&str,\n        values: &[T],\n        language: Option<Language>,\n    ) -> String {\n        format(self.gettext(msg_id, language), values)\n    }\n\n    /// Returns the plural translation of `msg_id` from the given catalog\n    /// with the correct plural form for the number `n` of objects.\n    /// Returns msg_id if a translation does not exist and `n == 1`,\n    /// msg_id_plural otherwise.\n    pub fn ngettext_fmt<T: Display + Sized + Clone>(\n        &self,\n        msg_id: &str,\n        msg_id_plural: &str,\n        n: u64,\n        values: &[T],\n        language: Option<Language>,\n    ) -> String {\n        format(self.ngettext(msg_id, msg_id_plural, n, language), values)\n    }\n\n    /// Returns the singular translation of `msg_id`\n    /// in the context `msg_context`\n    /// or `msg_id` itself if a translation does not exist.\n    pub fn pgettext_fmt<T: Display + Sized + Clone>(\n        &self,\n        msg_context: &str,\n        msg_id: &str,\n        values: &[T],\n        language: Option<Language>,\n    ) -> String {\n        format(self.pgettext(msg_context, msg_id, language), values)\n    }\n\n    /// Returns the plural translation of `msg_id` in the context `msg_context`\n    /// with the correct plural form for the number `n` of objects.\n    /// Returns msg_id if a translation does not exist and `n == 1`,\n    /// msg_id_plural otherwise.\n    pub fn npgettext_fmt<T: Display + Sized + Clone>(\n        &self,\n        msg_context: &str,\n        msg_id: &str,\n        msg_id_plural: &str,\n        n: u64,\n        values: &[T],\n        language: Option<Language>,\n    ) -> String {\n        format(\n            self.npgettext(msg_context, msg_id, msg_id_plural, n, language),\n            values,\n        )\n    }\n\n    /// Returns the catalog for the given language\n    pub fn get_catalog(&self, language: Option<Language>) -> &Catalog {\n        let language = language.unwrap_or_default();\n        self.catalogs\n            
.get(&language)\n            .unwrap_or_else(|| self.get_default_catalog())\n    }\n\n    /// Returns the default catalog\n    pub fn get_default_catalog(&self) -> &Catalog {\n        self.catalogs\n            .get(&self.default_lang)\n            .expect(\"Missing default catalog\")\n    }\n}\n\n/// Formats the input with the passed values and returns a newly allocated owned String\nfn format<T: Display + Sized + Clone>(inp: &str, values: &[T]) -> String {\n    use dyn_fmt::AsStrFormatExt;\n\n    let placeholder_count = count_placeholder(inp);\n    if placeholder_count != values.len() {\n        if values.len() == 1 {\n            let first = values[0].clone();\n            let mut values = values.to_vec();\n            for _ in 0..placeholder_count - 1 {\n                values.push(first.clone());\n            }\n            return inp.format(&values);\n        }\n    }\n\n    inp.format(values)\n}\n\nfn count_placeholder(inp: &str) -> usize {\n    inp.matches(\"{}\").count()\n}\n"
  },
  {
    "path": "lib/localization/src/traits.rs",
    "content": "use std::fmt::Display;\n\nuse super::language::Language;\nuse super::TranslationDict;\n\nimpl Translatable for &'static str {\n    #[inline]\n    fn get_id(&self) -> &'static str {\n        self\n    }\n}\n\nimpl TranslatablePlural for &'static str {\n    #[inline]\n    fn get_plural_id(&self) -> &'static str {\n        self\n    }\n}\n\n/// This trait allows any objects after implementation to be translated (in singular) using `dict`\npub trait Translatable {\n    /// Has to return a unique MsgID which has to represent a msgid within the po file(s)\n    fn get_id(&self) -> &'static str;\n\n    /// Returns the singular translation of `msg_id` from the given catalog\n    /// or `msg_id` itself if a translation does not exist.\n    fn gettext<'a>(&self, dict: &'a TranslationDict, language: Option<Language>) -> &'a str {\n        dict.gettext(self.get_id(), language)\n    }\n\n    /// Returns the singular translation of `msg_id` in the context `msg_context`\n    /// or `msg_id` itself if a translation does not exist.\n    fn pgettext<'a>(\n        &self,\n        dict: &'a TranslationDict,\n        context: &'a str,\n        language: Option<Language>,\n    ) -> &'a str {\n        dict.pgettext(context, self.get_id(), language)\n    }\n\n    /// Returns the singular translation of `msg_id` from the given catalog\n    /// or `msg_id` itself if a translation does not exist.\n    fn gettext_fmt<'a, T: Display + Sized + Clone>(\n        &self,\n        dict: &'a TranslationDict,\n        values: &[T],\n        language: Option<Language>,\n    ) -> String {\n        dict.gettext_fmt(self.get_id(), values, language)\n    }\n\n    /// Returns the singular translation of `msg_id` in the context `msg_context`\n    /// or `msg_id` itself if a translation does not exist.\n    fn pgettext_fmt<T: Display + Sized + Clone>(\n        &self,\n        dict: &TranslationDict,\n        context: &str,\n        values: &[T],\n        language: Option<Language>,\n    ) -> 
String {\n        dict.pgettext_fmt(context, self.get_id(), values, language)\n    }\n\n    /// Like gettext but returns an owned string\n    fn gettext_custom(&self, dict: &TranslationDict, language: Option<Language>) -> String {\n        dict.gettext(self.get_id(), language).to_owned()\n    }\n}\n\n/// This trait allows any objects after implementation to be translated (in plural) using `dict`\npub trait TranslatablePlural: Translatable {\n    /// Has to return a unique MsgID which has to represent a msgid_plural within the po file(s)\n    fn get_plural_id(&self) -> &'static str;\n\n    /// Returns the singular translation of `msg_id` from the given catalog\n    /// or `msg_id` itself if a translation does not exist.\n    fn ngettext<'a>(\n        &self,\n        dict: &'a TranslationDict,\n        n: u64,\n        language: Option<Language>,\n    ) -> &'a str {\n        dict.ngettext(self.get_id(), self.get_plural_id(), n, language)\n    }\n\n    /// Returns the singular translation of `msg_id` in the context `msg_context`\n    /// or `msg_id` itself if a translation does not exist.\n    fn npgettext<'a>(\n        &self,\n        dict: &'a TranslationDict,\n        context: &'a str,\n        n: u64,\n        language: Option<Language>,\n    ) -> &'a str {\n        dict.npgettext(context, self.get_id(), self.get_plural_id(), n, language)\n    }\n\n    /// Returns the singular translation of `msg_id` from the given catalog\n    /// or `msg_id` itself if a translation does not exist.\n    fn ngettext_fmt<'a, T: Display + Sized + Clone>(\n        &self,\n        dict: &'a TranslationDict,\n        n: u64,\n        values: &[T],\n        language: Option<Language>,\n    ) -> String {\n        dict.ngettext_fmt(self.get_id(), self.get_plural_id(), n, values, language)\n    }\n\n    /// Returns the singular translation of `msg_id` in the context `msg_context`\n    /// or `msg_id` itself if a translation does not exist.\n    fn npgettext_fmt<T: Display + Sized + 
Clone>(\n        &self,\n        dict: &TranslationDict,\n        context: &str,\n        n: u64,\n        values: &[T],\n        language: Option<Language>,\n    ) -> String {\n        dict.npgettext_fmt(\n            context,\n            self.get_id(),\n            self.get_plural_id(),\n            n,\n            values,\n            language,\n        )\n    }\n}\n"
  },
  {
    "path": "lib/news/Cargo.toml",
    "content": "[package]\nname = \"news\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nutils = { path = \"../utils\" }\nitertools = \"0.11.0\"\nonce_cell = { version = \"1.18.0\", default-features = false }\ncomrak = { version = \"0.18.0\", default-features = false }\n\n[features]\ndefault = []\n"
  },
  {
    "path": "lib/news/src/lib.rs",
    "content": "use std::{\n    path::Path,\n    sync::{Arc, Mutex},\n};\n\nuse comrak::ComrakOptions;\n\n#[cfg(feature = \"news_inotify\")]\nuse inotify::{EventMask, Inotify, WatchMask};\n\nuse itertools::Itertools;\nuse once_cell::sync::Lazy;\n\npub static NEWS_RETRIEVE: Lazy<Mutex<Arc<News>>> =\n    Lazy::new(|| Mutex::new(Arc::new(News::default())));\n\n/// Contains a set of News entries ordered by oldest -> newest\n#[derive(Default, Debug, Clone)]\npub struct News {\n    pub entries: Vec<NewsEntry>,\n}\n\n#[derive(Default, Debug, Clone)]\npub struct NewsEntry {\n    pub id: u32,\n    pub title: String,\n    pub long: String,\n    pub short: String,\n    pub creation_time: u64,\n    pub was_trimmed: bool,\n}\n\nimpl News {\n    /// Load news from a folder\n    pub fn init<P: AsRef<Path>>(path: P) -> Result<(), Box<dyn std::error::Error>> {\n        let p = path.as_ref().to_str().unwrap().to_string();\n\n        let update = |p: &str| {\n            *NEWS_RETRIEVE.lock().unwrap() = Arc::new(Self::load(p).unwrap());\n        };\n\n        update(&p);\n\n        #[cfg(feature = \"news_inotify\")]\n        fs_changed_update(p, update);\n\n        Ok(())\n    }\n\n    pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, Box<dyn std::error::Error>> {\n        let mut entries: Vec<NewsEntry> = Vec::new();\n\n        for (pos, file) in std::fs::read_dir(path)?.enumerate() {\n            let file = file?;\n\n            let file_name = file.file_name().to_string_lossy().to_string();\n            if !file_name.contains(';') {\n                continue;\n            }\n\n            let mut fn_split = file_name.split(';');\n            let creation_time: u64 = fn_split.next().unwrap().parse()?;\n            let title = fn_split.join(\";\");\n\n            let id = pos as u32;\n\n            let (short, long) = parse_markdown(file.path())?;\n\n            entries.push(NewsEntry {\n                id,\n                title,\n                creation_time,\n                
was_trimmed: short != long,\n                long,\n                short,\n            });\n        }\n\n        entries.sort_by(|a, b| a.creation_time.cmp(&b.creation_time));\n\n        let entry_count = entries.len();\n        // Only load 15 latest news\n        let entries = entries\n            .into_iter()\n            .skip(entry_count.saturating_sub(15))\n            .collect::<Vec<_>>();\n\n        Ok(News { entries })\n    }\n\n    /// Returns an iterator over last `limit` news elements from old -> newest\n    pub fn last_entries(&self, limit: usize) -> impl Iterator<Item = &NewsEntry> {\n        self.entries\n            .iter()\n            .skip(self.entries.len() - limit.min(self.entries.len()))\n    }\n\n    /// Returns a news entry by its ID\n    pub fn by_id(&self, id: u32) -> Option<&NewsEntry> {\n        self.entries.iter().find(|i| i.id == id)\n    }\n}\n\n/// Returns a reference to the loaded news entries\n#[inline]\npub fn get() -> Arc<News> {\n    NEWS_RETRIEVE.lock().unwrap().clone()\n}\n\nfn parse_markdown<P: AsRef<Path>>(file: P) -> Result<(String, String), Box<dyn std::error::Error>> {\n    let contents = std::fs::read_to_string(file)?;\n\n    let short_md = shorten_markdown(&contents);\n\n    let mut md_options = ComrakOptions::default();\n    md_options.render.unsafe_ = true;\n    md_options.extension.autolink = true;\n    md_options.extension.tasklist = true;\n    md_options.extension.strikethrough = true;\n\n    let short_html = comrak::markdown_to_html(&short_md, &md_options);\n    let full_html = comrak::markdown_to_html(&contents, &md_options);\n\n    Ok((short_html, full_html))\n}\n\nfn shorten_markdown(full: &str) -> String {\n    let line_count = full.split('\\n').count().max(1);\n    let conten_len = utils::real_string_len(full);\n\n    let mut text_iter = full.split('\\n').filter(|i| !i.trim().starts_with('#'));\n\n    let out;\n    if conten_len > 100 {\n        if line_count > 3 {\n            out = 
text_iter.take(3).join(\"\\n\");\n        } else {\n            out = text_iter.join(\"\\n\");\n        }\n    } else {\n        out = text_iter.join(\"\\n\");\n    }\n\n    out\n}\n"
  },
  {
    "path": "lib/resources/Cargo.toml",
    "content": "[package]\nname = \"resources\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\ntypes = { path = \"../types\", features = [\"jotoba_intern\"] }\njapanese = { path = \"../japanese\" }\nintmap = { git = \"https://github.com/JojiiOfficial/rust-intmap\" }\nserde = { version = \"1.0.171\", features = [\"derive\"] }\nbincode = \"1.3.3\"\nonce_cell = \"1.18.0\"\nsorted_intersection = \"1.2.0\"\nstrum = \"0.25.0\"\nstrum_macros = \"0.25.1\"\nids_parser = { git = \"https://github.com/JojiiOfficial/IDS-Parser\" }\n#ids_parser = { path = \"../../../ids_parser\" }\n"
  },
  {
    "path": "lib/resources/build.rs",
    "content": "use std::process::Command;\n\nfn main() {\n    let git_hash = Command::new(\"git\")\n        .args(&[\"rev-parse\", \"HEAD\"])\n        .output()\n        .ok()\n        .and_then(|output| String::from_utf8(output.stdout).ok())\n        .unwrap_or_else(|| String::from(\"<Not built in git repository>\"));\n\n    println!(\"cargo:rustc-env=GIT_HASH={}\", git_hash);\n}\n"
  },
  {
    "path": "lib/resources/src/lib.rs",
    "content": "pub mod retrieve;\npub mod storage;\n\npub use storage::{feature::Feature, ResourceStorage};\n\nuse once_cell::sync::{Lazy, OnceCell};\nuse std::{\n    error::Error,\n    fs::File,\n    io::{BufReader, Write},\n    path::Path,\n};\n\n/// Static git hash of current build\npub const GIT_HASH: &str = env!(\"GIT_HASH\");\n\n/// List of features that are required for Jotoba to run properly\npub const REQUIRED_FEATURES: &[Feature] = &[\n    Feature::Words,\n    Feature::Sentences,\n    Feature::Names,\n    Feature::Kanji,\n    Feature::RadicalKanjiMap,\n    // Feature::RadicalData,\n];\n\n/// InMemory storage for all data\nstatic STORAGE: OnceCell<ResourceStorage> = OnceCell::new();\n\n/// Lazy resource storage for tests\npub static LAZY_STORAGE: Lazy<ResourceStorage> = Lazy::new(|| {\n    let path = std::env::var(\"STORAGE_DATA\").expect(\"missing STORAGE_DATA\");\n    load_raw(&path).expect(\"Failed to load test resources\")\n});\n\n/// Get loaded storage data\n#[inline(always)]\npub fn get() -> &'static ResourceStorage {\n    // Safety:\n    // The STORAGE cell gets initialized once at the beginning which is absolutely necessary for\n    // the program to work. It won't be unset so its always safe\n    unsafe { STORAGE.get_unchecked() }\n}\n\n/// Returns `true` if the storage is loaded\n#[inline(always)]\npub fn is_loaded() -> bool {\n    STORAGE.get().is_some()\n}\n\n/// Load the resource storage and returns it\npub fn load_raw<P: AsRef<Path>>(path: P) -> Result<ResourceStorage, Box<dyn Error>> {\n    let mut reader = BufReader::new(File::open(path)?);\n    Ok(bincode::deserialize_from(&mut reader)?)\n}\n\n/// Load the resource storage from a file. 
Returns `true` if it wasn't loaded before\npub fn load<P: AsRef<Path>>(path: P) -> Result<bool, Box<dyn Error>> {\n    if is_loaded() {\n        return Ok(true);\n    }\n    Ok(STORAGE.set(load_raw(path)?).is_ok())\n}\n\n/// Serializes a ResourceStorage into `output`\npub fn store<W: Write>(output: W, storage: &ResourceStorage) -> Result<(), Box<dyn Error>> {\n    bincode::serialize_into(output, storage)?;\n    Ok(())\n}\n\npub fn set(res_storage: ResourceStorage) {\n    STORAGE.set(res_storage).ok();\n}\n\npub fn wait() {\n    STORAGE.wait();\n}\n"
  },
  {
    "path": "lib/resources/src/retrieve/kanji.rs",
    "content": "use ids_parser::IDS;\nuse sorted_intersection::SortedIntersection;\nuse types::jotoba::kanji::{radical::DetailedRadical, Kanji};\n\nuse super::super::storage::kanji::KanjiStorage;\n\n#[derive(Clone, Copy)]\npub struct KanjiRetrieve<'a> {\n    storage: &'a KanjiStorage,\n}\n\nimpl<'a> KanjiRetrieve<'a> {\n    #[inline(always)]\n    pub(crate) fn new(storage: &'a KanjiStorage) -> Self {\n        KanjiRetrieve { storage }\n    }\n\n    /// Get a kanji by its literal\n    #[inline]\n    pub fn by_literal(&self, literal: char) -> Option<&'a Kanji> {\n        self.storage.literal_index.get(literal as u32)\n    }\n\n    /// Returns `true` if the index has the literal\n    #[inline]\n    pub fn has_literal(&self, literal: char) -> bool {\n        self.storage.literal_index.contains_key(literal as u32)\n    }\n\n    /// Returns all kanji with the given radicals\n    #[inline]\n    pub fn by_radicals(&self, radicals: &[char]) -> Vec<&'a Kanji> {\n        let rad_map = &self.storage.radical_map;\n\n        let mut maps = radicals\n            .iter()\n            .filter_map(|i| rad_map.get(i).map(|i| i.iter()))\n            .collect::<Vec<_>>();\n\n        if maps.is_empty() {\n            return vec![];\n        }\n\n        SortedIntersection::new(&mut maps)\n            .filter_map(|i| self.by_literal(*i))\n            .collect::<Vec<_>>()\n    }\n\n    /// Returns all kanji with given jlpt level\n    #[inline]\n    pub fn by_jlpt(&self, jlpt: u8) -> Option<&'a Vec<char>> {\n        self.storage.jlpt_data.get(&jlpt)\n    }\n\n    /// Returns an iterator over all radicals\n    #[inline]\n    pub fn radicals(&self) -> impl Iterator<Item = &'a DetailedRadical> {\n        self.storage.radical_data.iter().map(|i| i.1)\n    }\n\n    /// Returns a list of kanji taught in given genki_lesson\n    #[inline]\n    pub fn by_genki_lesson(&self, genki_lektion: u8) -> Option<&'a Vec<char>> {\n        self.storage.genki_levels.get(&genki_lektion)\n    }\n\n    
#[inline]\n    pub fn iter(&self) -> impl Iterator<Item = &'a Kanji> {\n        self.storage.literal_index.iter().map(|i| i.1)\n    }\n\n    #[inline]\n    pub fn all(&self) -> Vec<Kanji> {\n        self.iter().cloned().collect()\n    }\n\n    #[inline]\n    pub fn ids(&self, kanji_lit: char) -> Option<&'a IDS> {\n        self.storage.ids_index.get(&kanji_lit)\n    }\n\n    /// Returns the count of kanji\n    #[inline]\n    pub fn count(&self) -> usize {\n        self.storage.literal_index.len()\n    }\n}\n\nimpl japanese::furigana::generate::ReadingRetrieve for KanjiRetrieve<'_> {\n    #[inline]\n    fn onyomi(&self, lit: char) -> Vec<String> {\n        self.by_literal(lit)\n            .map(|i| i.onyomi.clone())\n            .unwrap_or_default()\n    }\n\n    #[inline]\n    fn kunyomi(&self, lit: char) -> Vec<String> {\n        self.by_literal(lit)\n            .map(|i| i.kunyomi.clone())\n            .unwrap_or_default()\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/retrieve/mod.rs",
    "content": "pub mod kanji;\npub mod name;\npub mod sentence;\npub mod word;\n"
  },
  {
    "path": "lib/resources/src/retrieve/name.rs",
    "content": "use super::super::storage::name::NameStorage;\nuse types::jotoba::names::Name;\n\n#[derive(Clone, Copy)]\npub struct NameRetrieve<'a> {\n    storage: &'a NameStorage,\n}\n\nimpl<'a> NameRetrieve<'a> {\n    #[inline(always)]\n    pub(crate) fn new(storage: &'a NameStorage) -> Self {\n        NameRetrieve { storage }\n    }\n\n    /// Get a name by its sequence id\n    #[inline]\n    pub fn by_sequence(&self, seq_id: u32) -> Option<&'a Name> {\n        self.storage.names.get(&seq_id)\n    }\n\n    /// Returns the amount of names\n    #[inline]\n    pub fn count(&self) -> usize {\n        self.storage.names.len()\n    }\n\n    /// Returns an iterator over all names\n    #[inline]\n    pub fn iter(&self) -> impl Iterator<Item = &'a Name> {\n        self.storage.names.iter().map(|i| i.1)\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/retrieve/sentence.rs",
    "content": "use super::super::storage::sentence::SentenceStorage;\nuse types::jotoba::sentences::{tag::Tag, Sentence};\n\n#[derive(Clone, Copy)]\npub struct SentenceRetrieve<'a> {\n    storage: &'a SentenceStorage,\n}\n\nimpl<'a> SentenceRetrieve<'a> {\n    #[inline(always)]\n    pub(crate) fn new(storage: &'a SentenceStorage) -> Self {\n        SentenceRetrieve { storage }\n    }\n\n    /// Returns a sentence by its id or `None` if no sentence for the given ID exists\n    #[inline]\n    pub fn by_id(&self, id: u32) -> Option<&'a Sentence> {\n        self.storage.sentences.get(id)\n    }\n\n    /// Returns an iterator over all sentences with given `jlpt` level\n    #[inline]\n    pub fn ids_by_jlpt(&self, jlpt: u8) -> impl Iterator<Item = u32> + 'a {\n        self.storage\n            .jlpt_map\n            .get(&jlpt)\n            .into_iter()\n            .flatten()\n            .copied()\n    }\n\n    /// Returns an iterator over all sentences with given `tag`\n    #[inline]\n    pub fn by_tag<'b>(&'b self, tag: &Tag) -> impl Iterator<Item = &'a Sentence> + 'b {\n        self.storage\n            .tag_map\n            .get(tag)\n            .into_iter()\n            .flatten()\n            .filter_map(move |i| self.by_id(*i))\n    }\n\n    /// Returns an iterator over all sentences with given `jlpt` level\n    #[inline]\n    pub fn by_jlpt<'b>(&'b self, jlpt: u8) -> impl Iterator<Item = &'a Sentence> + 'b {\n        self.storage\n            .jlpt_map\n            .get(&jlpt)\n            .into_iter()\n            .flatten()\n            .filter_map(move |i| self.by_id(*i))\n    }\n\n    #[inline]\n    pub fn count(&self) -> usize {\n        self.storage.sentences.len()\n    }\n\n    #[inline]\n    pub fn iter(&self) -> impl Iterator<Item = &'a Sentence> {\n        self.storage.sentences.iter().map(|i| i.1)\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/retrieve/word.rs",
    "content": "use super::super::storage::word::WordStorage;\nuse types::jotoba::words::{misc::Misc, part_of_speech::PosSimple, Word};\n\n#[derive(Clone, Copy)]\npub struct WordRetrieve<'a> {\n    storage: &'a WordStorage,\n}\n\nimpl<'a> WordRetrieve<'a> {\n    #[inline(always)]\n    pub(crate) fn new(storage: &'a WordStorage) -> Self {\n        WordRetrieve { storage }\n    }\n\n    /// Get a word by its sequence id\n    #[inline]\n    pub fn by_sequence(&self, seq_id: u32) -> Option<&'a Word> {\n        self.storage.words.get(seq_id)\n    }\n\n    /// Returns an iterator over all words\n    #[inline]\n    pub fn iter(&self) -> impl Iterator<Item = &'a Word> {\n        self.storage.words.iter().map(|i| i.1)\n    }\n\n    /// returns an iterator over all katakana words\n    pub fn katakana<'b>(&'b self) -> impl Iterator<Item = &'a Word> + 'b + DoubleEndedIterator {\n        self.storage\n            .katakana_words\n            .iter()\n            .copied()\n            .filter_map(|seq| self.by_sequence(seq))\n    }\n\n    /// returns an iterator over all irregular ichidan words\n    pub fn irregular_ichidan<'b>(\n        &'b self,\n    ) -> impl Iterator<Item = &'a Word> + 'b + DoubleEndedIterator {\n        self.storage\n            .irregular_ichidan\n            .iter()\n            .copied()\n            .filter_map(|seq| self.by_sequence(seq))\n    }\n\n    /// Returns the amount of irregular ichidan words that have been indexed\n    #[inline]\n    pub fn irregular_ichidan_len(&self) -> usize {\n        self.storage.irregular_ichidan.len()\n    }\n\n    /// Returns the amount of katakana words that have been indexed\n    #[inline]\n    pub fn katakana_len(&self) -> usize {\n        self.storage.katakana_words.len()\n    }\n\n    /// Returns an iterator over all words with given `jlpt` level\n    #[inline]\n    pub fn by_jlpt<'b>(\n        &'b self,\n        jlpt: u8,\n    ) -> impl Iterator<Item = &'a Word> + 'b + DoubleEndedIterator {\n        
self.storage\n            .jlpt_word_map\n            .get(&jlpt)\n            .into_iter()\n            .flatten()\n            .filter_map(move |i| self.by_sequence(*i))\n    }\n\n    /// Returns the amount of words indexed for given jlpt level\n    #[inline]\n    pub fn jlpt_len(&self, jlpt: u8) -> Option<usize> {\n        self.storage.jlpt_word_map.get(&jlpt).map(|i| i.len())\n    }\n\n    /// Returns an iterator over all words with given `pos`\n    #[inline]\n    pub fn by_pos_simple<'b>(\n        &'b self,\n        pos: PosSimple,\n    ) -> impl Iterator<Item = &'a Word> + 'b + DoubleEndedIterator {\n        self.storage\n            .pos_map\n            .get(&(pos as u8))\n            .into_iter()\n            .flatten()\n            .filter_map(move |i| self.by_sequence(*i))\n    }\n\n    /// Returns the amount of words indexed for `pos`\n    #[inline]\n    pub fn pos_simple_len(&self, pos: &PosSimple) -> Option<usize> {\n        self.storage.pos_map.get(&(*pos as u8)).map(|i| i.len())\n    }\n\n    /// Returns an iterator over all words with given `misc`\n    #[inline]\n    pub fn by_misc<'b>(\n        &'b self,\n        misc: Misc,\n    ) -> impl Iterator<Item = &'a Word> + 'b + DoubleEndedIterator {\n        self.storage\n            .misc_map\n            .get(&(misc as u8))\n            .into_iter()\n            .flatten()\n            .filter_map(move |i| self.by_sequence(*i))\n    }\n\n    /// Returns the amount of words indexed for misc\n    #[inline]\n    pub fn misc_len(&self, misc: &Misc) -> Option<usize> {\n        self.storage.misc_map.get(&(*misc as u8)).map(|i| i.len())\n    }\n\n    /// Returns the total count of words\n    #[inline]\n    pub fn count(&self) -> usize {\n        self.storage.count()\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/storage/feature.rs",
    "content": "use strum::{EnumIter, IntoEnumIterator};\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug, EnumIter)]\npub enum Feature {\n    // ----- Basic ones -----\n    Words,\n    Sentences,\n    Names,\n    Kanji,\n\n    /// RadicalToKanji\n    RadicalKanjiMap,\n\n    /// DetailedRadicals\n    RadicalData,\n\n    // ----- Other ------\n\n    // Sentences\n    SentenceJLPT,\n    SentenceTags,\n\n    // Words\n    WordIrregularIchidan,\n    WordKatakana,\n    WordPitch,\n    SentenceAvailable,\n    WordJlpt,\n\n    // Kanji\n    GenkiTags,\n    SimilarKanji,\n    KanjiDecompositions,\n}\n\nimpl Feature {\n    pub fn all() -> Vec<Feature> {\n        Feature::iter().collect()\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/storage/kanji.rs",
    "content": "use ids_parser::IDS;\nuse serde::{Deserialize, Serialize};\nuse std::collections::HashMap;\nuse types::jotoba::kanji::{radical::DetailedRadical, Kanji};\n\nuse super::feature::Feature;\n\n/// Storage containing all data related to kanji\n#[derive(Serialize, Deserialize, Default, Clone)]\npub struct KanjiStorage {\n    /// Index mapping kanji literals to `Kanji` data\n    pub literal_index: intmap::IntMap<Kanji>,\n\n    /// Mapping from a radical to a list of kanji using this radical\n    pub radical_map: HashMap<char, Vec<char>>,\n\n    /// Maps radical literal to its detailed radical data\n    pub radical_data: HashMap<char, DetailedRadical>,\n\n    /// Jlpt mapping for kanji\n    pub jlpt_data: HashMap<u8, Vec<char>>,\n\n    // Search tags\n    pub genki_levels: HashMap<u8, Vec<char>>,\n\n    /// IDS index for kanji decomposition graph\n    pub ids_index: HashMap<char, IDS>,\n\n    has_similar_kanji: bool,\n}\n\nimpl KanjiStorage {\n    pub fn new() -> Self {\n        Self::default()\n    }\n\n    /// Insert kanji into the KanjiStorage\n    pub fn insert_kanji(&mut self, kanji: Vec<Kanji>) {\n        self.literal_index.clear();\n        self.jlpt_data.clear();\n\n        for kanji in kanji {\n            if let Some(jlpt) = kanji.jlpt {\n                self.jlpt_data.entry(jlpt).or_default().push(kanji.literal);\n            }\n\n            if !self.has_similar_kanji && !kanji.similar_kanji.is_empty() {\n                self.has_similar_kanji = true;\n            }\n            self.literal_index.insert(kanji.literal as u32, kanji);\n        }\n    }\n\n    /// Insert radical detail data\n    pub fn insert_radicals(&mut self, radicals: Vec<DetailedRadical>) {\n        self.radical_data.clear();\n        for radical in radicals {\n            self.radical_data.insert(radical.literal, radical);\n        }\n    }\n\n    pub fn get_features(&self) -> Vec<Feature> {\n        let mut out = vec![];\n\n        if !self.literal_index.is_empty() {\n       
     out.push(Feature::Kanji);\n        }\n\n        if !self.genki_levels.is_empty() {\n            out.push(Feature::GenkiTags);\n        }\n\n        if !self.radical_data.is_empty() {\n            out.push(Feature::RadicalData);\n        }\n\n        if !self.radical_map.is_empty() {\n            out.push(Feature::RadicalKanjiMap);\n        }\n\n        if self.has_similar_kanji {\n            out.push(Feature::SimilarKanji);\n        }\n\n        if !self.ids_index.is_empty() {\n            out.push(Feature::KanjiDecompositions);\n        }\n\n        out\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/storage/mod.rs",
    "content": "pub mod feature;\npub mod kanji;\npub mod name;\npub mod sentence;\npub mod word;\n\nuse super::retrieve::{\n    kanji::KanjiRetrieve, name::NameRetrieve, sentence::SentenceRetrieve, word::WordRetrieve,\n};\n\nuse self::{\n    feature::Feature, kanji::KanjiStorage, name::NameStorage, sentence::SentenceStorage,\n    word::WordStorage,\n};\nuse serde::{Deserialize, Serialize};\nuse strum::IntoEnumIterator;\n\n/// Storage holding all data of Jotoba\n#[derive(Serialize, Deserialize, Default, Clone)]\npub struct ResourceStorage {\n    pub words: WordStorage,\n    pub kanji: KanjiStorage,\n    pub names: NameStorage,\n    pub sentences: SentenceStorage,\n}\n\nimpl ResourceStorage {\n    /// Create a new empty `ResourceStorage`\n    pub fn new() -> Self {\n        Self::default()\n    }\n\n    /// Returns `true` if all necessary features are present\n    pub fn check(&self) -> bool {\n        self.missing_but_required().is_empty()\n    }\n\n    pub fn missing_but_required(&self) -> Vec<Feature> {\n        let missing = self.missing_features();\n        let mut out = vec![];\n\n        for req_feature in super::REQUIRED_FEATURES {\n            if missing.contains(req_feature) {\n                out.push(*req_feature);\n            }\n        }\n\n        out\n    }\n\n    /// Returns a list of features that are missing but required\n    pub fn missing_features(&self) -> Vec<Feature> {\n        let features = self.get_features();\n\n        let mut missing = vec![];\n\n        for feature in Feature::iter() {\n            if !features.contains(&feature) {\n                missing.push(feature);\n            }\n        }\n\n        missing\n    }\n\n    /// Returns `true` if ResourceStorage has the given feature\n    #[inline]\n    pub fn has_feature(&self, feature: Feature) -> bool {\n        self.get_features().contains(&feature)\n    }\n\n    /// Returns a list of all features of the ResourceStorage's data\n    pub fn get_features(&self) -> Vec<Feature> 
{\n        let mut out = vec![];\n        out.extend(self.words.get_features());\n        out.extend(self.kanji.get_features());\n        out.extend(self.names.get_features());\n        out.extend(self.sentences.get_features());\n        out\n    }\n}\n\n// Retrieve functions\n// `ResourceStorage::check` is supposed to be called at the beginning to ensure\n// those fields are not unset\nimpl ResourceStorage {\n    /// Get a reference to the resource storage's words.\n    #[inline(always)]\n    pub fn words<'a>(&'a self) -> WordRetrieve<'a> {\n        WordRetrieve::new(&self.words)\n    }\n\n    /// Get a reference to the resource storage's kanji.\n    #[inline(always)]\n    pub fn kanji(&self) -> KanjiRetrieve {\n        KanjiRetrieve::new(&self.kanji)\n    }\n\n    /// Get a reference to the resource storage's names.\n    #[inline(always)]\n    pub fn names(&self) -> NameRetrieve {\n        NameRetrieve::new(&self.names)\n    }\n\n    /// Get a reference to the resource storage's sentences.\n    #[inline(always)]\n    pub fn sentences(&self) -> SentenceRetrieve {\n        SentenceRetrieve::new(&self.sentences)\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/storage/name.rs",
    "content": "use std::collections::HashMap;\n\nuse super::feature::Feature;\nuse serde::{Deserialize, Serialize};\nuse types::jotoba::names::Name;\n\n/// Storage containing all data related to names\n#[derive(Serialize, Deserialize, Default, Clone)]\npub struct NameStorage {\n    /// Index mapping name id to its `Name` value\n    pub names: HashMap<u32, Name>,\n}\n\nimpl NameStorage {\n    pub fn new() -> Self {\n        Self::default()\n    }\n\n    /// Insert names into the NameStorage\n    pub fn insert_names(&mut self, names: Vec<Name>) {\n        self.names.clear();\n\n        for name in names {\n            self.names.insert(name.sequence, name);\n        }\n    }\n\n    pub fn get_features(&self) -> Vec<Feature> {\n        let mut out = vec![];\n        if !self.names.is_empty() {\n            out.push(Feature::Names);\n        }\n        out\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/storage/sentence.rs",
    "content": "use super::feature::Feature;\nuse intmap::IntMap;\nuse serde::{Deserialize, Serialize};\nuse std::collections::HashMap;\nuse types::jotoba::sentences::{tag::Tag, Sentence};\n\n/// Storage for sentence related data\n#[derive(Serialize, Deserialize, Clone, Default)]\npub struct SentenceStorage {\n    /// Mapping sentence by its ID\n    pub sentences: IntMap<Sentence>,\n\n    /// Mappings of tags to sentences with this tag\n    pub tag_map: HashMap<Tag, Vec<u32>>,\n\n    // Search tags\n    pub jlpt_map: HashMap<u8, Vec<u32>>,\n}\n\nimpl SentenceStorage {\n    pub fn new() -> Self {\n        Self::default()\n    }\n\n    pub fn get_features(&self) -> Vec<Feature> {\n        let mut out = vec![];\n\n        if !self.sentences.is_empty() {\n            out.push(Feature::Sentences);\n        }\n\n        if !self.tag_map.is_empty() {\n            out.push(Feature::SentenceTags);\n        }\n\n        if !self.jlpt_map.is_empty() {\n            out.push(Feature::SentenceJLPT);\n        }\n\n        out\n    }\n}\n"
  },
  {
    "path": "lib/resources/src/storage/word.rs",
    "content": "use intmap::IntMap;\nuse serde::{Deserialize, Serialize};\nuse std::collections::HashMap;\nuse types::jotoba::words::Word;\n\nuse super::feature::Feature;\n\n/// Storage containing all data related to words\n#[derive(Serialize, Deserialize, Default, Clone)]\npub struct WordStorage {\n    /// Word index\n    pub words: IntMap<Word>,\n\n    // Search tags\n    pub jlpt_word_map: HashMap<u8, Vec<u32>>,\n    pub irregular_ichidan: Vec<u32>,\n    pub pos_map: HashMap<u8, Vec<u32>>,\n    pub misc_map: HashMap<u8, Vec<u32>>,\n    pub katakana_words: Vec<u32>,\n\n    // Feature information\n    has_accents: bool,\n    has_sentence_mapping: bool,\n    has_jlpt: bool,\n}\n\nimpl WordStorage {\n    pub fn new() -> Self {\n        Self::default()\n    }\n\n    /// Returns the amount of words in the WordStorage\n    #[inline]\n    pub fn count(&self) -> usize {\n        self.words.len()\n    }\n\n    /// Inserts words into the WordStorage\n    pub fn insert_words(&mut self, words: Vec<Word>) {\n        self.clear_words();\n\n        for word in words {\n            if let Some(jlpt) = word.get_jlpt_lvl() {\n                self.jlpt_word_map\n                    .entry(jlpt)\n                    .or_default()\n                    .push(word.sequence);\n                self.has_jlpt = true;\n            }\n\n            if !self.has_accents && word.accents.count() > 0 {\n                self.has_accents = true;\n            }\n\n            self.words.insert(word.sequence, word);\n        }\n\n        for (_, v) in self.jlpt_word_map.iter_mut() {\n            v.sort();\n        }\n    }\n\n    pub fn update_sentence_mapping(&mut self) {\n        self.has_sentence_mapping = self.words.iter().any(|i| i.1.sentences_available > 0);\n    }\n\n    pub fn get_features(&self) -> Vec<Feature> {\n        let mut out = vec![];\n\n        if !self.words.is_empty() {\n            out.push(Feature::Words);\n        }\n\n        if !self.irregular_ichidan.is_empty() {\n        
    out.push(Feature::WordIrregularIchidan);\n        }\n\n        if !self.katakana_words.is_empty() {\n            out.push(Feature::WordKatakana);\n        }\n\n        if self.has_sentence_mapping {\n            out.push(Feature::SentenceAvailable);\n        }\n\n        if self.has_accents {\n            out.push(Feature::WordPitch);\n        }\n\n        if self.has_jlpt {\n            out.push(Feature::WordJlpt);\n        }\n\n        out\n    }\n\n    fn clear_words(&mut self) {\n        self.words.clear();\n        self.jlpt_word_map.clear();\n        self.has_accents = false;\n        self.has_sentence_mapping = false;\n    }\n}\n"
  },
  {
    "path": "lib/search/Cargo.toml",
    "content": "[package]\nname = \"search\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\njapanese = { path = \"../japanese\" }\nengine = { path = \"../engine\" }\nsentence_reader = { path = \"../sentence_reader\", features = [\"jotoba_intern\"] }\nerror = { path = \"../error\" }\nutils = { path = \"../utils\" }\nindexes = { path = \"../indexes\" }\nlocalization = { path = \"../localization\" }\nresources = { path = \"../resources\" }\ntypes = { path = \"../types\", default-features = false, features = [\n  \"jotoba_intern\",\n] }\nlog = \"0.4.19\"\nitertools = \"0.11.0\"\nonce_cell = { version = \"1.18.0\", default-features = false }\nregex = { version = \"1.9.1\", features = [\"std\"], default-features = false }\npercent-encoding = \"2.3.0\"\nrayon = \"1.7.0\"\npriority_container = { git = \"https://github.com/JojiiOfficial/PrioContainer/\" }\n#priority_container = { path = \"../../../priority_container\" }\norder_struct = { git = \"https://github.com/JojiiOfficial/OrderStruct\" }\nintmap = { git = \"https://github.com/JojiiOfficial/rust-intmap\" }\n#ngindex = { path = \"../../../ngindex\" }\nngindex = { git = \"https://github.com/JojiiOfficial/ngindex\"}\n#ngindex2 = { path =\"../../../ngindex\", package = \"ngindex\" }\n#index_framework = { path = \"../../../index_framework\" }\nindex_framework = { git = \"https://github.com/WeDontPanic/index_framework\" }\n#vsm = { path = \"../../../vsm\"}\nvsm = { git = \"https://github.com/JojiiOfficial/VSM\"}\nsparse_vec = { git = \"https://github.com/JojiiOfficial/SparseVec\"}\nserde = { version = \"1.0.171\", features = [\"derive\"] }\nbincode = \"1.3.3\"\n#ngram-tools = { path = \"../../../ngram-tools\"}\nngram-tools = { git = \"https://github.com/JojiiOfficial/ngram-tools\"}\njp_utils = { git = \"https://github.com/JojiiOfficial/jp_utils\"}\njapanese-number-parser = { 
git = \"https://github.com/gorbit99/japanese-number-parser\"}\n\n[dev-dependencies]\ntest-case = \"3.1.0\"\n"
  },
  {
    "path": "lib/search/src/engine/mod.rs",
    "content": "pub mod names;\npub mod radical;\npub mod sentences;\npub mod words;\n"
  },
  {
    "path": "lib/search/src/engine/names/foreign.rs",
    "content": "use index_framework::{\n    retrieve::{retriever::default::DefaultRetrieve, Retrieve},\n    traits::{backend::Backend, dictionary::IndexDictionary},\n};\nuse indexes::{names::FOREIGN_NGRAM, words::native::N as NATIVE_NGRAM};\nuse ngindex::{item::IndexItem, termset::TermSet, utils::padded, NGIndex, Wordgrams};\nuse types::jotoba::{language::Language, names::Name};\n\npub struct Engine;\n\nimpl engine::Engine<'static> for Engine {\n    type B = NGIndex<FOREIGN_NGRAM, Self::Document>;\n    type DictItem = String;\n    type Document = IndexItem<u32>;\n    type Retriever = DefaultRetrieve<'static, Self::B, Self::DictItem, Self::Document>;\n    // TODO: fix NGramRetriever needing more than `limit` iterations\n    //type Retriever = NGramRetriever<'static, NATIVE_NGRAM, Self::B, Self::DictItem, Self::Document>;\n    type Output = &'static Name;\n    type Query = TermSet;\n\n    fn make_query<S: AsRef<str>>(inp: S, _: Option<Language>) -> Option<Self::Query> {\n        let fmt = format_word(inp.as_ref());\n\n        let dict = Self::get_index(None).dict();\n        let mut tids: Vec<_> = Wordgrams::new(&padded(&fmt, NATIVE_NGRAM - 1), NATIVE_NGRAM)\n            .filter_map(|i| dict.get_id(i))\n            .collect();\n        tids.sort_unstable();\n        println!(\"{tids:#?}\");\n        if tids.is_empty() {\n            return None;\n        }\n        Some(TermSet::new(tids))\n    }\n\n    #[inline]\n    fn doc_to_output(input: &Self::Document) -> Option<Vec<Self::Output>> {\n        resources::get()\n            .names()\n            .by_sequence(*input.item())\n            .map(|i| vec![i])\n    }\n\n    #[inline]\n    fn get_index(_: Option<Language>) -> &'static Self::B {\n        indexes::get().name().foreign()\n    }\n\n    #[inline]\n    fn retrieve_for(\n        query: &Self::Query,\n        _q_str: &str,\n        lang: Option<Language>,\n    ) -> Retrieve<'static, Self::B, Self::DictItem, Self::Document> {\n        
Self::retrieve(lang).by_term_ids(query.iter().copied())\n    }\n}\n\n#[inline]\nfn format_word(inp: &str) -> String {\n    let mut out = String::from(inp.to_lowercase());\n    for i in \".,[]() \\t\\\"'\\\\/-;:\".chars() {\n        out = out.replace(i, \" \");\n    }\n    out.to_lowercase()\n}\n"
  },
  {
    "path": "lib/search/src/engine/names/mod.rs",
    "content": "pub mod foreign;\npub mod native;\n"
  },
  {
    "path": "lib/search/src/engine/names/native.rs",
    "content": "use index_framework::{\n    retrieve::{retriever::default::DefaultRetrieve, Retrieve},\n    traits::{backend::Backend, dictionary::IndexDictionary},\n};\nuse indexes::words::native::N as NATIVE_NGRAM;\nuse jp_utils::JapaneseExt;\nuse ngindex::{item::IndexItem, termset::TermSet, utils::padded, NGIndex, Wordgrams};\nuse types::jotoba::{language::Language, names::Name};\n\npub struct Engine;\n\nimpl engine::Engine<'static> for Engine {\n    type B = NGIndex<NATIVE_NGRAM, Self::Document>;\n    type DictItem = String;\n    type Document = IndexItem<u32>;\n    type Retriever = DefaultRetrieve<'static, Self::B, Self::DictItem, Self::Document>;\n    // TODO: fix NGramRetriever needing more than `limit` iterations\n    //type Retriever = NGramRetriever<'static, NATIVE_NGRAM, Self::B, Self::DictItem, Self::Document>;\n    type Output = &'static Name;\n    type Query = TermSet;\n\n    fn make_query<S: AsRef<str>>(inp: S, _: Option<Language>) -> Option<Self::Query> {\n        let dict = Self::get_index(None).dict();\n        let mut tids: Vec<_> =\n            Wordgrams::new(&padded(inp.as_ref(), NATIVE_NGRAM - 1), NATIVE_NGRAM)\n                .filter_map(|i| dict.get_id(i))\n                .collect();\n        tids.sort_unstable();\n        if tids.is_empty() {\n            return None;\n        }\n        Some(TermSet::new(tids))\n    }\n\n    #[inline]\n    fn doc_to_output(input: &Self::Document) -> Option<Vec<Self::Output>> {\n        resources::get()\n            .names()\n            .by_sequence(*input.item())\n            .map(|i| vec![i])\n    }\n\n    #[inline]\n    fn get_index(_: Option<Language>) -> &'static Self::B {\n        indexes::get().name().native()\n    }\n\n    #[inline]\n    fn retrieve_for(\n        query: &Self::Query,\n        _q_str: &str,\n        lang: Option<Language>,\n    ) -> Retrieve<'static, Self::B, Self::DictItem, Self::Document> {\n        let search_in;\n\n        if _q_str.is_kanji() {\n            search_in = 2;\n   
     } else if _q_str.has_kanji() {\n            search_in = 1;\n        } else {\n            search_in = 0;\n        }\n\n        Self::retrieve(lang)\n            .by_term_ids(query.iter().copied())\n            .in_posting(search_in)\n    }\n}\n"
  },
  {
    "path": "lib/search/src/engine/radical/mod.rs",
    "content": "use types::jotoba::kanji::radical::SearchRadicalInfo;\n\n/// Finds Radicals by its meaning(s). If `query_str` was not found\n/// as meaning of an radical, similar meanings are being searched\n/// and added to the result\npub fn find(query_str: &str) -> Vec<&'static SearchRadicalInfo> {\n    let mut queries = vec![query_str];\n\n    let meaning_index = indexes::get().radical().meaning_index();\n\n    if !meaning_index.has_term(query_str) {\n        add_similar(query_str, &mut queries);\n    }\n\n    queries\n        .into_iter()\n        .filter_map(|term| meaning_index.get(term))\n        .flatten()\n        .take(5)\n        .collect()\n}\n\n/// Adds meanings of radicals with similar meaning as `query_str` to `out`\nfn add_similar(query_str: &str, out: &mut Vec<&str>) {\n    let meaning_index = indexes::get().radical().meaning_index();\n\n    // Search term in meanings\n    let mut found = meaning_index.term_tree.find(&query_str.to_string(), 2);\n\n    // Show more similar terms above\n    found.sort_by(|a, b| a.1.cmp(&b.1).reverse());\n\n    // Assign `queries` to a new vec because it can only contain in index existing terms\n    out.extend(found.into_iter().take(3).map(|i| i.0.as_str()));\n}\n"
  },
  {
    "path": "lib/search/src/engine/sentences/foreign.rs",
    "content": "use index_framework::{\n    backend::memory::{\n        dict::default::Dictionary, postings::compressed::Postings, storage::default::Storage,\n        MemBackend,\n    },\n    retrieve::retriever::default::DefaultRetrieve,\n    traits::{backend::Backend, dictionary::IndexDictionary},\n};\nuse sparse_vec::{SpVec32, VecExt};\nuse types::jotoba::{language::Language, sentences::Sentence};\nuse vsm::{dict_term::DictTerm, doc_vec::DocVector};\n\npub struct Engine {}\n\nimpl engine::Engine<'static> for Engine {\n    type B = MemBackend<\n        DictTerm,\n        DocVector<u32>,\n        Dictionary<DictTerm>,\n        Storage<DocVector<u32>>,\n        Postings,\n    >;\n    type DictItem = DictTerm;\n    type Document = DocVector<u32>;\n    type Retriever = DefaultRetrieve<'static, Self::B, Self::DictItem, Self::Document>;\n    type Output = &'static Sentence;\n    type Query = SpVec32;\n\n    fn make_query<S: AsRef<str>>(inp: S, _lang: Option<Language>) -> Option<Self::Query> {\n        let query = inp.as_ref();\n\n        let mut terms = all_terms(&query.to_lowercase());\n        terms.push(query.to_string().to_lowercase());\n\n        let index = Self::get_index(None);\n\n        let term_ids = terms\n            .into_iter()\n            .filter_map(|i| index.dict().get_id(&i))\n            .map(|id| (id, 1.0));\n        let vec = SpVec32::create_new_raw(term_ids);\n        (!vec.is_empty()).then(|| vec)\n    }\n\n    #[inline]\n    fn doc_to_output(input: &Self::Document) -> Option<Vec<Self::Output>> {\n        resources::get()\n            .sentences()\n            .by_id(*input.document())\n            .map(|i| vec![i])\n    }\n\n    #[inline]\n    fn get_index(_lang: Option<Language>) -> &'static Self::B {\n        indexes::get().sentence().foreign()\n    }\n\n    #[inline]\n    fn retrieve_for(\n        inp: &Self::Query,\n        _query_str: &str,\n        lang: Option<Language>,\n    ) -> index_framework::retrieve::Retrieve<'static, Self::B, 
Self::DictItem, Self::Document> {\n        let term_iter = inp.dimensions().map(|i| i as u32);\n        Self::retrieve(lang)\n            .by_term_ids(term_iter)\n            .in_posting(lang.unwrap() as u32)\n    }\n}\n\n/// Splits a string into all its terms.\n///\n/// # Example\n/// \"make some coffee\" => vec![\"make\",\"some\",\"coffee\"];\npub(crate) fn all_terms(i: &str) -> Vec<String> {\n    i.split(' ')\n        .map(|i| {\n            format_word(i)\n                .split(' ')\n                .map(|i| i.to_lowercase())\n                .filter(|i| !i.is_empty())\n                .collect::<Vec<_>>()\n        })\n        .flatten()\n        .collect()\n}\n\n/// Replaces all special characters into spaces so we can split it down into words\nfn format_word(inp: &str) -> String {\n    let mut out = String::from(inp);\n    for i in \".,[]() \\t\\\"'\\\\/-;:\".chars() {\n        out = out.replace(i, \" \");\n    }\n    out\n}\n"
  },
  {
    "path": "lib/search/src/engine/sentences/mod.rs",
    "content": "pub mod foreign;\npub mod native;\n"
  },
  {
    "path": "lib/search/src/engine/sentences/native.rs",
    "content": "use index_framework::{\n    backend::memory::{\n        dict::default::Dictionary, postings::compressed::Postings, storage::default::Storage,\n        MemBackend,\n    },\n    retrieve::retriever::default::DefaultRetrieve,\n    traits::{backend::Backend, dictionary::IndexDictionary},\n};\nuse jp_utils::JapaneseExt;\nuse sentence_reader::output::ParseResult;\nuse sparse_vec::{SpVec32, VecExt};\nuse std::collections::HashSet;\nuse types::jotoba::{language::Language, sentences::Sentence};\nuse vsm::{dict_term::DictTerm, doc_vec::DocVector};\n\npub struct Engine {}\n\nimpl engine::Engine<'static> for Engine {\n    type B = MemBackend<\n        DictTerm,\n        DocVector<u32>,\n        Dictionary<DictTerm>,\n        Storage<DocVector<u32>>,\n        Postings,\n    >;\n    type DictItem = DictTerm;\n    type Document = DocVector<u32>;\n    type Retriever = DefaultRetrieve<'static, Self::B, Self::DictItem, Self::Document>;\n    type Output = &'static Sentence;\n    type Query = SpVec32;\n\n    fn make_query<S: AsRef<str>>(inp: S, _lang: Option<Language>) -> Option<Self::Query> {\n        let mut terms: HashSet<String> = HashSet::new();\n\n        let dict = Self::get_index(None).dict();\n\n        let query = inp.as_ref();\n\n        if dict.get_id(query).is_some() {\n            terms.insert(query.to_string());\n        } else {\n            match sentence_reader::Parser::new(query).parse() {\n                ParseResult::Sentence(s) => {\n                    terms.extend(s.iter().map(|i| i.get_inflected()));\n                    terms.extend(s.iter().map(|i| i.get_normalized()));\n                }\n                ParseResult::InflectedWord(w) => {\n                    let infl = w.get_inflected();\n                    //println!(\"inflected: {infl:?}: {:?}\", dict.get_id(&infl));\n                    if dict.get_id(&infl).is_some() {\n                        terms.insert(infl);\n                    } else {\n                        
terms.insert(w.get_normalized());\n                    }\n                }\n                ParseResult::None => (),\n            };\n        }\n\n        //terms.retain(|w| !index.is_stopword_cust(&w, 10.0).unwrap_or(true));\n\n        let terms = terms.into_iter().map(|i| format_query(&i)).filter_map(|i| {\n            let id = dict.get_id(&i);\n            //let term = dict.get_term(id).unwrap();\n            Some((id?, 1.0))\n        });\n\n        let vec = SpVec32::create_new_raw(terms);\n        (!vec.is_empty()).then(|| vec)\n    }\n\n    #[inline]\n    fn doc_to_output(input: &Self::Document) -> Option<Vec<Self::Output>> {\n        resources::get()\n            .sentences()\n            .by_id(*input.document())\n            .map(|i| vec![i])\n    }\n\n    #[inline]\n    fn get_index(_lang: Option<Language>) -> &'static Self::B {\n        indexes::get().sentence().native()\n    }\n\n    #[inline]\n    fn retrieve_for(\n        inp: &Self::Query,\n        _query_str: &str,\n        lang: Option<Language>,\n    ) -> index_framework::retrieve::Retrieve<'static, Self::B, Self::DictItem, Self::Document> {\n        let term_iter = inp.dimensions().map(|i| i as u32);\n        if let Some(lang) = lang {\n            Self::retrieve(Some(lang))\n                .by_term_ids(term_iter)\n                .in_posting(lang as u32)\n        } else {\n            let langs = Language::iter_word().map(|i| i as u32);\n            Self::retrieve(None)\n                .by_term_ids(term_iter)\n                .in_postings(langs)\n        }\n    }\n}\n\n#[inline]\nfn format_query(inp: &str) -> String {\n    inp.to_halfwidth()\n}\n"
  },
  {
    "path": "lib/search/src/engine/words/foreign.rs",
    "content": "use index_framework::{\n    backend::memory::{\n        dict::default::Dictionary, postings::compressed::Postings, storage::default::Storage,\n        MemBackend,\n    },\n    retrieve::{retriever::default::DefaultRetrieve, Retrieve},\n    traits::{backend::Backend, dictionary::IndexDictionary},\n};\n\nuse once_cell::sync::Lazy;\nuse regex::Regex;\nuse sparse_vec::{SpVec32, VecExt};\nuse types::jotoba::{language::Language, words::Word};\nuse vsm::{dict_term::DictTerm, doc_vec::DocVector};\n\npub struct Engine;\n\nconst FORMAT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(\"^to \").unwrap());\n\nimpl engine::Engine<'static> for Engine {\n    type B = MemBackend<\n        DictTerm,\n        DocVector<u32>,\n        Dictionary<DictTerm>,\n        Storage<DocVector<u32>>,\n        Postings,\n    >;\n    type DictItem = DictTerm;\n    type Document = DocVector<u32>;\n    type Retriever = DefaultRetrieve<'static, Self::B, Self::DictItem, Self::Document>;\n    type Output = &'static Word;\n    type Query = SpVec32;\n\n    fn make_query<S: AsRef<str>>(inp: S, lang: Option<Language>) -> Option<Self::Query> {\n        let query_str = format_word(inp.as_ref().trim());\n        let dict = Self::get_index(lang).dict();\n\n        let inp = FORMAT_REGEX.replace_all(&query_str, \"\").to_string();\n\n        let add_term_iter = inp\n            .split(' ')\n            .map(|i| i.trim())\n            .filter_map(|term| dict.get_id(term))\n            .map(|i| (i, 0.001));\n\n        let sparse = dict\n            .get_id(&inp)\n            .map(|i| (i, 1.0))\n            .into_iter()\n            .chain(add_term_iter);\n\n        let vec = SpVec32::create_new_raw(sparse);\n\n        if vec.is_empty() {\n            return None;\n        }\n\n        Some(vec)\n    }\n\n    #[inline]\n    fn doc_to_output(input: &Self::Document) -> Option<Vec<Self::Output>> {\n        resources::get()\n            .words()\n            .by_sequence(*input.document())\n            
.map(|i| vec![i])\n    }\n\n    #[inline]\n    fn get_index(lang: Option<Language>) -> &'static Self::B {\n        indexes::get().word().foreign(lang.unwrap()).unwrap()\n    }\n\n    #[inline]\n    fn retrieve_for(\n        query: &Self::Query,\n        _q_str: &str,\n        lang: Option<Language>,\n    ) -> Retrieve<'static, Self::B, Self::DictItem, Self::Document> {\n        let term_iter = query.as_vec().iter().map(|i| i.0);\n        Self::retrieve(lang).by_term_ids(term_iter)\n    }\n}\n\n#[inline]\nfn format_word(inp: &str) -> String {\n    let mut out = String::from(inp);\n    for i in \".,[]() \\t\\\"'\\\\/-;:・\".chars() {\n        out = out.replace(i, \" \");\n    }\n    out.to_lowercase()\n}\n"
  },
  {
    "path": "lib/search/src/engine/words/mod.rs",
    "content": "pub mod foreign;\npub mod native;\n"
  },
  {
    "path": "lib/search/src/engine/words/native/k_reading.rs",
    "content": "use index_framework::retrieve::retriever::default::DefaultRetrieve;\nuse types::jotoba::language::Language;\nuse types::jotoba::words::Word;\n\npub struct Engine;\n\nimpl engine::Engine<'static> for Engine {\n    type B = indexes::kanji::reading::Index;\n    type DictItem = String;\n    type Document = u32;\n    type Retriever = DefaultRetrieve<'static, Self::B, Self::DictItem, Self::Document>;\n    type Output = &'static Word;\n    type Query = String;\n\n    fn make_query<S: AsRef<str>>(inp: S, _lang: Option<Language>) -> Option<Self::Query> {\n        Some(inp.as_ref().to_string())\n    }\n\n    #[inline]\n    fn doc_to_output(input: &Self::Document) -> Option<Vec<Self::Output>> {\n        resources::get()\n            .words()\n            .by_sequence(*input)\n            .map(|i| vec![i])\n    }\n\n    #[inline]\n    fn get_index(_lang: Option<Language>) -> &'static Self::B {\n        indexes::get().word().k_reading()\n    }\n\n    fn retrieve_for(\n        inp: &Self::Query,\n        _query_str: &str,\n        _lang: Option<Language>,\n    ) -> index_framework::retrieve::Retrieve<'static, Self::B, Self::DictItem, Self::Document> {\n        Self::retrieve(None).by_term(inp)\n    }\n}\n"
  },
  {
    "path": "lib/search/src/engine/words/native/mod.rs",
    "content": "pub mod k_reading;\npub mod regex;\n\nuse index_framework::{\n    retrieve::{retriever::default::DefaultRetrieve, Retrieve},\n    traits::{backend::Backend, dictionary::IndexDictionary},\n};\nuse indexes::words::native::N as NATIVE_NGRAM;\nuse ngindex::{item::IndexItem, termset::TermSet, utils::padded, NGIndex, Wordgrams};\nuse types::jotoba::{language::Language, words::Word};\n\npub struct Engine {}\n\nimpl engine::Engine<'static> for Engine {\n    type B = NGIndex<NATIVE_NGRAM, Self::Document>;\n    type DictItem = String;\n    type Document = IndexItem<u32>;\n    type Retriever = DefaultRetrieve<'static, Self::B, Self::DictItem, Self::Document>;\n    // TODO: fix NGramRetriever needing more than `limit` iterations\n    //type Retriever = NGramRetriever<'static, NATIVE_NGRAM, Self::B, Self::DictItem, Self::Document>;\n    type Output = &'static Word;\n    type Query = TermSet;\n\n    fn make_query<S: AsRef<str>>(inp: S, _: Option<Language>) -> Option<Self::Query> {\n        let dict = Self::get_index(None).dict();\n        let mut tids: Vec<_> =\n            Wordgrams::new(&padded(inp.as_ref(), NATIVE_NGRAM - 1), NATIVE_NGRAM)\n                .filter_map(|i| dict.get_id(i))\n                .collect();\n        tids.sort_unstable();\n        if tids.is_empty() {\n            return None;\n        }\n        Some(TermSet::new(tids))\n    }\n\n    #[inline]\n    fn doc_to_output(input: &Self::Document) -> Option<Vec<Self::Output>> {\n        resources::get()\n            .words()\n            .by_sequence(*input.item())\n            .map(|i| vec![i])\n    }\n\n    #[inline]\n    fn get_index(_: Option<Language>) -> &'static Self::B {\n        indexes::get().word().native()\n    }\n\n    #[inline]\n    fn retrieve_for(\n        query: &Self::Query,\n        _q_str: &str,\n        lang: Option<Language>,\n    ) -> Retrieve<'static, Self::B, Self::DictItem, Self::Document> {\n        Self::retrieve(lang).by_term_ids(query.iter().copied())\n    }\n}\n"
  },
  {
    "path": "lib/search/src/engine/words/native/regex.rs",
    "content": "use crate::query::regex::RegexSQuery;\nuse engine::utils::page_from_pqueue;\nuse indexes::regex::RegexSearchIndex;\nuse intmap::int_set::IntSet;\nuse itertools::Itertools;\nuse order_struct::order_nh::OrderVal;\nuse priority_container::StableUniquePrioContainerMax;\nuse types::jotoba::words::Word;\n\n/// Result of a regex search\npub struct RegexSearchResult {\n    pub items: Vec<&'static Word>,\n    // the total amount of items the search would return.\n    // This value is most likely different than items.len()\n    pub item_len: usize,\n}\n\npub fn search<F>(query: &RegexSQuery, sort: F, limit: usize, offset: usize) -> RegexSearchResult\nwhere\n    F: Fn(&Word, &str) -> usize,\n{\n    let word_resources = resources::get().words();\n\n    let queue_size = limit + offset;\n    let mut out_queue = StableUniquePrioContainerMax::new_allocated(queue_size, queue_size);\n\n    let index = indexes::get().word().regex();\n    let possible_results = find_words(index, &query.get_chars());\n\n    for seq_id in possible_results.into_iter().sorted() {\n        let word = word_resources.by_sequence(seq_id).unwrap();\n\n        let item_iter = word\n            .reading_iter(true)\n            .filter_map(|i| query.matches(&i.reading).then(|| (word, &i.reading)))\n            .map(|(word, reading)| {\n                let order = sort(word, reading);\n                OrderVal::new(word, order)\n            });\n\n        out_queue.extend(item_iter);\n    }\n\n    let item_len = out_queue.total_pushed();\n\n    let items: Vec<_> = page_from_pqueue(limit, offset, out_queue)\n        .into_iter()\n        .map(|i| i.into_inner())\n        .collect();\n\n    RegexSearchResult { items, item_len }\n}\n\n/// Get all indexed words using characters in `chars`\npub(crate) fn find_words(index: &RegexSearchIndex, chars: &[char]) -> IntSet {\n    if chars.is_empty() {\n        return IntSet::new();\n    }\n\n    let mut out = IntSet::new();\n\n    // Add words of first 
character to `out`\n    let mut chars_iter = chars.iter();\n\n    // We want to fill `out` with some values.\n    loop {\n        let first = match chars_iter.next() {\n            Some(f) => f,\n            None => break,\n        };\n\n        if let Some(v) = index.get_words_with(*first) {\n            out.reserve(v.len());\n            out.extend(v.iter().copied());\n            // exit first found character\n            break;\n        }\n    }\n\n    for v in chars_iter.filter_map(|c| index.get_words_with(*c)) {\n        out.retain(|i| v.contains(&i));\n        if out.is_empty() {\n            return IntSet::new();\n        }\n    }\n\n    out\n}\n"
  },
  {
    "path": "lib/search/src/executor/mod.rs",
    "content": "pub mod out_builder;\npub mod producer;\npub mod search_result;\npub mod searchable;\n\nuse std::time::Instant;\n\nuse engine::{pushable::FilteredMaxCounter, utils::page_from_pqueue_with_max_dist};\nuse log::debug;\nuse out_builder::OutputBuilder;\nuse search_result::SearchResult;\nuse searchable::Searchable;\nuse types::jotoba::search::guess::{Guess, GuessType};\n\nuse crate::executor::out_builder::OutputAddable;\n\n/// Max items to count for estimation\npub const MAX_ESTIMATE: usize = 100;\n\n/// Executes a search\npub struct SearchExecutor<S: Searchable> {\n    search: S,\n}\n\nimpl<S: Searchable> SearchExecutor<S> {\n    /// Creates a new SearchExecutor\n    #[inline]\n    pub fn new(search: S) -> Self {\n        Self { search }\n    }\n\n    /// Executes the search\n    pub fn run(self) -> SearchResult<S::OutItem, S::ResAdd> {\n        let query = self.search.get_query();\n        let limit = query.settings.page_size as usize;\n        let offset = query.page_offset;\n\n        let mut out = OutputBuilder::new(|i| self.search.filter(i), limit + offset);\n\n        for prod in self.search.get_producer() {\n            if !prod.should_run(out.p.total_pushed()) {\n                continue;\n            }\n            let before = out.p.total_pushed();\n            let start = Instant::now();\n            prod.produce(&mut out);\n            let dur = start.elapsed();\n            let after = out.p.total_pushed();\n            let name = prod.name();\n            debug!(\"{name}: {} Elements in {:?}\", after - before, dur);\n        }\n\n        self.search.mod_output(&mut out);\n\n        if out.is_empty() && out.output_add.is_empty() {\n            let mut res = SearchResult::default();\n            res.other_data = out.output_add;\n            return res;\n        }\n\n        // Get total len of results\n        let len;\n        if let Some(max_top_dist) = self.search.max_top_dist() {\n            len = out\n                .rel_list\n         
       .iter()\n                .filter(|i| **i + max_top_dist >= out.max)\n                .count();\n        } else {\n            len = out.p.total_pushed();\n        }\n        assert_eq!(out.p.total_pushed(), out.rel_list.len());\n\n        let max_top_dist = self.search.max_top_dist().unwrap_or(0.0);\n        let items: Vec<_> =\n            page_from_pqueue_with_max_dist(limit, offset, max_top_dist, out.max, out.p)\n                .into_iter()\n                .map(|i| self.search.to_output_item(i.item))\n                .collect();\n\n        SearchResult::with_other_data(items, len, out.output_add)\n    }\n\n    pub fn guess(&self) -> Option<Guess> {\n        let start = Instant::now();\n\n        let mut counter =\n            FilteredMaxCounter::<S::Item>::new(MAX_ESTIMATE + 1, |i| self.search.filter(i));\n\n        // Keep track of real count to give `should_run` a correct value\n        let mut c = 0;\n        for prod in self.search.get_producer() {\n            if !prod.should_run(c) {\n                continue;\n            }\n\n            let old_counter = counter.val();\n            prod.estimate_to(&mut counter);\n\n            // Add たった今数えた量 to `c`\n            c += counter.val() - old_counter;\n\n            if counter.is_full() {\n                break;\n            }\n        }\n\n        let sum = counter.val();\n\n        let gt;\n        if sum > MAX_ESTIMATE {\n            gt = GuessType::MoreThan;\n        } else {\n            gt = GuessType::Accurate;\n        }\n\n        debug!(\"Guessing took: {:?}\", start.elapsed());\n        Some(Guess::new(sum.min(MAX_ESTIMATE) as u32, gt))\n    }\n}\n"
  },
  {
    "path": "lib/search/src/executor/out_builder.rs",
    "content": "use engine::{pushable::Pushable, relevance::item::RelItem};\nuse priority_container::StableUniquePrioContainerMax;\nuse std::hash::Hash;\n\npub struct OutputBuilder<'a, I, OA> {\n    pub(crate) p: StableUniquePrioContainerMax<RelItem<I>>,\n    pub(crate) filter: Box<dyn Fn(&I) -> bool + 'a>,\n    pub(crate) output_add: OA,\n    pub(crate) rel_list: Vec<f32>,\n    pub(crate) max: f32,\n}\n\nimpl<'a, I: Eq + Hash + Clone, OA: OutputAddable> OutputBuilder<'a, I, OA> {\n    #[inline]\n    pub(crate) fn new<F: Fn(&I) -> bool + 'a>(filter: F, len: usize) -> Self {\n        Self {\n            p: StableUniquePrioContainerMax::new(len),\n            filter: Box::new(filter),\n            output_add: OA::default(),\n            rel_list: vec![],\n            max: 0.0,\n        }\n    }\n\n    #[inline]\n    pub fn len(&self) -> usize {\n        self.p.len()\n    }\n\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.p.is_empty()\n    }\n\n    /// Pushes an element into the output and  returns `true` if it was not filtered out\n    #[inline]\n    pub fn push(&mut self, item: RelItem<I>) -> bool {\n        if !(self.filter)(&item.item) {\n            if self.max < item.relevance {\n                self.max = item.relevance;\n            }\n\n            let rel = item.relevance;\n            if self.p.insert(item) {\n                self.rel_list.push(rel);\n            }\n\n            return true;\n        }\n\n        false\n    }\n}\n\nimpl<'a, I: Eq + Hash + Clone, OA: OutputAddable> Pushable for OutputBuilder<'a, I, OA> {\n    type Item = RelItem<I>;\n\n    /// Pushes an element into the output and  returns `true` if it was not filtered out\n    #[inline]\n    fn push(&mut self, i: Self::Item) -> bool {\n        self.push(i)\n    }\n}\n\npub trait OutputAddable: Default {\n    #[inline]\n    fn is_empty(&self) -> bool {\n        false\n    }\n}\n\nimpl OutputAddable for () {}\n"
  },
  {
    "path": "lib/search/src/executor/producer.rs",
    "content": "use super::{out_builder::OutputBuilder, searchable::Searchable};\nuse engine::pushable::FilteredMaxCounter;\nuse std::any::type_name;\n\npub trait Producer {\n    type Target: Searchable;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    );\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        true\n    }\n\n    fn estimate_to(&self, _out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {}\n\n    fn name(&self) -> String {\n        format_debug_name::<Self>()\n    }\n}\n\nfn format_debug_name<T: ?Sized>() -> String {\n    let mut name = type_name::<T>().to_string();\n\n    // Strip module name\n    let start_pos = name\n        .char_indices()\n        .rev()\n        .find(|i| i.1 == ':')\n        .map(|i| i.0 + 1)\n        .unwrap_or(0);\n    name.replace_range(0..start_pos, \"\");\n\n    name\n}\n"
  },
  {
    "path": "lib/search/src/executor/search_result.rs",
    "content": "use std::ops::Deref;\n\n/// The final result of a search\n#[derive(Clone, Debug)]\npub struct SearchResult<T, O = ()> {\n    pub items: Vec<T>,\n    pub total: usize,\n    pub other_data: O,\n}\n\nimpl<T> SearchResult<T, ()> {\n    /// Creates a new SearchResult from a vec\n    #[inline]\n    pub fn from_vec(items: Vec<T>) -> Self {\n        let total = items.len();\n        Self {\n            items,\n            total,\n            other_data: (),\n        }\n    }\n\n    /// Creates a new search result\n    #[inline]\n    pub fn new(items: Vec<T>, total: usize) -> Self {\n        Self {\n            items,\n            total,\n            other_data: (),\n        }\n    }\n}\n\nimpl<T, O> SearchResult<T, O> {\n    /// Creates a new search result\n    #[inline]\n    pub fn with_other_data(items: Vec<T>, total: usize, other_data: O) -> Self {\n        Self {\n            items,\n            total,\n            other_data,\n        }\n    }\n\n    #[inline]\n    pub fn len(&self) -> usize {\n        self.items.len()\n    }\n\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.len() == 0\n    }\n\n    #[inline]\n    pub fn iter(&self) -> impl Iterator<Item = &T> {\n        self.items.iter()\n    }\n}\n\nimpl<T, O: Default> SearchResult<T, O> {\n    /// Creates a new search result\n    pub fn with_other_default(items: Vec<T>, total: usize) -> Self {\n        Self {\n            items,\n            total,\n            other_data: O::default(),\n        }\n    }\n}\n\nimpl<T, O> Default for SearchResult<T, O>\nwhere\n    O: Default,\n{\n    #[inline]\n    fn default() -> Self {\n        Self {\n            items: vec![],\n            total: 0,\n            other_data: O::default(),\n        }\n    }\n}\n\nimpl<T, O> Deref for SearchResult<T, O> {\n    type Target = O;\n\n    #[inline]\n    fn deref(&self) -> &Self::Target {\n        &self.other_data\n    }\n}\n"
  },
  {
    "path": "lib/search/src/executor/searchable.rs",
    "content": "use super::{\n    out_builder::{OutputAddable, OutputBuilder},\n    producer::Producer,\n};\nuse crate::query::Query;\nuse std::{fmt::Debug, hash::Hash};\n\npub trait Searchable {\n    type Item: Clone + Eq + Hash + Debug;\n    type OutItem;\n    type ResAdd: OutputAddable;\n\n    fn get_producer<'s>(&'s self) -> &Vec<Box<dyn Producer<Target = Self> + 's>>;\n\n    fn get_query(&self) -> &Query;\n\n    /// Converts from `Self::Item` to `Self::OutputItem`\n    fn to_output_item(&self, item: Self::Item) -> Self::OutItem;\n\n    /// Allows modifying the collected producers output before converting it to a SearchResult\n    fn mod_output(&self, _out: &mut OutputBuilder<Self::Item, Self::ResAdd>) {}\n\n    /// Should return `true` if the passed item should be ignored / filtered\n    fn filter(&self, _item: &Self::Item) -> bool {\n        false\n    }\n\n    #[inline]\n    fn max_top_dist(&self) -> Option<f32> {\n        None\n    }\n}\n"
  },
  {
    "path": "lib/search/src/kanji/mod.rs",
    "content": "mod order;\npub mod result;\nmod tag_only;\n\nuse self::result::KanjiResult;\nuse super::query::Query;\nuse crate::{engine::words::native::Engine, query::QueryLang, word::order::native::NativeOrder};\nuse engine::task::SearchTask;\nuse error::Error;\nuse jp_utils::JapaneseExt;\nuse result::Item;\nuse types::jotoba::{\n    kanji::Kanji,\n    search::guess::{Guess, GuessType},\n};\n\n/// The entry of a kanji search\npub fn search(query: &Query) -> Result<KanjiResult, Error> {\n    if query.form.is_tag_only() {\n        return tag_only::search(query);\n    }\n\n    let query_str = format_query(&query.query_str);\n\n    let res = match query.q_lang {\n        QueryLang::Japanese => by_japanese_query(&query.query_str),\n        QueryLang::Korean => by_korean_reading(&query.query_str),\n        QueryLang::Foreign | QueryLang::Undetected => by_meaning(&query.query_str),\n    };\n\n    // TODO: don't use this items in v2 since compound words don't need to be loaded\n    // here anymore\n    let mut items = to_item(res, &query);\n\n    if !query_str.is_japanese() {\n        items.sort_by(order::default);\n    }\n\n    let total_len = items.len();\n\n    let page_size = query.settings.page_size as usize;\n\n    let items = items\n        .into_iter()\n        .skip(query.page_offset(page_size))\n        .take(page_size)\n        .collect::<Vec<_>>();\n\n    Ok(KanjiResult { items, total_len })\n}\n\n/// Find a kanji by its literal\nfn by_japanese_query(query: &str) -> Vec<Kanji> {\n    // Use kanji from query\n    let kanji = kanji_from_str(query);\n    if !kanji.is_empty() || query.is_kanji() {\n        return kanji;\n    }\n\n    // Do word searc with kana instead\n    kana_search(query)\n}\n\n/// Search for kanji using kana query\nfn kana_search(query: &str) -> Vec<Kanji> {\n    let q = query.to_string();\n\n    let mut search_task = SearchTask::<Engine>::new(query)\n        .with_limit(10)\n        .with_threshold(0.8)\n        .with_result_filter(move 
|i| i.has_reading(&q))\n        .with_custom_order(NativeOrder::new(query.to_string()));\n\n    search_task\n        .find()\n        .into_iter()\n        .map(|i| kanji_from_str(&i.get_reading().reading))\n        .flatten()\n        .take(100)\n        .collect()\n}\n\nfn by_korean_reading(query: &str) -> Vec<Kanji> {\n    resources::get()\n        .kanji()\n        .iter()\n        .filter(|k| k.korean_h.iter().any(|kw| kw == query))\n        .cloned()\n        .collect()\n}\n\n#[inline]\nfn from_char(c: char) -> Option<Kanji> {\n    resources::get().kanji().by_literal(c).cloned()\n}\n\nfn kanji_from_str(text: &str) -> Vec<Kanji> {\n    text.chars()\n        .into_iter()\n        .filter_map(|i| i.is_kanji().then(|| from_char(i)).flatten())\n        .take(100)\n        .collect()\n}\n\n/// Guesses the amount of results a search would return with given `query`\npub fn guess_result(query: &Query) -> Option<Guess> {\n    let query_str = &query.query_str;\n\n    let kanji_storage = resources::get().kanji();\n    let guess = query_str\n        .chars()\n        .into_iter()\n        .filter(|i| i.is_kanji())\n        .filter_map(|literal| kanji_storage.by_literal(literal))\n        .take(15)\n        .count();\n\n    Some(Guess::new(guess as u32, GuessType::Accurate))\n}\n\n/// Find kanji by mits meaning\nfn by_meaning(meaning: &str) -> Vec<Kanji> {\n    // TODO: implement proper algo kek\n    let meaning = meaning.to_lowercase();\n    resources::get()\n        .kanji()\n        .iter()\n        .filter(|i| i.meanings.contains(&meaning))\n        .cloned()\n        .collect::<Vec<_>>()\n}\n\n#[inline]\nfn to_item(items: Vec<Kanji>, query: &Query) -> Vec<Item> {\n    items\n        .into_iter()\n        .map(|i| Item::load_words(i, query.lang_param()))\n        .collect()\n}\n\n#[inline]\nfn format_query(query: &str) -> String {\n    query.replace(\" \", \"\").replace(\".\", \"\").trim().to_string()\n}\n"
  },
  {
    "path": "lib/search/src/kanji/order.rs",
    "content": "use super::result::Item;\nuse std::cmp::Ordering;\nuse utils::option_order;\n\n/// Order kanji results which were found by the kanjis meaning appropriately\n#[inline]\npub(crate) fn default(a: &Item, b: &Item) -> Ordering {\n    let a = &a.kanji;\n    let b = &b.kanji;\n\n    if let Some(o) = option_order(&a.grade, &b.grade) {\n        return o;\n    }\n\n    if let Some(o) = option_order(&a.frequency, &b.frequency) {\n        return o;\n    }\n\n    if let Some(o) = option_order(&a.jlpt, &b.jlpt) {\n        return o;\n    }\n\n    Ordering::Equal\n}\n"
  },
  {
    "path": "lib/search/src/kanji/result.rs",
    "content": "use std::{fs::read_to_string, path::Path};\nuse types::jotoba::{\n    kanji::Kanji,\n    language::param::AsLangParam,\n    words::{filter_languages, Word},\n};\n\n// The final result of a Kanji search\n#[derive(Default)]\npub struct KanjiResult {\n    pub items: Vec<Item>,\n    pub total_len: usize,\n}\n\n#[derive(Debug, PartialEq, Clone)]\npub struct Item {\n    pub kanji: Kanji,\n    pub kun_dicts: Option<Vec<Word>>,\n    pub on_dicts: Option<Vec<Word>>,\n    pub has_compositions: bool,\n}\n\nimpl Item {\n    pub fn load_words(k: Kanji, lang: impl AsLangParam) -> Self {\n        let kun_dicts = load_dicts(&k.kun_dicts, lang);\n        let on_dicts = load_dicts(&k.on_dicts, lang);\n\n        let has_compositions = resources::get().kanji().ids(k.literal).is_some();\n\n        Self {\n            kun_dicts,\n            on_dicts,\n            kanji: k,\n            has_compositions,\n        }\n    }\n}\n\nfn load_dicts(dicts: &Vec<u32>, lang: impl AsLangParam) -> Option<Vec<Word>> {\n    let word_storage = resources::get().words();\n    let mut words: Vec<_> = dicts\n        .iter()\n        .filter_map(|j| word_storage.by_sequence(*j))\n        .cloned()\n        .collect();\n\n    filter_languages(words.iter_mut(), lang);\n\n    if words.is_empty() {\n        return None;\n    }\n\n    Some(words)\n}\n\nimpl Item {\n    /// Returns the entries' frames (svg)\n    pub fn get_frames<P: AsRef<Path>>(&self, assets_path: P) -> Option<String> {\n        read_to_string(self.kanji.get_stroke_frames_path(assets_path)).ok()\n    }\n\n    /// Return the animation entries for the template\n    pub fn get_animation<P: AsRef<Path>>(&self, assets_path: P) -> Option<String> {\n        read_to_string(self.kanji.get_animation_path(assets_path)).ok()\n    }\n\n    /// Get a list of korean readings, formatted as: \"<Hangul> (<romanized>)\"\n    pub fn get_korean(&self) -> Option<Vec<String>> {\n        if !self.kanji.korean_r.is_empty() && 
!self.kanji.korean_h.is_empty() {\n            let korean_h = &self.kanji.korean_h;\n            let korean_r = &self.kanji.korean_r;\n\n            Some(\n                korean_h\n                    .iter()\n                    .zip(korean_r.iter())\n                    .map(|(h, k)| format!(\"{} ({})\", h, k))\n                    .collect(),\n            )\n        } else {\n            None\n        }\n    }\n\n    /// Returns the amount of parts a kanji is built with\n    #[inline]\n    pub fn get_parts_count(&self) -> usize {\n        self.kanji.parts.len()\n    }\n\n    #[inline]\n    pub fn get_radical(&self) -> String {\n        if let Some(ref alternative) = self.kanji.radical.alternative {\n            format!(\"{} ({})\", self.kanji.radical.literal, alternative)\n        } else {\n            self.kanji.radical.literal.clone().to_string()\n        }\n    }\n\n    #[inline]\n    pub fn get_rad_len(&self) -> usize {\n        self.kanji\n            .radical\n            .alternative\n            .as_ref()\n            .map(|_| 1)\n            .unwrap_or_default()\n            + self\n                .kanji\n                .radical\n                .translations\n                .as_ref()\n                .map(|i| i.join(\", \").len())\n                .unwrap_or_default()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/kanji/tag_only.rs",
    "content": "use super::KanjiResult;\nuse crate::query::{tags::Tag, Query};\nuse error::Error;\n\npub fn search(query: &Query) -> Result<KanjiResult, Error> {\n    let single_tag = query.tags.iter().find(|i| i.is_producer());\n\n    if single_tag.is_none() {\n        return Ok(KanjiResult::default());\n    }\n\n    match single_tag.unwrap() {\n        Tag::Jlpt(jlpt) => jlpt_search(query, *jlpt),\n        Tag::GenkiLesson(genki_lesson) => genki_search(query, *genki_lesson),\n        _ => return Ok(KanjiResult::default()),\n    }\n}\n\nfn genki_search(query: &Query, genki_lesson: u8) -> Result<KanjiResult, Error> {\n    let kanji_retrieve = resources::get().kanji();\n\n    let genki_lesson = match kanji_retrieve.by_genki_lesson(genki_lesson) {\n        Some(gl) => gl,\n        None => return Ok(KanjiResult::default()),\n    };\n\n    let kanji = genki_lesson\n        .iter()\n        .filter_map(|literal| kanji_retrieve.by_literal(*literal))\n        .cloned()\n        .collect::<Vec<_>>();\n\n    let total_len = kanji.len();\n\n    let page_size = query.settings.page_size as usize;\n    let page_offset = query.page_offset(page_size);\n\n    let kanji = kanji\n        .into_iter()\n        .skip(page_offset)\n        .take(page_size)\n        .collect::<Vec<_>>();\n\n    let items = super::to_item(kanji, query);\n\n    Ok(KanjiResult { items, total_len })\n}\n\nfn jlpt_search(query: &Query, jlpt: u8) -> Result<KanjiResult, Error> {\n    let kanji_retrieve = resources::get().kanji();\n\n    let jlpt_kanji = match kanji_retrieve.by_jlpt(jlpt) {\n        Some(jlpt) => jlpt,\n        None => return Ok(KanjiResult::default()),\n    };\n\n    let total_len = jlpt_kanji.len();\n\n    let page_size = query.settings.page_size as usize;\n\n    let page_offset = query.page_offset(page_size);\n\n    let jlpt_kanji = jlpt_kanji\n        .into_iter()\n        .skip(page_offset)\n        .take(page_size)\n        .filter_map(|literal| kanji_retrieve.by_literal(*literal))\n      
  .cloned()\n        .collect::<Vec<_>>();\n\n    Ok(KanjiResult {\n        items: super::to_item(jlpt_kanji, query),\n        total_len,\n    })\n}\n"
  },
  {
    "path": "lib/search/src/lib.rs",
    "content": "pub mod engine;\npub mod executor;\npub mod kanji;\npub mod name;\npub mod query;\npub mod radical;\npub mod sentence;\npub mod word;\n\npub use executor::SearchExecutor;\n\nuse query::Query;\nuse types::jotoba::search::{help::SearchHelp, SearchTarget};\n\n/// Build a [`SearchHelp`] for cases without any search results\npub fn build_help(querytype: SearchTarget, query: &Query) -> Option<SearchHelp> {\n    let mut help = SearchHelp::default();\n\n    for qt in SearchTarget::iterate().filter(|i| *i != querytype) {\n        match qt {\n            SearchTarget::Kanji => help.kanji = kanji::guess_result(query),\n            SearchTarget::Sentences => {\n                help.sentences = SearchExecutor::new(sentence::Search::new(query)).guess()\n            }\n            SearchTarget::Names => {\n                help.names = SearchExecutor::new(name::Search::new(query)).guess()\n            }\n            SearchTarget::Words => {\n                help.words = SearchExecutor::new(word::Search::new(query)).guess()\n            }\n        }\n    }\n\n    if querytype == SearchTarget::Words {\n        //help.other_langs = word::guess_inp_language(query);\n    }\n\n    (!help.is_empty()).then(|| help)\n}\n"
  },
  {
    "path": "lib/search/src/name/mod.rs",
    "content": "mod order;\nmod producer;\n\nuse crate::{\n    executor::{producer::Producer, searchable::Searchable},\n    query::Query,\n};\nuse producer::{\n    foreign::ForeignProducer, kanji_reading::KreadingProducer, native::NativeProducer,\n    sequence::SeqProducer,\n};\nuse types::jotoba::names::Name;\n\nuse self::producer::native::split::SplitProducer;\n\npub struct Search<'a> {\n    query: &'a Query,\n    producer: Vec<Box<dyn Producer<Target = Self> + 'a>>,\n}\n\nimpl<'a> Search<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        let mut producer: Vec<Box<dyn Producer<Target = Self>>> = vec![];\n        producer.push(Box::new(SeqProducer::new(query)));\n        producer.push(Box::new(KreadingProducer::new(query)));\n        producer.push(Box::new(ForeignProducer::new(query)));\n        producer.push(Box::new(NativeProducer::new(query)));\n        producer.push(Box::new(SplitProducer::new(query)));\n        Self { query, producer }\n    }\n}\n\nimpl<'a> Searchable for Search<'a> {\n    type Item = &'static Name;\n    type OutItem = &'static Name;\n    type ResAdd = ();\n\n    #[inline]\n    fn to_output_item(&self, item: Self::Item) -> Self::OutItem {\n        item\n    }\n\n    #[inline]\n    fn get_producer<'s>(&'s self) -> &Vec<Box<dyn Producer<Target = Self> + 's>> {\n        &self.producer\n    }\n\n    #[inline]\n    fn get_query(&self) -> &Query {\n        self.query\n    }\n}\n"
  },
  {
    "path": "lib/search/src/name/order/foreign.rs",
    "content": "use engine::relevance::{data::SortData, RelevanceEngine};\nuse ngindex::{item::IndexItem, termset::TermSet};\nuse types::jotoba::names::Name;\n\npub struct ForeignOrder;\n\nimpl RelevanceEngine for ForeignOrder {\n    type OutItem = &'static Name;\n    type IndexItem = IndexItem<u32>;\n    type Query = TermSet;\n\n    #[inline]\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32 {\n        item.index_item().dice_weighted(item.query(), 0.1)\n    }\n}\n"
  },
  {
    "path": "lib/search/src/name/order/japanese.rs",
    "content": "use engine::relevance::{data::SortData, RelevanceEngine};\nuse ngindex::{item::IndexItem, termset::TermSet};\nuse types::jotoba::names::Name;\n\npub struct NativeOrder;\n\nimpl RelevanceEngine for NativeOrder {\n    type OutItem = &'static Name;\n    type IndexItem = IndexItem<u32>;\n    type Query = TermSet;\n\n    #[inline]\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32 {\n        item.index_item().dice_weighted(item.query(), 0.1)\n    }\n}\n"
  },
  {
    "path": "lib/search/src/name/order/mod.rs",
    "content": "pub mod foreign;\npub mod japanese;\n"
  },
  {
    "path": "lib/search/src/name/producer/foreign.rs",
    "content": "use engine::{pushable::FilteredMaxCounter, task::SearchTask};\n\nuse crate::{\n    engine::names::foreign::Engine,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    name::{order::foreign::ForeignOrder, Search},\n    query::{Query, QueryLang},\n};\n\npub struct ForeignProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> ForeignProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn foreign_task(&self) -> SearchTask<'static, Engine> {\n        let query = format_word(&self.query.query_str);\n        SearchTask::<Engine>::new(&query)\n            .with_custom_order(ForeignOrder)\n            .with_threshold(0.5)\n    }\n}\n\nimpl<'a> Producer for ForeignProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.foreign_task().find_to(out);\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.q_lang != QueryLang::Japanese\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        self.foreign_task().estimate_to(out);\n    }\n}\n\n#[inline]\nfn format_word(inp: &str) -> String {\n    let mut out = String::from(inp.to_lowercase());\n    for i in \".,[]() \\t\\\"'\\\\/-;:\".chars() {\n        out = out.replace(i, \" \");\n    }\n    out.to_lowercase()\n}\n"
  },
  {
    "path": "lib/search/src/name/producer/kanji_reading.rs",
    "content": "use crate::{\n    engine::names::native::Engine,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    name::Search,\n    query::Query,\n};\nuse engine::{pushable::FilteredMaxCounter, task::SearchTask};\nuse japanese::furigana::generate::{assign_readings, ReadingRetrieve};\nuse resources::retrieve::kanji::KanjiRetrieve;\nuse types::jotoba::names::Name;\n\npub struct KreadingProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> KreadingProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn search_task(&self) -> Option<SearchTask<'static, Engine>> {\n        let k_reading = self.query.form.as_kanji_reading()?;\n\n        let literal = k_reading.literal;\n        let reading = k_reading.reading.clone();\n\n        let task = SearchTask::<Engine>::new(k_reading.literal.to_string())\n            .with_result_filter(move |name| filter(name, &reading, literal).unwrap_or(false));\n\n        Some(task)\n    }\n}\n\nimpl<'a> Producer for KreadingProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        if let Some(mut task) = self.search_task() {\n            task.find_to(out);\n        }\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.form.is_kanji_reading()\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        if let Some(mut task) = self.search_task() {\n            task.estimate_to(out);\n        }\n    }\n}\n\n/// Search result filter function\nfn filter(name: &Name, reading: &str, literal: char) -> Option<bool> {\n    let kanji = name.kanji.as_ref()?;\n\n    let retrieve = NanoriRetrieve::new(resources::get().kanji());\n    let readings = assign_readings(retrieve, kanji, &name.kana)?;\n\n   
 Some(\n        readings\n            .iter()\n            .any(|i| i.0.contains(&literal.to_string()) && i.1.contains(&reading)),\n    )\n}\n\n/// Custom `ReadingRetrieve` implementing struct to include\n// nanori readings in reading retrieve function\nstruct NanoriRetrieve<'a> {\n    kanji_retrieve: KanjiRetrieve<'a>,\n}\n\nimpl<'a> NanoriRetrieve<'a> {\n    fn new(kanji_retrieve: KanjiRetrieve<'a>) -> Self {\n        Self { kanji_retrieve }\n    }\n}\n\nimpl<'a> ReadingRetrieve for NanoriRetrieve<'a> {\n    #[inline]\n    fn onyomi(&self, lit: char) -> Vec<String> {\n        self.kanji_retrieve.onyomi(lit)\n    }\n\n    #[inline]\n    fn kunyomi(&self, lit: char) -> Vec<String> {\n        self.kanji_retrieve.kunyomi(lit)\n    }\n\n    fn all(&self, lit: char) -> Vec<String> {\n        let res = resources::get().kanji();\n        let k = match res.by_literal(lit) {\n            Some(k) => k,\n            None => return vec![],\n        };\n\n        k.kunyomi\n            .clone()\n            .into_iter()\n            .chain(k.onyomi.clone())\n            .chain(k.nanori.clone())\n            .collect()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/name/producer/mod.rs",
    "content": "pub mod foreign;\npub mod kanji_reading;\npub mod native;\npub mod sequence;\n"
  },
  {
    "path": "lib/search/src/name/producer/native/mod.rs",
    "content": "pub mod split;\n\nuse crate::{\n    engine::names::native::Engine,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    name::{order::japanese::NativeOrder, Search},\n    query::{Query, QueryLang},\n};\nuse engine::{pushable::FilteredMaxCounter, task::SearchTask};\n\npub struct NativeProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> NativeProducer<'a> {\n    #[inline]\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    #[inline]\n    fn jp_task(&self) -> SearchTask<'static, Engine> {\n        SearchTask::<Engine>::new(&self.query.query_str)\n            .with_custom_order(NativeOrder)\n            .with_threshold(0.3)\n    }\n}\n\nimpl<'a> Producer for NativeProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.jp_task().find_to(out);\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.q_lang == QueryLang::Japanese\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        self.jp_task().estimate_to(out);\n    }\n}\n"
  },
  {
    "path": "lib/search/src/name/producer/native/split.rs",
    "content": "use engine::{\n    pushable::{FilteredMaxCounter, Pushable},\n    relevance::{data::SortData, item::RelItem, RelevanceEngine},\n    task::SearchTask,\n};\nuse ngindex::{item::IndexItem, termset::TermSet};\nuse sentence_reader::{output::ParseResult, Parser};\nuse types::jotoba::names::Name;\n\nuse crate::{\n    engine::names::native::Engine,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    name::{order::japanese::NativeOrder, Search},\n    query::Query,\n};\n\npub struct SplitProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> SplitProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn queries(&self) -> Vec<String> {\n        let splitted = Parser::new(&self.query.query_str);\n        match splitted.parse() {\n            ParseResult::Sentence(s) => s.iter().map(|p| p.get_normalized()).collect(),\n            ParseResult::InflectedWord(w) => vec![w.get_normalized()],\n            ParseResult::None => vec![],\n        }\n    }\n\n    fn run<C, P, O>(&self, cb: C, out: &mut P)\n    where\n        C: Fn(&mut SearchTask<'static, Engine>, &mut P),\n        P: Pushable<Item = O>,\n    {\n        let queries = self.queries();\n        let query_count = queries.len();\n        for (pos, query) in queries.into_iter().enumerate() {\n            let mut task = SearchTask::<Engine>::new(&query)\n                .with_limit(1)\n                .with_custom_order(SplitOrder::new(query_count, pos));\n\n            (cb)(&mut task, out);\n        }\n    }\n\n    fn find_to<P>(&self, out: &mut P)\n    where\n        P: Pushable<Item = RelItem<&'static Name>>,\n    {\n        self.run(\n            |engine, out| {\n                engine.find_to(out);\n            },\n            out,\n        );\n    }\n}\n\nimpl<'a> Producer for SplitProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as 
Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.find_to(out)\n    }\n\n    fn should_run(&self, already_found: usize) -> bool {\n        already_found < 10\n        //already_found == 0\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        self.run(|engine, out| engine.estimate_to(out), out);\n    }\n}\n\nstruct SplitOrder {\n    q_count: usize,\n    pos: usize,\n}\n\nimpl SplitOrder {\n    #[inline]\n    fn new(q_count: usize, pos: usize) -> Self {\n        Self { q_count, pos }\n    }\n}\n\nimpl RelevanceEngine for SplitOrder {\n    type OutItem = &'static Name;\n    type IndexItem = IndexItem<u32>;\n    type Query = TermSet;\n\n    #[inline]\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32 {\n        let sim = NativeOrder.score(item);\n        let rel = (self.q_count - self.pos) as f32;\n        sim * rel * 0.001\n    }\n}\n"
  },
  {
    "path": "lib/search/src/name/producer/sequence.rs",
    "content": "use crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    name::Search,\n    query::Query,\n};\nuse engine::{\n    pushable::{FilteredMaxCounter, Pushable},\n    relevance::item::RelItem,\n};\nuse types::jotoba::names::Name;\n\npub struct SeqProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> SeqProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn name(&self) -> Option<&'static Name> {\n        let seq = *self.query.form.as_sequence()?;\n        resources::get().names().by_sequence(seq)\n    }\n}\n\nimpl<'a> Producer for SeqProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        if let Some(name) = self.name() {\n            out.push(RelItem::new(name, 0.0));\n        }\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        if let Some(name) = self.name() {\n            out.push(name);\n        }\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.form.is_sequence()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/query/form.rs",
    "content": "use types::jotoba::kanji;\n\n/// The form the query was provided in\n#[derive(Debug, Clone, PartialEq, Hash, Default)]\npub enum Form {\n    /// A single word was provided\n    SingleWord,\n\n    /// Multiple words were provided\n    MultiWords,\n\n    /// Kanji reading based search eg. '気 ケ'\n    KanjiReading(kanji::reading::ReadingSearch),\n\n    /// Tag only. Implies query string to be empty\n    TagOnly,\n\n    /// Sequence Search\n    Sequence(u32),\n\n    /// Form was not recognized\n    #[default]\n    Undetected,\n}\n\nimpl Form {\n    #[inline]\n    pub fn as_kanji_reading(&self) -> Option<&kanji::reading::ReadingSearch> {\n        if let Self::KanjiReading(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n\n    /// Returns `true` if the form is [`KanjiReading`].\n    #[inline]\n    pub fn is_kanji_reading(&self) -> bool {\n        matches!(self, Self::KanjiReading(..))\n    }\n\n    /// Returns `true` if the form is [`TagOnly`].\n    ///\n    /// [`TagOnly`]: Form::TagOnly\n    #[inline]\n    pub fn is_tag_only(&self) -> bool {\n        matches!(self, Self::TagOnly)\n    }\n\n    /// Returns `true` if the form is no special type of search\n    #[inline]\n    pub fn is_normal(&self) -> bool {\n        match self {\n            Form::SingleWord | Form::MultiWords | Form::Undetected => true,\n            _ => false,\n        }\n    }\n\n    /// Returns `true` if the form is [`Sequence`].\n    ///\n    /// [`Sequence`]: Form::Sequence\n    #[inline]\n    pub fn is_sequence(&self) -> bool {\n        matches!(self, Self::Sequence(..))\n    }\n\n    pub fn as_sequence(&self) -> Option<&u32> {\n        if let Self::Sequence(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n}\n"
  },
  {
    "path": "lib/search/src/query/mod.rs",
    "content": "pub mod form;\npub mod parser;\npub mod prefix;\npub mod regex;\npub mod tags;\npub mod user_settings;\n\npub use form::Form;\npub use tags::Tag;\npub use user_settings::UserSettings;\n\nuse self::regex::RegexSQuery;\nuse percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};\nuse std::hash::Hash;\nuse types::jotoba::{\n    language::{LangParam, Language},\n    search::SearchTarget,\n    words::{misc::Misc, part_of_speech::PosSimple},\n};\n\nconst QUERY_ENCODE_SET: &AsciiSet = &NON_ALPHANUMERIC.add(b'/');\n\n/// A parsed query for a search request\n#[derive(Debug, Clone, PartialEq, Default, Hash)]\npub struct Query {\n    /// The raw query string from the user without any modifications\n    pub raw_query: String,\n    /// Parsed query string which will be used to find results\n    pub query_str: String,\n    /// Where to search {Words,Names,Kanji,Sentences}\n    pub target: SearchTarget,\n    /// Additional tags eg. #kanji or #jlpt4\n    pub tags: Vec<Tag>,\n    /// The form of the Query. Eg. 
KanjiReadingSearch or TagOnly\n    pub form: Form,\n    /// The language of the passed query string\n    pub q_lang: QueryLang,\n    /// User settings\n    pub settings: UserSettings,\n    /// Item offset based on the (current) page\n    pub page_offset: usize,\n    /// Current page\n    pub page: usize,\n    /// Word index within a sentence reader search\n    pub word_index: usize,\n    /// All terms the result has to contain to be shown\n    pub must_contain: Vec<String>,\n    /// Overwrite the users settings language temporarily\n    pub cust_lang: Option<Language>,\n    /// Regex query (for jp)\n    pub regex: Option<RegexSQuery>,\n}\n\n/// The language of the query content itself\n#[derive(Debug, Default, Clone, Copy, PartialEq, Hash)]\npub enum QueryLang {\n    Japanese,\n    Foreign,\n    Korean,\n    #[default]\n    Undetected,\n}\n\nimpl Query {\n    /// Returns true if the query has at least one pos tag\n    #[inline]\n    pub fn has_part_of_speech_tags(&self) -> bool {\n        self.get_part_of_speech_tags().next().is_some()\n    }\n\n    /// Returns an iterator over all search type tags\n    #[inline]\n    pub fn get_search_type_tags(&self) -> impl Iterator<Item = &SearchTarget> + '_ {\n        self.tags.iter().filter_map(|i| i.as_search_type())\n    }\n\n    /// Returns an iterator over all PosSimple tags\n    #[inline]\n    pub fn get_part_of_speech_tags(&self) -> impl Iterator<Item = &PosSimple> + '_ {\n        self.tags.iter().filter_map(|i| i.as_part_of_speech())\n    }\n\n    /// Returns an iterator over all Misc tags\n    #[inline]\n    pub fn get_misc_tags(&self) -> impl Iterator<Item = &Misc> + '_ {\n        self.tags.iter().filter_map(|i| i.as_misc())\n    }\n\n    /// Returns the result offset by a given page\n    #[inline]\n    pub fn page_offset(&self, page_size: usize) -> usize {\n        parser::calc_page_offset(self.page, page_size)\n    }\n\n    /// Returns `true` if query has `tag`\n    #[inline]\n    pub fn has_tag(&self, tag: Tag) -> 
bool {\n        self.tags.iter().any(|i| *i == tag)\n    }\n\n    /// Adds `n` pages to the query\n    pub fn add_page(&mut self, n: usize) {\n        self.page = (self.page + n).min(100);\n        self.page_offset += (self.settings.page_size as usize) * n;\n    }\n\n    /// Returns the original_query with search type tags omitted\n    #[inline]\n    pub fn without_search_type_tags(&self) -> String {\n        let (new_query, _) = parser::tags::extract_parse(&self.raw_query, |s| {\n            let p = parser::tags::parse(&s);\n            if p.is_empty() {\n                return (vec![], false);\n            }\n            let retain = p.iter().any(|i| i.is_search_type());\n            (p, retain)\n        });\n        new_query\n    }\n\n    /// Encodes the parsed query string using percent encoding\n    pub fn get_query_encoded(&self) -> String {\n        utf8_percent_encode(&self.query_str, QUERY_ENCODE_SET).to_string()\n    }\n\n    /// Returns the language with lang override applied\n    pub fn get_search_lang(&self) -> Language {\n        self.cust_lang.unwrap_or(self.settings.user_lang)\n    }\n\n    /// Shortcut for query.settings.user_lang. This does not apply overwritten language. For that use `get_search_lang`\n    #[inline]\n    pub fn lang(&self) -> Language {\n        self.settings.user_lang\n    }\n\n    /// Returns the queries lang params\n    #[inline]\n    pub fn lang_param(&self) -> LangParam {\n        self.settings.lang_param()\n    }\n\n    /// Shortcut for query.settings.show_english\n    #[inline]\n    pub fn show_english(&self) -> bool {\n        self.settings.show_english\n    }\n\n    /// Returns `true` if the query is a regex query\n    #[inline]\n    pub fn is_regex(&self) -> bool {\n        self.regex.is_some()\n    }\n\n    /// Returns a `RegexSQuery` if the query contains a valid regex\n    pub fn as_regex_query(&self) -> Option<&RegexSQuery> {\n        self.regex.as_ref()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/query/parser/lang.rs",
    "content": "use crate::query::{regex::RegexSQuery, QueryLang};\nuse jp_utils::JapaneseExt;\nuse std::cmp::Ordering;\n\nuse super::JAPANESE_THRESHOLD;\n\n/// Tries to determine between Japanese/Non japanese\npub fn parse(query: &str) -> QueryLang {\n    let query = strip_regex(query).unwrap_or_else(|| query.to_string());\n    if utils::korean::is_hangul_str(&query) {\n        return QueryLang::Korean;\n    }\n\n    match get_jp_part(&query).cmp(&JAPANESE_THRESHOLD) {\n        Ordering::Equal => QueryLang::Undetected,\n        Ordering::Less => QueryLang::Foreign,\n        Ordering::Greater => QueryLang::Japanese,\n    }\n}\n\n/// Returns a number 0-100 of japanese character ratio\nfn get_jp_part(inp: &str) -> usize {\n    let mut total = 0;\n    let mut japanese = 0;\n    for c in inp.chars() {\n        total += 1;\n        if c.is_japanese() {\n            japanese += 1;\n        }\n    }\n\n    ((japanese as f32 / total as f32) * 100f32) as usize\n}\n\n/// Removes regex parts from a query. Returns `None` if `query` does not contain regex symbols\nfn strip_regex(query: &str) -> Option<String> {\n    Some(RegexSQuery::new(query)?.get_chars().into_iter().collect())\n}\n"
  },
  {
    "path": "lib/search/src/query/parser/mod.rs",
    "content": "pub mod lang;\npub(crate) mod prefix;\npub mod req_terms;\npub(crate) mod tags;\n\nuse super::{prefix::SearchPrefix, regex::RegexSQuery, Form, Query, Tag, UserSettings};\nuse jp_utils::JapaneseExt;\nuse types::jotoba::{kanji, language::Language as ContentLanguage, search::SearchTarget};\n\n/// Max amount of characters a query is allowed to have\npub const MAX_QUERY_LEN: usize = 400;\n\n/// Amount of characters (in percent) that have to be Japanese characters\n/// in order to handle the input as Japanese text\npub const JAPANESE_THRESHOLD: usize = 40;\n\n/// Represents a query\npub struct QueryParser {\n    /// Where to search {Words,Names,Kanji,Sentences}\n    q_type: SearchTarget,\n    /// The unmodified query from the search-input\n    raw_query: String,\n    /// Users settings\n    user_settings: UserSettings,\n    /// Item offset based on the picked page\n    page_offset: usize,\n    /// Current page\n    page: usize,\n    /// Word index for the sentence reader\n    word_index: usize,\n    /// Overwrite the users settings language\n    language_override: Option<ContentLanguage>,\n}\n\nimpl QueryParser {\n    /// Create a new QueryParser\n    pub fn new(\n        raw_query: String,\n        q_type: SearchTarget,\n        user_settings: UserSettings,\n    ) -> QueryParser {\n        QueryParser {\n            raw_query,\n            q_type,\n            user_settings,\n            page_offset: 0,\n            page: 0,\n            word_index: 0,\n            language_override: None,\n        }\n    }\n\n    #[inline]\n    pub fn with_lang_overwrite(mut self, lang: ContentLanguage) -> Self {\n        self.language_override = Some(lang);\n        self\n    }\n\n    #[inline]\n    pub fn with_word_index(mut self, word_index: usize) -> Self {\n        self.word_index = word_index;\n        self\n    }\n\n    #[inline]\n    pub fn with_page(mut self, page: usize) -> Self {\n        self.page = page;\n        self.page_offset = calc_page_offset(page, 
self.user_settings.page_size as usize);\n        self\n    }\n\n    /// Parses a user query into Query\n    pub fn parse(mut self) -> Option<Query> {\n        let (stripped, s_prefix) = prefix::parse_prefix(&self.raw_query);\n        if let Some(SearchPrefix::LangOverwrite(r#lang_overwrite)) = s_prefix {\n            self.language_override = Some(lang_overwrite);\n        }\n\n        let (new_query, tags) = Self::extract_tags(&stripped);\n        let (new_query, must_contain) = req_terms::parse(&new_query);\n        let query_str: String = new_query\n            .trim()\n            .chars()\n            .into_iter()\n            .take(MAX_QUERY_LEN)\n            .collect();\n\n        // Don't allow empty queries\n        if query_str.is_empty() && !tags.iter().any(|i| i.is_producer()) {\n            return None;\n        }\n\n        let q_lang = lang::parse(&query_str);\n        let target = self.get_search_target(&tags);\n        let form = self.parse_form(&query_str, &tags, s_prefix);\n\n        let regex = RegexSQuery::new(&query_str);\n\n        Some(Query {\n            q_lang,\n            target,\n            form,\n            tags,\n            query_str,\n            raw_query: self.raw_query,\n            settings: self.user_settings,\n            page_offset: self.page_offset,\n            page: self.page,\n            word_index: self.word_index,\n            cust_lang: self.language_override,\n            must_contain,\n            regex,\n        })\n    }\n\n    // Extracts all tags from `query_str` and returns a new String along with the extracted tags\n    #[inline]\n    fn extract_tags(query_str: &str) -> (String, Vec<Tag>) {\n        tags::extract_parse(query_str, |t_s| {\n            let s = t_s.to_lowercase();\n            (tags::parse(&s), true)\n        })\n    }\n\n    /// Parses the QueryType based on the user selection and tags\n    #[inline]\n    fn get_search_target(&self, tags: &[Tag]) -> SearchTarget {\n        tags.iter()\n       
     .filter_map(|i| i.as_search_type())\n            .copied()\n            .next()\n            .unwrap_or(self.q_type)\n    }\n\n    fn parse_form(&self, query: &str, tags: &[Tag], s_prefix: Option<SearchPrefix>) -> Form {\n        // Sequence search\n        if let Some(SearchPrefix::BySequence(r#seq)) = s_prefix {\n            return Form::Sequence(seq);\n        }\n\n        // Tag only search\n        if query.is_empty() && tags.iter().any(|i| i.is_producer()) {\n            return Form::TagOnly;\n        }\n\n        // Detect a kanji reading query\n        if let Some(kr) = self.parse_kanji_reading(query) {\n            return Form::KanjiReading(kr);\n        }\n\n        // Japanese only input\n        if query.is_japanese() {\n            return Form::SingleWord;\n        }\n\n        // Non Japanese input\n        if !query.has_japanese() {\n            // Assuming every other supported language separates\n            // words with spaces, unlike Japanese which does not use word separators\n            return if query.contains(' ') {\n                Form::MultiWords\n            } else {\n                Form::SingleWord\n            };\n        }\n\n        Form::Undetected\n    }\n\n    /// Returns Some(KanjiReading) if the query is a kanji reading query\n    fn parse_kanji_reading(&self, query: &str) -> Option<kanji::reading::ReadingSearch> {\n        // Format of kanji query: '<Kanji> <reading>'\n        if utils::real_string_len(query) < 3 || !query.contains(' ') {\n            return None;\n        }\n\n        let split: Vec<_> = query.split(' ').collect();\n\n        let kanji_lit = split[0].trim();\n\n        if kanji_lit.is_kanji()\n                && format_kanji_reading(split[1]).is_japanese()\n                // don't allow queries like '音楽 おと'\n                && utils::real_string_len(kanji_lit) == 1\n        {\n            // Kanji detected\n            return Some(kanji::reading::ReadingSearch {\n                literal: 
split[0].chars().next().unwrap(),\n                reading: split[1].to_string(),\n            });\n        }\n\n        None\n    }\n}\n\n#[inline]\npub fn format_kanji_reading(s: &str) -> String {\n    s.replace('.', \"\").replace('-', \"\").replace(' ', \"\")\n}\n\npub fn calc_page_offset(page: usize, page_size: usize) -> usize {\n    page.saturating_sub(1) * page_size\n}\n"
  },
  {
    "path": "lib/search/src/query/parser/prefix.rs",
    "content": "use crate::query::prefix::SearchPrefix;\nuse std::str::FromStr;\nuse types::jotoba::language::Language;\n\n/// Strinps and parses a `SearchPrefix` from a `query`\npub fn parse_prefix(query: &str) -> (&str, Option<SearchPrefix>) {\n    if let (new_query, Some(lang)) = try_lang_prefix(query) {\n        return (new_query, Some(SearchPrefix::LangOverwrite(lang)));\n    }\n\n    if let Some(seq_id) = try_sequence(query) {\n        return (query, Some(SearchPrefix::BySequence(seq_id)));\n    }\n\n    (query, None)\n}\n\nfn try_lang_prefix(query: &str) -> (&str, Option<Language>) {\n    let split_pos = query.find(':');\n    if split_pos.is_none() || *split_pos.as_ref().unwrap() > 3 || query.len() < 5 {\n        return (query, None);\n    }\n\n    let split_pos = split_pos.unwrap();\n\n    let lang_str = &query[..split_pos].trim();\n\n    let lang = match Language::from_str(lang_str) {\n        Ok(lang) => lang,\n        Err(_) => {\n            return (query, None);\n        }\n    };\n\n    let new_query = query[split_pos + 1..].trim();\n\n    (new_query, Some(lang))\n}\n\n#[inline]\nfn try_sequence(query: &str) -> Option<u32> {\n    if let Some(seq_str) = query.strip_prefix(\"seq:\") {\n        let seq_str = seq_str.trim();\n        let parsed: u32 = seq_str.parse().ok()?;\n        return Some(parsed);\n    }\n    None\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_lang_override_split() {\n        let query = \"eng: dog\";\n        let (new_query, language) = try_lang_prefix(query);\n        assert_eq!(new_query, \"dog\");\n        assert_eq!(language, Some(Language::English));\n    }\n\n    #[test]\n    fn test_lang_override_split_invalid() {\n        let query = \"eng:\";\n        let (new_query, language) = try_lang_prefix(query);\n        assert_eq!(new_query, \"eng:\");\n        assert_eq!(language, None);\n\n        let query = \"egn:\";\n        let (new_query, language) = try_lang_prefix(query);\n        
assert_eq!(new_query, \"egn:\");\n        assert_eq!(language, None);\n    }\n}\n"
  },
  {
    "path": "lib/search/src/query/parser/req_terms.rs",
    "content": "use once_cell::sync::Lazy;\nuse regex::Regex;\n\npub const QUOTS_CONTENT: Lazy<Regex> = Lazy::new(|| Regex::new(r#\"\"[^\"]+\"\"#).unwrap());\n\npub fn parse(inp: &str) -> (String, Vec<String>) {\n    if !inp.contains('\\\"') {\n        return (inp.to_string(), vec![]);\n    }\n\n    let mut terms = vec![];\n\n    let mut new_query = inp.to_string();\n\n    let mut delta = 0;\n    for quots in QUOTS_CONTENT.find_iter(inp) {\n        let r = quots.range();\n\n        // strip quotes, we want the content\n        let s = r.start - delta;\n        let e = r.end - delta - 1;\n        new_query.replace_range(s..s + 1, \"\");\n        new_query.replace_range(e - 1..e, \"\");\n\n        let s = r.start + 1;\n        let e = r.end - 1;\n        let term = &inp[s..e].trim();\n        if !term.is_empty() {\n            terms.push(term.to_string().to_lowercase());\n        }\n        delta += 2;\n    }\n\n    (new_query, terms)\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_parse_quote_cnt() {\n        let inp = r#\"this is \"some\" text that \"contains some\" quotes\" lol\"#;\n        let res = vec![\"some\", \"contains some\"];\n        assert_eq!(parse(inp).1, res);\n    }\n}\n"
  },
  {
    "path": "lib/search/src/query/parser/tags.rs",
    "content": "use crate::query::Tag;\nuse once_cell::sync::Lazy;\nuse regex::Regex;\nuse std::str::FromStr;\nuse types::jotoba::{\n    search::SearchTarget,\n    sentences,\n    words::{misc::Misc, part_of_speech::PosSimple},\n};\nuse utils::trim_string_end;\n\n/// Regex for finding tags within a query.\nstatic TAG_REGEX: Lazy<Regex> = Lazy::new(|| regex::Regex::new(\"#[a-zA-Z0-9\\\\-]+\").unwrap());\n\n/// Extracts all tags from the query and returns a new one without tags along with those tags which were extracted\npub fn extract_parse<'a, F>(inp: &'a str, parse: F) -> (String, Vec<Tag>)\nwhere\n    F: Fn(&str) -> (Vec<Tag>, bool),\n{\n    let mut new_out = inp.to_string();\n\n    let mut tags = vec![];\n\n    // We edit the string so we have to keep track of how many bytes\n    // we already removed in order to remove the correct range from the string\n    let mut delta = 0;\n    for m in TAG_REGEX.find_iter(inp) {\n        let tag_str = m.as_str();\n\n        let (parsed_tags, remove) = parse(tag_str);\n\n        if !parsed_tags.is_empty() {\n            tags.extend(parsed_tags);\n        }\n\n        if !remove {\n            continue;\n        }\n\n        // Remove tag-str from query\n        let r = m.range();\n        let s = r.start - delta;\n        let mut e = r.end - delta;\n\n        // Strip space from tag too\n        if new_out.len() > e + 1 && inp.is_char_boundary(e + 1) && &inp[e..e + 1] == \" \" {\n            e += 1;\n            delta += 1;\n        }\n        new_out.replace_range(s..e, \"\");\n        delta += r.len();\n    }\n\n    (trim_string_end(new_out), tags)\n}\n\n/// Parse a tag from a string\npub fn parse(s: &str) -> Vec<Tag> {\n    let mut tags: Vec<Tag> = vec![];\n\n    if let Some(tag) = s.to_lowercase().strip_prefix(\"#\") {\n        match tag {\n            \"hidden\" | \"hide\" => tags.push(Tag::Hidden),\n            \"irrichidan\" | \"irregularichidan\" | \"irregular-ichidan\" => {\n                
tags.push(Tag::IrregularIruEru);\n            }\n            \"katakana\" => tags.push(Tag::Katakana),\n            _ => (),\n        }\n    }\n\n    if let Some(tag) = parse_genki_tag(s) {\n        tags.push(tag);\n    }\n    if let Some(tag) = parse_jlpt_tag(s) {\n        tags.push(tag);\n    }\n    if let Some(tag) = parse_search_type(s) {\n        tags.push(tag);\n    }\n    if let Some(pos) = PosSimple::from_str(&s[1..]).ok() {\n        tags.push(Tag::PartOfSpeech(pos));\n    }\n    if let Some(misc) = Misc::from_str(&s[1..]).ok() {\n        tags.push(Tag::Misc(misc));\n    }\n    if let Some(sentence_tag) = sentences::Tag::from_str(&s[1..]).ok() {\n        tags.push(Tag::SentenceTag(sentence_tag));\n    }\n\n    tags\n}\n\n/// Returns `Some(u8)` if `s` is a valid N/jlpt-tag\nfn parse_jlpt_tag(s: &str) -> Option<Tag> {\n    let jlpt = s\n        .strip_prefix(\"#n\")\n        .or_else(|| s.strip_prefix(\"#jlpt\"))?\n        .parse::<u8>()\n        .ok()?\n        .min(5)\n        .max(1);\n    Some(Tag::Jlpt(jlpt))\n}\n\n/// Returns `Some(u8)` if `s` is a valid genki-tag\nfn parse_genki_tag(s: &str) -> Option<Tag> {\n    let genki = s.strip_prefix(\"#genki\")?.parse::<u8>().ok()?.max(3).min(23);\n    Some(Tag::GenkiLesson(genki))\n}\n\n/// Parse only search type\nfn parse_search_type(s: &str) -> Option<Tag> {\n    Some(match s[1..].to_lowercase().as_str() {\n        \"kanji\" => Tag::SearchType(SearchTarget::Kanji),\n        \"sentence\" | \"sentences\" => Tag::SearchType(SearchTarget::Sentences),\n        \"name\" | \"names\" => Tag::SearchType(SearchTarget::Names),\n        \"word\" | \"words\" => Tag::SearchType(SearchTarget::Words),\n        \"abbreviation\" | \"abbrev\" => Tag::Misc(Misc::Abbreviation),\n        \"uwk\" => Tag::Misc(Misc::UsuallyWrittenInKana),\n        _ => return None,\n    })\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    #[test]\n    fn test_parse_jlpt_tag_parsing() {\n        assert_eq!(parse_jlpt_tag(\"#n4\"), 
Some(Tag::Jlpt(4)));\n    }\n\n    #[test]\n    fn test_parse_genki_tag_parsing() {\n        assert_eq!(parse_genki_tag(\"#genki3\"), Some(Tag::GenkiLesson(3)));\n        assert_eq!(parse_genki_tag(\"#genki23\"), Some(Tag::GenkiLesson(23)));\n    }\n}\n"
  },
  {
    "path": "lib/search/src/query/prefix.rs",
    "content": "use types::jotoba::language::Language;\n\n/// Prefix of a search query. eg 'seq: 1234'\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub enum SearchPrefix {\n    /// A custom language prefix. Eg: 'rus: Россия'\n    LangOverwrite(Language),\n    /// Search by sequence-id within jmdict\n    BySequence(u32),\n}\n"
  },
  {
    "path": "lib/search/src/query/regex.rs",
    "content": "//! A regex like search query to search in words with wildcards\n//!\n//! # Example\n//!\n//! \"宇宙*行士\" => \"宇宙飛行士\"\n//!\n//! # Supported syntax\n//! `*` - Allows 0-n other characters\n//! `?` - Allows 1 other characters\n//!\n//! # Note\n//! All queries containing (custom)regex syntax will be handled as full-word matches. In other words if\n//! a query contains regex syntax, only full words will be matched. If words should also be open to\n//! an end (eg. right variable) then a regex charecter has to be placed at the end as well\n\nuse std::hash::Hash;\n\nuse regex::Regex;\n\n/// All characters treated as regex characters\npub const REGEX_CHARS: &[char] = &['*', '?', '?'];\n\n/// Regex Search query. Can be used to match words\n#[derive(Clone, Debug)]\npub struct RegexSQuery {\n    query: String,\n    regex: Regex,\n}\n\nimpl RegexSQuery {\n    /// Create a new regex query. Returns `None` if invalid or no regex given\n    pub fn new(query: &str) -> Option<Self> {\n        let query = adjust_regex(query);\n\n        if !Self::is_regex(&query) {\n            return None;\n        }\n\n        let regex = Regex::new(&Self::convert_regex(&query)).ok()?;\n        Some(RegexSQuery { query, regex })\n    }\n\n    /// Returns `true` if a word matches the regex query\n    #[inline]\n    pub fn matches(&self, word: &str) -> bool {\n        self.regex.is_match(word)\n    }\n\n    /// Returns all characters that don't represent regex syntax\n    pub fn get_chars(&self) -> Vec<char> {\n        let mut out = Vec::with_capacity(self.query.len());\n        for c in self.query.chars() {\n            if !REGEX_CHARS.contains(&c) {\n                out.push(c);\n            }\n        }\n        out\n    }\n\n    /// Returns a real regex expression which will be used to match words\n    fn convert_regex(query: &str) -> String {\n        let mut out = String::with_capacity(query.len() + 2);\n        out.push('^');\n        out.push_str(\n            &query\n           
     .replace('*', \".*\")\n                .replace('?', \".{1}\")\n                .replace('+', \".{1}\"),\n        );\n        if !out.ends_with('$') {\n            out.push('$');\n        }\n        out\n    }\n\n    /// Returns `true` if query can be interpreted as regex query\n    #[inline]\n    fn is_regex(query: &str) -> bool {\n        let query = adjust_regex(query);\n        query.contains('*') || query.contains('+') || query.contains('?')\n    }\n\n    /// Get a reference to the regex squery's query.\n    pub fn query(&self) -> &str {\n        self.query.as_ref()\n    }\n}\n\n/// Adjusts the query to a consistent format\n#[inline]\nfn adjust_regex(query: &str) -> String {\n    query\n        .replace('＊', \"*\")\n        .replace('＋', \"+\")\n        .replace('？', \"?\")\n}\n\nimpl Hash for RegexSQuery {\n    #[inline]\n    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {\n        self.query.hash(state);\n    }\n}\n\nimpl PartialEq for RegexSQuery {\n    #[inline]\n    fn eq(&self, other: &Self) -> bool {\n        self.query == other.query\n    }\n}\n\nimpl Eq for RegexSQuery {}\n"
  },
  {
    "path": "lib/search/src/query/tags.rs",
    "content": "use types::jotoba::{\n    search::SearchTarget,\n    sentences,\n    words::{misc::Misc, part_of_speech::PosSimple},\n};\n\n/// Hashtag based search tags\n#[derive(Debug, Clone, Copy, PartialEq, Hash)]\npub enum Tag {\n    // Producer tags\n    PartOfSpeech(PosSimple),\n    Misc(Misc),\n    Jlpt(u8),\n    GenkiLesson(u8),\n    Katakana,\n    SentenceTag(sentences::Tag),\n    IrregularIruEru,\n\n    // Non producer\n    SearchType(SearchTarget),\n    Hidden,\n}\n\nimpl Tag {\n    /// Returns true if the tag can be used without a query\n    #[inline]\n    pub fn is_producer(&self) -> bool {\n        !self.is_search_type() && !self.is_hidden()\n    }\n\n    /// Returns `true` if the tag is [`SearchType`].\n    #[inline]\n    pub fn is_search_type(&self) -> bool {\n        matches!(self, Self::SearchType(..))\n    }\n\n    /// Returns `true` if the tag is [`PartOfSpeech`].\n    #[inline]\n    pub fn is_part_of_speech(&self) -> bool {\n        matches!(self, Self::PartOfSpeech(..))\n    }\n\n    #[inline]\n    pub fn as_search_type(&self) -> Option<&SearchTarget> {\n        if let Self::SearchType(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n\n    #[inline]\n    pub fn as_part_of_speech(&self) -> Option<&PosSimple> {\n        if let Self::PartOfSpeech(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n\n    /// Returns `true` if the tag is [`Misc`].\n    ///\n    /// [`Misc`]: Tag::Misc\n    #[inline]\n    pub fn is_misc(&self) -> bool {\n        matches!(self, Self::Misc(..))\n    }\n\n    #[inline]\n    pub fn as_misc(&self) -> Option<&Misc> {\n        if let Self::Misc(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n\n    /// Returns `true` if the tag is [`Jlpt`].\n    ///\n    /// [`Jlpt`]: Tag::Jlpt\n    #[inline]\n    pub fn is_jlpt(&self) -> bool {\n        matches!(self, Self::Jlpt(..))\n    }\n\n    #[inline]\n    pub fn 
as_jlpt(&self) -> Option<u8> {\n        if let Self::Jlpt(v) = self {\n            Some(*v)\n        } else {\n            None\n        }\n    }\n\n    /// Returns `true` if the tag is [`GenkiLesson`].\n    ///\n    /// [`GenkiLesson`]: Tag::GenkiLesson\n    #[inline]\n    pub fn is_genki_lesson(&self) -> bool {\n        matches!(self, Self::GenkiLesson(..))\n    }\n\n    #[inline]\n    pub fn as_genki_lesson(&self) -> Option<u8> {\n        if let Self::GenkiLesson(v) = self {\n            Some(*v)\n        } else {\n            None\n        }\n    }\n\n    /// Returns `true` if the tag is [`IrregularIruEru`].\n    ///\n    /// [`IrregularIruEru`]: Tag::IrregularIruEru\n    pub fn is_irregular_iru_eru(&self) -> bool {\n        matches!(self, Self::IrregularIruEru)\n    }\n\n    /// Returns `true` if the tag is [`Hidden`].\n    ///\n    /// [`Hidden`]: Tag::Hidden\n    #[must_use]\n    pub fn is_hidden(&self) -> bool {\n        matches!(self, Self::Hidden)\n    }\n\n    /// Returns `true` if the tag is [`SentenceTag`].\n    ///\n    /// [`SentenceTag`]: Tag::SentenceTag\n    #[must_use]\n    #[inline]\n    pub fn is_sentence_tag(&self) -> bool {\n        matches!(self, Self::SentenceTag(..))\n    }\n\n    #[inline]\n    pub fn as_sentence_tag(&self) -> Option<&sentences::Tag> {\n        if let Self::SentenceTag(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n\n    /// Returns `true` if the tag is [`Katakana`].\n    ///\n    /// [`Katakana`]: Tag::Katakana\n    #[must_use]\n    pub fn is_katakana(&self) -> bool {\n        matches!(self, Self::Katakana)\n    }\n}\n"
  },
  {
    "path": "lib/search/src/query/user_settings.rs",
    "content": "use std::hash::{Hash, Hasher};\nuse types::jotoba::language::{LangParam, Language};\n\n/// In-cookie saved personalized settings by an user\n#[derive(Debug, Clone, Copy)]\npub struct UserSettings {\n    pub user_lang: Language,\n    pub page_lang: localization::language::Language,\n    pub show_english: bool,\n    pub english_on_top: bool,\n    pub page_size: u32,\n    pub show_example_sentences: bool,\n    pub sentence_furigana: bool,\n}\n\nimpl UserSettings {\n    /// Returns `true` if an action has to be done for english too. This\n    /// Is the case if the user wants to see enlgish results as well but\n    /// didn't set english as main language\n    #[inline]\n    pub fn show_english(&self) -> bool {\n        self.show_english && self.user_lang != Language::English\n    }\n\n    #[inline]\n    pub fn language(&self) -> Language {\n        self.user_lang\n    }\n\n    /// Returns a LangParam respecting the users settings language preferences\n    #[inline]\n    pub fn lang_param(&self) -> LangParam {\n        LangParam::with_en_raw(self.user_lang, self.show_english())\n    }\n}\n\nimpl Default for UserSettings {\n    #[inline]\n    fn default() -> Self {\n        Self {\n            show_english: true,\n            user_lang: Language::default(),\n            page_lang: localization::language::Language::default(),\n            english_on_top: false,\n            page_size: 10,\n            show_example_sentences: true,\n            sentence_furigana: true,\n        }\n    }\n}\n\nimpl PartialEq for UserSettings {\n    #[inline]\n    fn eq(&self, other: &Self) -> bool {\n        self.user_lang == other.user_lang && self.show_english == other.show_english\n    }\n}\n\nimpl Hash for UserSettings {\n    #[inline]\n    fn hash<H: Hasher>(&self, state: &mut H) {\n        self.user_lang.hash(state);\n        self.show_english.hash(state);\n    }\n}\n"
  },
  {
    "path": "lib/search/src/radical/mod.rs",
    "content": "/// Radical word search\npub mod word;\n\nuse std::collections::HashSet;\n\n/// Finds radicals by its meanings\npub fn meaning_search(query: &str) -> HashSet<char> {\n    crate::engine::radical::find(query)\n        .into_iter()\n        .map(|j| j.literal)\n        .collect()\n}\n"
  },
  {
    "path": "lib/search/src/radical/word/foreign.rs",
    "content": "use std::collections::HashSet;\n\nuse crate::{engine::words::foreign::Engine, word::order::foreign::ForeignOrder};\nuse engine::{result::SearchResult, task::SearchTask};\nuse jp_utils::JapaneseExt;\nuse types::jotoba::{language::Language, words::Word};\n\n/// Amount of words to return in a search for radicals\nconst WORD_LIMIT: usize = 3;\n\n/// Search for radicals in words by a foreign query\npub struct Search<'a> {\n    query: &'a str,\n    lang: Language,\n}\n\nimpl<'a> Search<'a> {\n    #[inline]\n    pub fn new(query: &'a str, lang: Language) -> Self {\n        Self { query, lang }\n    }\n\n    /// Does a kana word-search and returns some likely radicals for the given query\n    #[inline]\n    pub fn run(&self) -> HashSet<char> {\n        let mut search_task = self.search_task();\n        self.select_kanji(search_task.find())\n    }\n\n    #[inline]\n    fn search_task(&self) -> SearchTask<'static, Engine> {\n        SearchTask::with_language(&self.query, self.lang)\n            .with_custom_order(ForeignOrder::new())\n            .with_limit(WORD_LIMIT)\n    }\n\n    fn select_kanji(&self, res: SearchResult<&Word>) -> HashSet<char> {\n        let kanji_retr = resources::get().kanji();\n        res.into_iter()\n            .filter(|word| word.get_reading().reading == self.query)\n            .map(|word| word.get_reading().reading.chars().filter(|i| i.is_kanji()))\n            .flatten()\n            .filter_map(|kanji| kanji_retr.by_literal(kanji).map(|i| &i.parts))\n            .flatten()\n            .copied()\n            .take(10)\n            .collect()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/radical/word/mod.rs",
    "content": "pub mod foreign;\npub mod romaji;\n\npub use foreign::Search as ForeignSearch;\npub use romaji::Search as RomajiSearch;\n"
  },
  {
    "path": "lib/search/src/radical/word/romaji.rs",
    "content": "use crate::{engine::words::native::Engine, word::order::native::NativeOrder};\nuse engine::{result::SearchResult, task::SearchTask};\nuse jp_utils::JapaneseExt;\nuse std::collections::HashSet;\nuse types::jotoba::words::Word;\n\n/// Amount of words to return in a search for radicals\nconst WORD_LIMIT: usize = 3;\n\n/// Search for radicals in words by a foreign query\npub struct Search<'a> {\n    query: &'a str,\n}\n\nimpl<'a> Search<'a> {\n    #[inline]\n    pub fn new(query: &'a str) -> Self {\n        Self { query }\n    }\n\n    /// Does a kana word-search and returns some likely radicals for the given query\n    #[inline]\n    pub fn run(&self) -> HashSet<char> {\n        let mut search_task = self.search_task();\n        self.select_kanji(search_task.find())\n    }\n\n    #[inline]\n    fn search_task(&self) -> SearchTask<'static, Engine> {\n        SearchTask::new(&self.query)\n            .with_limit(WORD_LIMIT)\n            .with_threshold(0.8)\n            .with_custom_order(NativeOrder::new(self.query.to_string()))\n    }\n\n    fn select_kanji(&self, res: SearchResult<&Word>) -> HashSet<char> {\n        let kanji_retr = resources::get().kanji();\n        res.into_iter()\n            .map(|word| word.get_reading().reading.chars().filter(|i| i.is_kanji()))\n            .flatten()\n            .filter_map(|kanji| kanji_retr.by_literal(kanji).map(|i| &i.parts))\n            .flatten()\n            .copied()\n            .take(10)\n            .collect()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/sentence/mod.rs",
    "content": "pub mod order;\nmod producer;\npub mod result;\n\nuse super::query::Query;\nuse crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::Tag,\n};\nuse producer::{\n    foreign::ForeignProducer, native::NativeProducer, sequence::SequenceProducer, tag::TagProducer,\n};\nuse result::ResData;\nuse types::jotoba::{language::Language, sentences::Sentence};\n\npub struct Search<'a> {\n    query: &'a Query,\n    producer: Vec<Box<dyn Producer<Target = Self> + 'a>>,\n}\n\nimpl<'a> Search<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        let mut producer: Vec<Box<dyn Producer<Target = Self>>> = vec![\n            Box::new(SequenceProducer::new(query)),\n            Box::new(ForeignProducer::new(query, query.lang())),\n            Box::new(TagProducer::new(query)),\n            Box::new(NativeProducer::new(query, query.lang())),\n        ];\n\n        if query.lang() != Language::English && query.show_english() {\n            producer.push(Box::new(ForeignProducer::new(query, Language::English)));\n            producer.push(Box::new(NativeProducer::new(query, Language::English)));\n        }\n\n        Self { query, producer }\n    }\n}\n\nimpl<'a> Searchable for Search<'a> {\n    type ResAdd = ResData;\n    type OutItem = result::Sentence;\n    type Item = &'static Sentence;\n\n    fn get_producer<'s>(&'s self) -> &Vec<Box<dyn Producer<Target = Self> + 's>> {\n        &self.producer\n    }\n\n    fn mod_output(&self, out: &mut OutputBuilder<Self::Item, Self::ResAdd>) {\n        out.output_add = ResData::new(self.query.has_tag(Tag::Hidden));\n    }\n\n    #[inline]\n    fn to_output_item(&self, item: Self::Item) -> Self::OutItem {\n        result::Sentence::from_m_sentence(item, self.query.lang_param()).unwrap()\n    }\n\n    fn get_query(&self) -> &Query {\n        self.query\n    }\n\n    #[inline]\n    fn filter(&self, item: &Self::Item) -> bool {\n        
!producer::filter::filter_sentence(self.query, item)\n    }\n\n    #[inline]\n    fn max_top_dist(&self) -> Option<f32> {\n        Some(0.9)\n        //None\n    }\n}\n"
  },
  {
    "path": "lib/search/src/sentence/order/foreign.rs",
    "content": "use engine::relevance::{data::SortData, RelevanceEngine};\nuse sparse_vec::SpVec32;\nuse types::jotoba::{language::Language, sentences::Sentence};\nuse vsm::doc_vec::DocVector;\n\npub struct ForeignOrder {\n    lang: Language,\n}\n\nimpl ForeignOrder {\n    pub fn new(lang: Language) -> Self {\n        Self { lang }\n    }\n}\n\nimpl RelevanceEngine for ForeignOrder {\n    type OutItem = &'static Sentence;\n    type IndexItem = DocVector<u32>;\n    type Query = SpVec32;\n\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32 {\n        let mut rel = item.vec_similarity();\n\n        if !item.item().has_translation(self.lang) {\n            rel *= 0.8;\n        }\n\n        rel\n    }\n}\n"
  },
  {
    "path": "lib/search/src/sentence/order/mod.rs",
    "content": "pub mod foreign;\npub mod native;\n"
  },
  {
    "path": "lib/search/src/sentence/order/native.rs",
    "content": "use engine::relevance::{data::SortData, RelevanceEngine};\nuse sparse_vec::{SpVec32, VecExt};\nuse types::jotoba::{language::Language, sentences::Sentence};\nuse vsm::doc_vec::DocVector;\n\npub const QUERY_WEIGHT: f32 = 100.0;\n\npub struct NativeOrder {\n    lang: Language,\n}\n\nimpl NativeOrder {\n    pub fn new(lang: Language) -> Self {\n        Self { lang }\n    }\n}\n\nimpl RelevanceEngine for NativeOrder {\n    type OutItem = &'static Sentence;\n    type IndexItem = DocVector<u32>;\n    type Query = SpVec32;\n\n    #[inline]\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32 {\n        //let mut rel = term_dist(item.query(), item.index_item().vec());\n        let mut rel = sim(item.query(), item.index_item().vec(), QUERY_WEIGHT);\n\n        if !item.item().has_translation(self.lang) {\n            rel *= 0.99;\n        }\n\n        rel\n    }\n}\n\n/// Calculates a similar value to the cosine similarity between vec_a and vec_b but\n/// gives the length of vec_a more weight than vec_b's length.\n/// This prevents longer sentences being less relevant than short sentences, even if\n/// the longer sentences contains all terms of the query when the short sentence does not.\n#[inline]\nfn sim(vec_a: &SpVec32, vec_b: &SpVec32, a_weight: f32) -> f32 {\n    if !vec_a.could_overlap(vec_b) {\n        return 0.0;\n    }\n\n    let sc = vec_a.scalar(vec_b);\n\n    let ldiff = ((vec_a.get_length() * a_weight) + vec_b.get_length()) / (a_weight + 1.0);\n\n    sc / ldiff\n}\n"
  },
  {
    "path": "lib/search/src/sentence/producer/filter.rs",
    "content": "use super::kanji;\nuse crate::{engine, query::Query};\nuse index_framework::traits::{backend::Backend, dictionary::IndexDictionary};\nuse jp_utils::JapaneseExt;\nuse sparse_vec::VecExt;\nuse types::jotoba::sentences::Sentence;\nuse vsm::doc_vec::DocVector;\n\npub(crate) fn filter_sentence(query: &Query, sentence: &Sentence) -> bool {\n    if sentence.get_translation(query.lang_param()).is_none() {\n        return false;\n    }\n\n    if query.form.is_kanji_reading() {\n        let kreading = query\n            .form\n            .as_kanji_reading()\n            .and_then(|i| kanji::get_reading(i))\n            .unwrap();\n        return kanji::sentence_matches(sentence, &kreading);\n    }\n\n    if !query.must_contain.is_empty() {\n        if !by_quot_marks(query, sentence) {\n            return false;\n        }\n    }\n\n    if !query\n        .tags\n        .iter()\n        .filter_map(|i| i.as_sentence_tag())\n        .all(|tag| sentence.has_tag(tag))\n    {\n        return false;\n    }\n\n    true\n}\n\nfn by_quot_marks(query: &Query, sentence: &Sentence) -> bool {\n    if !by_quot_marks_jp(query, sentence) {\n        return false;\n    }\n\n    // We're doing filtering for foreign words directly as search engine filter\n    /*\n    sentence\n        .get_translation(query.lang(), query.show_english())\n        .map(|sentence| by_quot_marks_fe(query, sentence))\n        .unwrap_or(true)\n        */\n    true\n}\n\n/*\nfn by_quot_marks_fe(query: &Query, sentence: &str) -> bool {\n    let sentence = sentence.to_lowercase();\n    let sentence: Vec<_> = sentence.split(' ').collect();\n\n    let iter = query.must_contain.iter().filter(|i| !i.is_japanese());\n\n    for needle in iter {\n        if !sentence.contains(&needle.as_str()) {\n            return false;\n        }\n    }\n\n    true\n}\n*/\n\nfn by_quot_marks_jp(query: &Query, sentence: &Sentence) -> bool {\n    let jp_sentence = &sentence.japanese;\n\n    let jp_terms = 
query.must_contain.iter().filter(|i| i.is_japanese());\n    for needle in jp_terms {\n        let is_kana = needle.is_kana();\n\n        // If kana reading and kana contains needle\n        if (is_kana && sentence.get_kana().contains(needle))\n            // Or full reading contains\n            || (!is_kana && jp_sentence.contains(needle))\n        {\n            continue;\n        }\n\n        return false;\n    }\n\n    true\n}\n\n/// Vector filter for Sentences filtering based on quoted terms\npub struct FeQotTermsVecFilter {\n    mc_terms: Vec<u32>,\n    filter_all: bool,\n}\n\nimpl FeQotTermsVecFilter {\n    pub fn new(query: &Query) -> Self {\n        // If there is a term that is not indexed and thus can't be found,\n        // filter out all results\n        let mut filter_all = false;\n        let mut mc_terms = vec![];\n\n        let index = indexes::get().sentence().foreign();\n        let ix_dict = index.dict();\n\n        'o: for t in query.must_contain.iter().filter(|i| !i.is_japanese()) {\n            for term in engine::sentences::foreign::all_terms(t).into_iter() {\n                if let Some(v) = ix_dict.get_id(&term) {\n                    mc_terms.push(v as u32);\n                    continue;\n                }\n\n                filter_all = true;\n                mc_terms.clear();\n                break 'o;\n            }\n        }\n\n        Self {\n            mc_terms,\n            filter_all,\n        }\n    }\n\n    pub fn filter(&self, sentence: &DocVector<u32>) -> bool {\n        if self.filter_all {\n            return false;\n        }\n\n        if self.mc_terms.is_empty() {\n            return true;\n        }\n\n        self.mc_terms\n            .iter()\n            .all(|dim| sentence.vec().has_dim(*dim as usize))\n    }\n}\n"
  },
  {
    "path": "lib/search/src/sentence/producer/foreign.rs",
    "content": "use super::filter::{self, FeQotTermsVecFilter};\nuse crate::{\n    engine::sentences::foreign,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{Query, QueryLang},\n    sentence::{order::foreign::ForeignOrder, Search},\n};\nuse engine::{pushable::FilteredMaxCounter, task::SearchTask};\nuse types::jotoba::language::Language;\n\n/// Producer for sentences by foreign keywords\npub struct ForeignProducer<'a> {\n    query: &'a Query,\n    language: Language,\n}\n\nimpl<'a> ForeignProducer<'a> {\n    pub fn new(query: &'a Query, language: Language) -> Self {\n        Self { query, language }\n    }\n\n    fn task(&self) -> SearchTask<'static, foreign::Engine> {\n        let query_str = &self.query.query_str;\n        let query_c = self.query.clone();\n        let vec_filter = FeQotTermsVecFilter::new(&self.query);\n        let lang = self.query.lang();\n\n        SearchTask::with_language(query_str, self.language)\n            .with_result_filter(move |i| filter::filter_sentence(&query_c, *i))\n            .with_item_filter(move |i| vec_filter.filter(i))\n            .with_custom_order(ForeignOrder::new(lang))\n    }\n}\n\nimpl<'a> Producer for ForeignProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.task().find_to(out);\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.form.is_normal() && self.query.q_lang == QueryLang::Foreign\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        self.task().estimate_to(out);\n    }\n}\n"
  },
  {
    "path": "lib/search/src/sentence/producer/kanji.rs",
    "content": "use japanese::ToKanaExt;\nuse jp_utils::furi::{\n    segment::{kanji::as_kanji::AsKanjiSegment, AsSegment},\n    Furigana,\n};\nuse sentence_reader::JA_NL_PARSER;\nuse types::jotoba::{\n    kanji::reading::{Reading, ReadingSearch},\n    sentences::Sentence,\n};\n\npub(crate) fn sentence_matches(sentence: &Sentence, reading: &Reading) -> bool {\n    let lit = reading.get_lit_str();\n\n    if reading.is_full_reading() {\n        let parsed_furi = Furigana(&sentence.furigana);\n        let reading_hira = reading.get_raw().to_hiragana();\n\n        for i in parsed_furi.segments() {\n            let Some(curr_kanji) = i.as_kanji() else {continue};\n\n            if !curr_kanji.literals().contains(&lit) {\n                continue;\n            }\n\n            if i.get_kana_reading().to_hiragana().contains(&reading_hira) {\n                return true;\n            }\n        }\n\n        return false;\n    }\n\n    // Kunyomi\n    let formatted = reading.format_reading_with_literal();\n    for morph in JA_NL_PARSER.get().unwrap().parse(&sentence.japanese) {\n        let reading = morph.lexeme;\n        if reading == formatted {\n            return true;\n        }\n    }\n\n    false\n}\n\npub(crate) fn get_reading(reading: &ReadingSearch) -> Option<Reading> {\n    let kanji_storage = resources::get().kanji();\n    let kanji = kanji_storage.by_literal(reading.literal)?;\n    let reading = kanji.find_reading(&reading.reading)?;\n    Some(reading)\n}\n"
  },
  {
    "path": "lib/search/src/sentence/producer/mod.rs",
    "content": "pub mod filter;\npub mod foreign;\nmod kanji;\npub mod native;\npub mod sequence;\npub mod tag;\n"
  },
  {
    "path": "lib/search/src/sentence/producer/native.rs",
    "content": "use super::filter;\nuse crate::{\n    engine::sentences::native,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{Query, QueryLang},\n    sentence::{order::native::NativeOrder, Search},\n};\nuse engine::{pushable::FilteredMaxCounter, task::SearchTask};\nuse types::jotoba::language::Language;\n\n/// Producer for sentences by foreign keywords\npub struct NativeProducer<'a> {\n    query: &'a Query,\n    lang: Language,\n}\n\nimpl<'a> NativeProducer<'a> {\n    pub fn new(query: &'a Query, lang: Language) -> Self {\n        Self { query, lang }\n    }\n\n    fn task(&self) -> SearchTask<'static, native::Engine> {\n        let query = self.query.clone();\n        let query_str = self.jp_reading();\n\n        SearchTask::with_language(&query_str, self.lang)\n            .with_result_filter(move |sentence| filter::filter_sentence(&query, *sentence))\n            .with_custom_order(NativeOrder::new(self.query.lang()))\n    }\n\n    fn jp_reading(&self) -> String {\n        let mut query_str = self.query.query_str.clone();\n\n        if let Some(kanji_reading) = self.query.form.as_kanji_reading() {\n            query_str = kanji_reading.literal.to_string();\n        }\n\n        query_str\n    }\n}\n\nimpl<'a> Producer for NativeProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.task().find_to(out);\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.form.is_normal() && self.query.q_lang == QueryLang::Japanese\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        self.task().estimate_to(out);\n    }\n}\n"
  },
  {
    "path": "lib/search/src/sentence/producer/sequence.rs",
    "content": "use crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::Query,\n    sentence::Search,\n};\nuse engine::{\n    pushable::{FilteredMaxCounter, Pushable},\n    relevance::item::RelItem,\n};\nuse types::jotoba::sentences::Sentence;\n\n/// Producer for sentence by seq\npub struct SequenceProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> SequenceProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn sentence(&self) -> Option<&'static Sentence> {\n        let seq = self.query.form.as_sequence()?;\n        resources::get().sentences().by_id(*seq)\n    }\n}\n\nimpl<'a> Producer for SequenceProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        if let Some(s) = self.sentence() {\n            out.push(RelItem::new(s, 0.0));\n        }\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        if let Some(sentence) = self.sentence() {\n            out.push(sentence);\n        }\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.form.is_sequence()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/sentence/producer/tag.rs",
    "content": "use crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{Query, Tag},\n    sentence::Search,\n};\nuse engine::{\n    pushable::FilteredMaxCounter,\n    pushable::{PushMod, Pushable},\n    relevance::item::RelItem,\n};\nuse types::jotoba::sentences::Sentence;\n\n/// Producer for Tags\npub struct TagProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> TagProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn find_to<P>(&self, out: &mut P)\n    where\n        P: Pushable<Item = RelItem<&'static Sentence>>,\n    {\n        let tag = self\n            .query\n            .tags\n            .iter()\n            .filter(|i| i.is_jlpt() || i.is_sentence_tag())\n            .find(|i| i.is_producer())\n            .unwrap();\n        self.push_tag(tag, out);\n    }\n\n    pub fn push_tag<P>(&self, tag: &Tag, out: &mut P)\n    where\n        P: Pushable<Item = RelItem<&'static Sentence>>,\n    {\n        let s_res = resources::get().sentences();\n\n        match tag {\n            Tag::SentenceTag(sentence_tag) => self.push_iter(s_res.by_tag(sentence_tag), out),\n            Tag::Jlpt(jlpt) => self.push_iter(s_res.by_jlpt(*jlpt), out),\n            _ => (),\n        }\n    }\n\n    fn push_iter<P, I>(&self, iter: I, out: &mut P)\n    where\n        P: Pushable<Item = RelItem<&'static Sentence>>,\n        I: Iterator<Item = &'static Sentence>,\n    {\n        let mut c = 0;\n        for w in iter {\n            let item = RelItem::new(w, c as f32);\n            if out.push(item) {\n                c += 1;\n                if c >= 1000 {\n                    break;\n                }\n            }\n        }\n    }\n}\n\nimpl<'a> Producer for TagProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n   
     >,\n    ) {\n        self.find_to(out);\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        let mut m = PushMod::new(out, |i: RelItem<&Sentence>| i.item);\n        self.find_to(&mut m);\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.query_str.is_empty()\n            && self\n                .query\n                .tags\n                .iter()\n                // Only run for jlpt and sentence tags\n                .filter(|i| i.is_jlpt() || i.is_sentence_tag())\n                .any(|i| i.is_producer())\n    }\n}\n"
  },
  {
    "path": "lib/search/src/sentence/result.rs",
    "content": "use jp_utils::furi::{segment::SegmentRef, seq::FuriSequence};\nuse types::jotoba::language::{param::AsLangParam, Language};\n\nuse crate::executor::out_builder::OutputAddable;\n\n/// Additional result data for a sentence search\n#[derive(Clone, Copy, Default, Debug)]\npub struct ResData {\n    pub hidden: bool,\n}\n\nimpl ResData {\n    pub fn new(hidden: bool) -> Self {\n        Self { hidden }\n    }\n}\n\nimpl OutputAddable for ResData {}\n\n/// A displayable sentence\n#[derive(Clone, Debug)]\npub struct Sentence {\n    pub id: u32,\n    pub content: &'static str,\n    pub furigana: &'static str,\n    pub translation: &'static str,\n    pub language: Language,\n    pub eng: Option<String>,\n}\n\nimpl Sentence {\n    #[inline]\n    pub fn furigana_pairs<'a>(&'a self) -> Vec<SegmentRef<'a>> {\n        // Can unwrap here since we check and fix all sentences at preprocessing.\n        FuriSequence::parse_ref(self.furigana).unwrap().into_parts()\n    }\n\n    #[inline]\n    pub fn get_english(&self) -> Option<&str> {\n        self.eng.as_deref()\n    }\n\n    #[inline]\n    pub fn from_m_sentence(\n        s: &'static types::jotoba::sentences::Sentence,\n        lang: impl AsLangParam,\n    ) -> Option<Self> {\n        let translation = s.get_translation(lang)?;\n\n        Some(Self {\n            id: s.id,\n            translation,\n            content: &s.japanese,\n            furigana: &s.furigana,\n            eng: None,\n            language: lang.as_lang().language(),\n        })\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/filter.rs",
    "content": "use crate::query::Query;\nuse jp_utils::JapaneseExt;\nuse std::borrow::Borrow;\nuse types::jotoba::words::Word;\n\npub struct WordFilter {\n    query: Query,\n    jlpt_lvl: Option<u8>,\n}\n\nimpl WordFilter {\n    pub fn new(query: Query) -> Self {\n        let jlpt_lvl = query.tags.iter().find_map(|i| i.as_jlpt());\n        Self { query, jlpt_lvl }\n    }\n\n    /// Returns `true` for all words the query has a filter for aka if the word should be filtered out of the results\n    #[inline]\n    pub fn filter_word<W: Borrow<Word>>(&self, word: W) -> bool {\n        #[inline]\n        fn inner(wf: &WordFilter, word: &Word) -> Option<()> {\n            wf.by_misc_tags(word)?;\n            wf.by_language(word)?;\n            wf.by_pos_tags(word)?;\n            wf.by_jlpt(word)?;\n            wf.by_katakana_tag(word)?;\n\n            wf.by_quot_marks(word)?;\n\n            Some(())\n        }\n\n        inner(self, word.borrow()).is_none()\n    }\n\n    #[inline]\n    fn by_language(&self, w: &Word) -> Option<()> {\n        w.has_language(self.query.lang_param()).then(|| ())\n    }\n\n    #[inline]\n    fn by_katakana_tag(&self, w: &Word) -> Option<()> {\n        let has_tag = self.query.has_tag(crate::query::Tag::Katakana);\n        (!has_tag || w.get_reading_str().is_katakana()).then(|| ())\n    }\n\n    #[inline]\n    fn by_jlpt(&self, w: &Word) -> Option<()> {\n        // Ignore if not set\n        if self.jlpt_lvl.is_none() {\n            return Some(());\n        }\n\n        (w.get_jlpt_lvl() == self.jlpt_lvl).then(|| ())\n    }\n\n    #[inline]\n    fn by_pos_tags(&self, w: &Word) -> Option<()> {\n        w.has_all_pos_iter(self.query.get_part_of_speech_tags())\n            .then(|| ())\n    }\n\n    #[inline]\n    fn by_misc_tags(&self, w: &Word) -> Option<()> {\n        self.query\n            .get_misc_tags()\n            .all(|mt| w.has_misc(mt))\n            .then(|| ())\n    }\n\n    fn by_quot_marks(&self, w: &Word) -> Option<()> {\n       
 if self.query.must_contain.is_empty() {\n            return Some(());\n        }\n\n        let (jp_q_terms, mut fn_q_terms): (Vec<_>, Vec<_>) = self\n            .query\n            .must_contain\n            .iter()\n            .partition(|i| i.is_japanese());\n\n        if !fn_q_terms.is_empty() {\n            for i in w.gloss_iter_by_lang(self.query.lang_param()) {\n                let i = i.to_lowercase();\n                fn_q_terms.retain(|k| !i.contains(k.as_str()));\n                if fn_q_terms.is_empty() {\n                    break;\n                }\n            }\n        }\n\n        if !jp_q_terms.is_empty() {\n            for term in jp_q_terms {\n                self.by_quot_marks_jp(w, &term)?;\n            }\n        }\n\n        // Success if all quoted terms were removed\n        fn_q_terms.is_empty().then(|| ())\n    }\n\n    #[inline]\n    fn by_quot_marks_jp(&self, w: &Word, q_term: &str) -> Option<()> {\n        if q_term.is_kana() {\n            if !w.get_kana().contains(q_term) {\n                return None;\n            }\n        } else if !w.reading_iter(false).any(|i| i.reading.contains(q_term)) {\n            return None;\n        }\n\n        Some(())\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/kanji.rs",
    "content": "use itertools::Itertools;\nuse jp_utils::{alphabet::Alphabet, tokenize::words_with_alphabet};\nuse types::jotoba::{kanji::Kanji, words::Word};\n\n/// Retrieves all (up to 10) kanji for words in correct order without duplicates\npub fn load_word_kanji_info(words: &[Word]) -> Vec<Kanji> {\n    let kanji_resources = resources::get().kanji();\n    words\n        .iter()\n        .filter_map(|i| {\n            let kanji = &i.reading.kanji.as_ref()?.reading;\n            Some(words_with_alphabet(&kanji, Alphabet::Kanji))\n        })\n        .flatten()\n        .map(|i| i.chars().collect::<Vec<_>>())\n        .flatten()\n        .filter_map(|i| kanji_resources.by_literal(i).cloned())\n        .unique_by(|i| i.literal)\n        .take(10)\n        .collect()\n}\n"
  },
  {
    "path": "lib/search/src/word/mod.rs",
    "content": "pub mod filter;\npub mod kanji;\npub mod order;\npub mod producer;\npub mod result;\n\nuse crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::Query,\n};\nuse types::jotoba::words::Word;\n\nuse filter::WordFilter;\nuse producer::{\n    foreign::{romaji::RomajiProducer, ForeignProducer},\n    japanese::{number::NumberProducer, sentence_reader::SReaderProducer, NativeProducer},\n    k_reading::KReadingProducer,\n    regex::RegexProducer,\n    sequence::SeqProducer,\n    tag::TagProducer,\n};\n\n/// Word search\npub struct Search<'a> {\n    query: &'a Query,\n    producer: Vec<Box<dyn Producer<Target = Self> + 'a>>,\n    filter: WordFilter,\n}\n\nimpl<'a> Search<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        let producer: Vec<Box<dyn Producer<Target = Self>>> = vec![\n            Box::new(KReadingProducer::new(query)),\n            Box::new(TagProducer::new(query)),\n            Box::new(SeqProducer::new(query)),\n            Box::new(RegexProducer::new(query)),\n            Box::new(SReaderProducer::new(query)),\n            Box::new(NativeProducer::new(query)),\n            Box::new(ForeignProducer::new(query)),\n            Box::new(RomajiProducer::new(query)),\n            Box::new(NumberProducer::new(query)),\n        ];\n\n        let filter = WordFilter::new(query.clone());\n        Self {\n            query,\n            producer,\n            filter,\n        }\n    }\n}\n\nimpl<'a> Searchable for Search<'a> {\n    type Item = &'static Word;\n    type OutItem = Word;\n    type ResAdd = result::AddResData;\n\n    fn get_producer<'s>(&'s self) -> &Vec<Box<dyn Producer<Target = Self> + 's>> {\n        &self.producer\n    }\n\n    fn get_query(&self) -> &Query {\n        self.query\n    }\n\n    fn mod_output(&self, out: &mut OutputBuilder<Self::Item, Self::ResAdd>) {\n        if out.output_add.raw_query.is_empty() {\n            out.output_add.raw_query = 
self.query.raw_query.clone();\n        }\n    }\n\n    #[inline]\n    fn to_output_item(&self, item: Self::Item) -> Self::OutItem {\n        let mut item = item.to_owned();\n        item.adjust_language(self.query.lang_param());\n        item\n    }\n\n    #[inline]\n    fn filter(&self, word: &Self::Item) -> bool {\n        self.filter.filter_word(*word)\n    }\n\n    #[inline]\n    fn max_top_dist(&self) -> Option<f32> {\n        if !max_top_dist_filter(&self.query) {\n            return None;\n        }\n        //Some(2.0)\n        None\n    }\n}\n\n#[inline]\nfn max_top_dist_filter(query: &Query) -> bool {\n    !query.is_regex() && query.form.is_normal()\n}\n"
  },
  {
    "path": "lib/search/src/word/order/foreign.rs",
    "content": "use super::REMOVE_PARENTHESES;\nuse engine::relevance::{data::SortData, RelevanceEngine};\nuse indexes::ng_freq::{term_dist, NgFreqIndex};\nuse sparse_vec::{SpVec32, VecExt};\nuse types::jotoba::{\n    language::{LangParam, Language},\n    words::Word,\n};\nuse vsm::doc_vec::DocVector;\n\npub struct ForeignOrder {\n    query_vec_lang: SpVec32,\n    query_vec_en: Option<SpVec32>,\n\n    lang: Language,\n}\n\nimpl ForeignOrder {\n    #[inline]\n    pub fn new() -> Self {\n        Self {\n            query_vec_lang: SpVec32::default(),\n            query_vec_en: None,\n            lang: Language::English,\n        }\n    }\n\n    #[inline]\n    fn get_query_vec(&self, lang: Language) -> &SpVec32 {\n        if lang == self.lang {\n            &self.query_vec_lang\n        } else if lang == Language::English {\n            // If `lang` is english and not the users lang, `query_vec_en` is always set\n            self.query_vec_en.as_ref().unwrap()\n        } else {\n            // There are only search tasks for users language or english. So the query vector has\n            // to be `query_vec_lang` in case `lang` is the users language, or `query_vec_en` if\n            // the language is english. 
If there are other search requests, this code must be\n            // adjusted\n            log::error!(\"Unreachable\");\n            unreachable!()\n        }\n    }\n\n    #[inline]\n    fn text_sim(&self, word: &Word, lang: Language) -> f32 {\n        let dist = |i: &str| -> f32 {\n            let fmt = REMOVE_PARENTHESES.replace_all(i, \"\").trim().to_lowercase();\n            if fmt.is_empty() {\n                return 0.0;\n            }\n            let vec = build_vec(get_ng_index(lang), &fmt);\n            term_dist(self.get_query_vec(lang), &vec)\n        };\n\n        word.gloss_iter_by_lang(LangParam::new(lang))\n            .map(|i| dist(i))\n            .chain(\n                self.query_vec_en\n                    .iter()\n                    .map(|_| word.gloss_iter_by_lang(Language::English).map(|i| dist(i)))\n                    .flatten(),\n            )\n            .max_by(|a, b| a.total_cmp(&b))\n            .unwrap_or(0.0)\n    }\n}\n\nimpl RelevanceEngine for ForeignOrder {\n    type OutItem = &'static Word;\n    type IndexItem = DocVector<u32>;\n    type Query = SpVec32;\n\n    #[inline]\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32 {\n        let word = item.item();\n\n        let lang = item.language().unwrap_or(Language::English);\n        let text_sim = self.text_sim(word, lang);\n\n        let mut rel_add = 0.0;\n        if text_sim >= 0.5 {\n            let index_item = item.index_item().vec();\n            let gloss_sim = item.query().scalar(index_item);\n            rel_add += gloss_sim * 100.0;\n        }\n\n        (rel_add + text_sim) / 2.0\n    }\n\n    fn init(&mut self, init: engine::relevance::RelEngineInit) {\n        let lang = init.language.unwrap();\n\n        let query = init.query.to_lowercase();\n        self.query_vec_lang = build_vec(get_ng_index(lang), &query);\n\n        if lang != Language::English {\n            
self.query_vec_en = Some(build_vec(get_ng_index(Language::English), &query));\n        }\n\n        self.lang = lang;\n    }\n}\n\n#[inline]\nfn get_ng_index(lang: Language) -> &'static NgFreqIndex {\n    indexes::get().word().foreign(lang).unwrap().ng_index()\n}\n\n#[inline]\npub fn build_vec(index: &NgFreqIndex, term: &str) -> SpVec32 {\n    index.build_custom_vec(term, |_freq, _tot| 1.0)\n}\n"
  },
  {
    "path": "lib/search/src/word/order/kanji_reading.rs",
    "content": "use engine::relevance::RelevanceEngine;\nuse types::jotoba::words::Word;\n\npub struct KanjiReadingRelevance;\n\nimpl RelevanceEngine for KanjiReadingRelevance {\n    type OutItem = &'static Word;\n    type IndexItem = u32;\n    type Query = String;\n\n    #[inline]\n    fn score<'item, 'query>(\n        &self,\n        item: &engine::relevance::data::SortData<\n            'item,\n            'query,\n            Self::OutItem,\n            Self::IndexItem,\n            Self::Query,\n        >,\n    ) -> f32 {\n        let word = item.item();\n        let mut score: f32 = 0.0;\n\n        if word.is_common() {\n            score += 100.0;\n        }\n\n        if let Some(jlpt) = word.get_jlpt_lvl() {\n            score += jlpt as f32 * 10.0;\n        }\n\n        if score == 0.0 {\n            // Show shorter words on top if they aren't important\n            let reading_len = word.reading.get_reading().reading.chars().count();\n            //score = 100usize.saturating_sub(reading_len * 2);\n            score = (0f32).max(100.0 - reading_len as f32 * 2.0);\n        } else {\n            score += 100.0;\n        }\n\n        score\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/order/mod.rs",
    "content": "pub mod foreign;\npub mod kanji_reading;\npub mod native;\npub mod regex;\n\nuse once_cell::sync::Lazy;\n\n/// A Regex matching parentheses and its contents\npub(crate) static REMOVE_PARENTHESES: Lazy<::regex::Regex> =\n    Lazy::new(|| ::regex::Regex::new(\"\\\\(.*\\\\)\").unwrap());\n"
  },
  {
    "path": "lib/search/src/word/order/native.rs",
    "content": "use engine::relevance::{data::SortData, RelevanceEngine};\nuse indexes::ng_freq::{term_dist, NgFreqIndex};\nuse japanese::ToKanaExt;\nuse jp_utils::JapaneseExt;\nuse ngindex::{item::IndexItem, termset::TermSet};\nuse sparse_vec::{SpVec32, VecExt};\nuse types::jotoba::words::Word;\n\npub struct NativeOrder {\n    orig_query: String,\n    orig_query_ts: Option<TermSet>,\n\n    query_hw: String,\n\n    /// Word index in sentence reader\n    w_index: Option<usize>,\n\n    query_vec: SpVec32,\n}\n\nimpl NativeOrder {\n    #[inline]\n    pub fn new(orig_query: String) -> Self {\n        Self {\n            orig_query,\n            orig_query_ts: None,\n            w_index: None,\n            query_vec: SpVec32::empty(),\n            query_hw: String::new(),\n        }\n    }\n\n    /// Set a custom sentence reader word index\n    pub fn with_w_index(mut self, index: usize) -> Self {\n        self.w_index = Some(index);\n        self\n    }\n\n    pub fn with_oquery_ts(mut self, ts: TermSet) -> Self {\n        self.orig_query_ts = Some(ts);\n        self\n    }\n\n    #[inline]\n    fn exceeded_threshold<'i, 'q, A, B, C>(item: &SortData<'i, 'q, A, B, C>, score: f32) -> bool {\n        item.threshold().map(|th| score < th).unwrap_or(false)\n    }\n\n    #[inline]\n    fn text_sim(&self, word: &Word) -> f32 {\n        word.reading_iter(true)\n            .map(|i| self.reading_sim(&i.reading.to_halfwidth().to_hiragana()))\n            .max_by(|a, b| a.total_cmp(b))\n            .unwrap_or(0.0)\n    }\n\n    #[inline]\n    fn reading_sim(&self, reading: &str) -> f32 {\n        let vec = build_ng_vec(reading);\n        term_dist(&vec, &self.query_vec)\n    }\n}\n\nimpl RelevanceEngine for NativeOrder {\n    type OutItem = &'static Word;\n    type IndexItem = IndexItem<u32>;\n    type Query = TermSet;\n\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32 {\n        let 
word = item.item();\n        let mut score = item.index_item().dice(item.query());\n\n        // If alternative reading matches query exactly\n        if Self::exceeded_threshold(item, score) {\n            return 0.0;\n        }\n\n        score *= self.text_sim(word);\n\n        if let Some(ref o_ts) = self.orig_query_ts {\n            if self.w_index.unwrap_or(0) == 0 {\n                let new = item.index_item().dice(o_ts);\n                if new > score {\n                    score = new;\n                } else {\n                    score *= 0.7;\n                }\n            }\n        }\n\n        if Self::exceeded_threshold(item, score) {\n            return 0.0;\n        }\n\n        let kana = word.reading.kana.reading.to_halfwidth().to_hiragana();\n\n        // Words with query as substring have more relevance\n        // スイス: スイス人 > スパイス\n        if !kana.contains(&self.query_hw) {\n            //score *= 0.8;\n        }\n\n        if Self::exceeded_threshold(item, score) {\n            return 0.0;\n        }\n\n        if kana != self.orig_query && word.get_reading().reading.to_halfwidth() != self.orig_query {\n            score *= 0.7;\n        }\n\n        if Self::exceeded_threshold(item, score) {\n            return 0.0;\n        }\n\n        if word.jlpt_lvl.is_none() {\n            score *= 0.999;\n        }\n\n        // Is common\n        if !word.is_common() {\n            score *= 0.999;\n        }\n\n        //let reading_len = utils::real_string_len(&reading);\n        /* if reading_len == 1 && reading.is_kanji() {\n            let kanji = reading.chars().next().unwrap();\n            let norm = indexes::get()\n                .kanji()\n                .reading_freq()\n                .norm_reading_freq(kanji, word.get_kana());\n            if let Some(_read_freq) = norm {\n                //score += read_freq;\n            }\n        } */\n\n        score\n    }\n\n    fn init(&mut self, init: engine::relevance::RelEngineInit) {\n    
    self.query_vec = build_ng_vec(&init.query.to_halfwidth().to_hiragana());\n        self.query_hw = init.query.to_halfwidth().to_hiragana();\n    }\n}\n\n#[inline]\nfn ng_freq_index() -> &'static NgFreqIndex {\n    indexes::get().word().native().tf_index()\n}\n\n#[inline]\nfn build_ng_vec(term: &str) -> SpVec32 {\n    ng_freq_index().build_custom_vec(term, |freq, tot| (tot / freq).log2())\n}\n"
  },
  {
    "path": "lib/search/src/word/order/regex.rs",
    "content": "use crate::query::regex::RegexSQuery;\nuse types::jotoba::words::Word;\nuse utils::real_string_len;\n\n/// Order for regex-search results\npub fn regex_order(word: &Word, found_in: &str, _query: &RegexSQuery) -> usize {\n    let mut score: usize = 100;\n\n    if !word\n        .reading\n        .alternative\n        .iter()\n        .any(|i| i.reading == found_in)\n    {\n        score += 20;\n    }\n\n    if word.is_common() {\n        score += 30;\n    }\n\n    if let Some(jlpt) = word.get_jlpt_lvl() {\n        score += 10 + (jlpt * 2) as usize;\n    }\n\n    // Show shorter words more on top\n    score = score.saturating_sub(real_string_len(&word.get_reading().reading) * 3);\n\n    score\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/foreign/mod.rs",
    "content": "pub mod romaji;\npub mod task;\n\nuse crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{Query, QueryLang},\n    word::Search,\n};\nuse engine::pushable::FilteredMaxCounter;\nuse task::ForeignSearch;\nuse types::jotoba::language::Language;\n\n/// Producer for words by foreign query\npub struct ForeignProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> ForeignProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n}\n\nimpl<'a> Producer for ForeignProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        // convert WordOutput -> Word\n        //let mut p_mod = PushMod::new(out, |i: RelItem<WordOutput>| i.map_item(|i| i.word));\n\n        let q_str = &self.query.query_str;\n        let lang = self.query.get_search_lang();\n\n        ForeignSearch::new(self.query, q_str, lang)\n            .task()\n            .find_to(out);\n\n        // Add english results\n        if lang != Language::English && self.query.show_english() {\n            ForeignSearch::new(self.query, q_str, Language::English)\n                .task()\n                .find_to(out);\n        }\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        let q_str = &self.query.query_str;\n        let lang = self.query.get_search_lang();\n\n        ForeignSearch::new(self.query, q_str, lang)\n            .task()\n            .estimate_to(out);\n\n        // Add english results\n        if lang != Language::English && self.query.show_english() {\n            ForeignSearch::new(self.query, q_str, Language::English)\n                .task()\n                .estimate_to(out);\n        }\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n      
  self.query.q_lang == QueryLang::Foreign && !self.query.query_str.is_empty()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/foreign/romaji.rs",
    "content": "use japanese::guessing::could_be_romaji;\n\nuse crate::{\n    engine::words::native::Engine,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{Query, QueryLang},\n    word::{producer::japanese::task::NativeSearch, Search},\n};\nuse engine::{pushable::FilteredMaxCounter, task::SearchTask};\n\npub struct RomajiProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> RomajiProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn hira_query(&self) -> String {\n        japanese::to_hira_fmt(&self.query.query_str)\n    }\n\n    fn kk_query(&self) -> String {\n        japanese::to_kk_fmt(&self.query.query_str)\n    }\n\n    fn kk_task(&self) -> SearchTask<'static, Engine> {\n        let hira_query_str = self.kk_query();\n        NativeSearch::new(self.query, &hira_query_str).task()\n    }\n\n    fn hira_task(&self) -> SearchTask<'static, Engine> {\n        let hira_query_str = self.hira_query();\n        NativeSearch::new(self.query, &hira_query_str)\n            .with_custom_original_query(&hira_query_str)\n            .task()\n    }\n}\n\nimpl<'a> Producer for RomajiProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.hira_task().find_to(out);\n        self.kk_task().find_to(out);\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        self.hira_task().estimate_to(out);\n        self.kk_task().estimate_to(out);\n    }\n\n    fn should_run(&self, already_found: usize) -> bool {\n        already_found < 100\n            // Don't run on jp input\n            && self.query.q_lang == QueryLang::Foreign\n            && could_be_romaji(&self.query.query_str)\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/foreign/task.rs",
    "content": "use engine::task::SearchTask;\nuse types::jotoba::language::Language;\n\nuse crate::{\n    engine::words::foreign::Engine,\n    query::Query,\n    word::{filter::WordFilter, order::foreign::ForeignOrder},\n};\n\n/// Helper for creating SearchTask for foreign queries\npub struct ForeignSearch<'a> {\n    query: &'a Query,\n    query_str: &'a str,\n    language: Language,\n}\n\nimpl<'a> ForeignSearch<'a> {\n    pub(crate) fn new(query: &'a Query, query_str: &'a str, language: Language) -> Self {\n        Self {\n            query,\n            query_str,\n            language,\n        }\n    }\n\n    pub fn task(&self) -> SearchTask<'static, Engine> {\n        let filter = WordFilter::new(self.query.clone());\n        SearchTask::with_language(self.query_str, self.language)\n            .with_custom_order(ForeignOrder::new())\n            .with_result_filter(move |item| !filter.filter_word(*item))\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/japanese/mod.rs",
    "content": "pub mod number;\npub mod sentence_reader;\npub mod task;\n\nuse crate::{\n    engine::words::native::Engine,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{Query, QueryLang},\n    word::Search,\n};\n\nuse engine::{pushable::FilteredMaxCounter, task::SearchTask};\nuse task::NativeSearch;\n\n/// Produces search results for native search input\npub struct NativeProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> NativeProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn task(&self) -> SearchTask<'static, Engine> {\n        NativeSearch::new(self.query, &self.query.query_str).task()\n    }\n}\n\nimpl<'a> Producer for NativeProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.task().find_to(out);\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        self.task().estimate_to(out)\n    }\n\n    fn should_run(&self, already_found: usize) -> bool {\n        if self.query.q_lang != QueryLang::Japanese\n            || self.query.query_str.is_empty()\n            || self.query.form.is_kanji_reading()\n        {\n            return false;\n        }\n\n        already_found < 5\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/japanese/number.rs",
    "content": "use crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::Query,\n    word::Search,\n};\n\nuse engine::pushable::FilteredMaxCounter;\nuse japanese_number_parser::JapaneseNumberFormatter;\nuse jp_utils::JapaneseExt;\nuse log::debug;\n\n/// Produces a number if the query is a Japanese number\npub struct NumberProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> NumberProducer<'a> {\n    #[inline]\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n}\n\nimpl<'a> Producer for NumberProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        let query = &self.query.query_str;\n        if let Some(number) = JapaneseNumberFormatter::new().format(&query) {\n            debug!(\"Found number: {number:?}\");\n            out.output_add.number = Some(number);\n        }\n    }\n\n    fn estimate_to(&self, _out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {}\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        let query_str = &self.query.query_str;\n\n        !query_str.is_empty()\n        // Don't parse if query is a regular number\n            && query_str\n                .to_halfwidth()\n                .parse::<usize>()\n                .is_err()\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/japanese/sentence_reader.rs",
    "content": "use engine::{\n    pushable::FilteredMaxCounter,\n    relevance::{data::SortData, RelevanceEngine},\n    task::SearchTask,\n};\nuse jp_utils::{\n    furi::segment::{AsSegment, SegmentRef},\n    JapaneseExt,\n};\nuse ngindex::{item::IndexItem, termset::TermSet};\nuse sentence_reader::{output::ParseResult, Parser, Part, Sentence};\nuse types::jotoba::words::{part_of_speech::PosSimple, Word};\n\nuse crate::{\n    engine::{names, words::native::Engine},\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{Query, QueryLang},\n    word::{\n        order::native::NativeOrder,\n        result::{InflectionInformation, SentenceInfo},\n        Search,\n    },\n};\n\nuse super::task::NativeSearch;\n\n/// Producer for sentence reader and inflection information\npub struct SReaderProducer<'a> {\n    query: &'a Query,\n    parsed: ParseResult,\n}\n\nimpl<'a> SReaderProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        let parsed = Parser::new(&query.query_str).parse();\n        Self { query, parsed }\n    }\n\n    /// Search task for inflected word\n    fn infl_task(&self) -> Option<SearchTask<'static, Engine>> {\n        let infl = self.parsed.as_inflected_word()?;\n\n        let normalized = infl.get_normalized();\n\n        let original_query = <Engine as engine::Engine>::make_query(&self.query.query_str, None)?;\n\n        let search = NativeSearch::new(self.query, &normalized);\n        let o_query = search.original_query().to_string();\n        let order = NativeOrder::new(o_query).with_oquery_ts(original_query);\n        Some(search.task().with_custom_order(order))\n    }\n\n    /// Selected word index within the sentence\n    #[inline]\n    fn sentence_index(&self) -> usize {\n        self.parsed\n            .as_sentence()\n            .map(|s| self.query.word_index.clamp(0, s.word_count() - 1))\n            .unwrap_or(0)\n    }\n\n    /// Selected word in the sentence\n    #[inline]\n    fn 
sentence_word(&self) -> Option<&Part> {\n        let sentence = self.parsed.as_sentence()?;\n        let index = self.sentence_index();\n        sentence.get_at(index)\n    }\n\n    /// Normalized search task for sentences\n    fn snt_task_normalized(&self) -> Option<SearchTask<'static, Engine>> {\n        let word = self.sentence_word().unwrap();\n\n        let inflected = word.get_inflected();\n        let normalized = word.get_normalized();\n\n        let search = NativeSearch::new(self.query, &normalized);\n\n        let order = NativeOrder::new(inflected).with_w_index(self.sentence_index());\n\n        Some(search.task().with_custom_order(order))\n    }\n\n    /// Inflected search task for an inflected word in a sentence\n    fn snt_task_infl(&self) -> Option<SearchTask<'static, Engine>> {\n        let word = self.sentence_word().unwrap();\n        let inflected = word.get_inflected();\n        let search = NativeSearch::new(self.query, &inflected);\n        let o_query = search.original_query().to_string();\n        let order = NativeOrder::new(o_query).with_w_index(self.sentence_index());\n        Some(search.task().with_custom_order(order))\n    }\n}\n\nimpl<'a> Producer for SReaderProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        if let ParseResult::InflectedWord(infl) = &self.parsed {\n            self.infl_task().unwrap().find_to(out);\n            out.output_add.inflection = InflectionInformation::from_part(infl);\n            return;\n        }\n\n        if let ParseResult::Sentence(mut sentence) = self.parsed.clone() {\n            set_furigana(&mut sentence);\n\n            self.snt_task_normalized().unwrap().find_to(out);\n\n            let word = self.sentence_word().unwrap();\n            if word.get_inflected() != word.get_normalized() {\n                
self.snt_task_infl().unwrap().find_to(out);\n            }\n\n            out.output_add.inflection = InflectionInformation::from_part(word);\n            out.output_add.raw_query = word.get_inflected();\n            out.output_add.sentence = Some(SentenceInfo {\n                parts: Some(sentence.clone()),\n                index: self.query.word_index,\n                query: word.get_normalized(),\n            });\n        }\n    }\n\n    fn should_run(&self, already_found: usize) -> bool {\n        if self.parsed.is_none()\n            || self.query.q_lang != QueryLang::Japanese\n            || !self.query.form.is_normal()\n            || self.query.query_str.is_empty()\n        {\n            return false;\n        }\n\n        // Always run inflections\n        if self.parsed.is_inflected_word() {\n            return true;\n        }\n\n        // Disable sentence reader if already found some words\n        if already_found > 0 {\n            return false;\n        }\n\n        let term_in_db = word_exists(&self.query.query_str);\n        // For sentences only run if the query is not a term in the db\n        !term_in_db\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        if let Some(mut infl) = self.infl_task() {\n            infl.estimate_to(out);\n            return;\n        }\n\n        if self.parsed.is_sentence() {\n            self.snt_task_normalized().unwrap().estimate_to(out);\n            let word = self.sentence_word().unwrap();\n            if word.get_inflected() != word.get_normalized() {\n                self.snt_task_infl().unwrap().estimate_to(out);\n            }\n        }\n    }\n}\n\n/// Returns `true` if the word exists in all words\nfn word_exists(term: &str) -> bool {\n    let task = SearchTask::<Engine>::new(term).with_limit(1);\n\n    let query = term.to_string();\n    let mut task = task.with_item_filter(move |i| {\n        resources::get()\n            .words()\n       
     .by_sequence(*i.item())\n            .unwrap()\n            .has_reading(&query)\n    });\n\n    let res = task.find();\n    res.len() > 0\n}\n\n/// Generates furigana for a sentence\nfn set_furigana(s: &mut Sentence) {\n    for part in s.iter_mut() {\n        let p = part.clone();\n        part.set_furigana(|inp| furigana_by_reading(inp, &p))\n    }\n}\n\n/// Returns furigana of the given `morpheme` if available\nfn furigana_by_reading(morpheme: &str, part: &sentence_reader::Part) -> Option<String> {\n    word_furi(morpheme, part).or_else(|| name_furi(morpheme))\n}\n\nfn name_furi(morpheme: &str) -> Option<String> {\n    let morpheme_c = morpheme.to_string();\n\n    let mut task = SearchTask::<names::native::Engine>::new(morpheme)\n        .with_limit(1)\n        .with_result_filter(move |n| n.get_reading() == morpheme_c && n.has_kanji());\n\n    let res = task.find();\n\n    if res.total_items != 1 {\n        return None;\n    }\n\n    let name = res.get(0).unwrap().item;\n    let kanji = name.kanji.as_ref().unwrap();\n    Some(SegmentRef::new_kanji(&kanji, &[&name.kana]).encode())\n}\n\nfn word_furi(morpheme: &str, part: &sentence_reader::Part) -> Option<String> {\n    let word_storage = resources::get().words();\n\n    let pos = sentence_reader::part::wc_to_simple_pos(&part.word_class_raw());\n    let morph = morpheme.to_string();\n\n    let mut st = SearchTask::<Engine>::new(morpheme)\n        .with_limit(10)\n        .with_custom_order(WordFuriOrder::new(pos, morpheme.to_string()))\n        .with_result_filter(move |i| i.has_reading(&morph));\n\n    st.find().get(0).and_then(|word| {\n        word_storage\n            .by_sequence(word.item.sequence)\n            .and_then(|i| i.furigana.clone())\n    })\n}\n\nstruct WordFuriOrder {\n    pos: Option<PosSimple>,\n    morph: String,\n}\n\nimpl WordFuriOrder {\n    #[inline]\n    fn new(pos: Option<PosSimple>, morph: String) -> Self {\n        Self { pos, morph }\n    }\n}\n\nimpl RelevanceEngine for 
WordFuriOrder {\n    type OutItem = &'static Word;\n    type IndexItem = IndexItem<u32>;\n    type Query = TermSet;\n\n    fn score<'item, 'query>(\n        &self,\n        item: &SortData<'item, 'query, Self::OutItem, Self::IndexItem, Self::Query>,\n    ) -> f32 {\n        let mut score = 0.0;\n\n        let i = item.item();\n        let reading = &i.get_reading().reading;\n        let reading_len = utils::real_string_len(reading);\n\n        if reading == &self.morph {\n            score += 100.0;\n        }\n\n        if reading_len == 1 && reading.is_kanji() {\n            let kanji = reading.chars().next().unwrap();\n            let kana = i.get_kana();\n            let norm = indexes::get()\n                .kanji()\n                .reading_freq()\n                .norm_reading_freq(kanji, kana);\n            if let Some(norm) = norm {\n                score += norm * 10.0;\n            }\n        }\n\n        if let Some(ref pos) = self.pos {\n            if i.has_pos(&[*pos]) {\n                score += 20.0;\n            } else {\n                //score = score.saturating_sub(30);\n                score = (score - 30.0).max(0.0);\n            }\n        }\n\n        if i.is_common() {\n            score += 2.0;\n        }\n\n        if i.get_jlpt_lvl().is_some() {\n            score += 2.0;\n        }\n\n        score\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/japanese/task.rs",
    "content": "use engine::task::SearchTask;\nuse jp_utils::JapaneseExt;\n\nuse crate::{\n    engine::words::native::Engine,\n    query::Query,\n    word::{filter::WordFilter, order::native::NativeOrder},\n};\n\n/// Helper for creating SearchTask for foreign queries\npub struct NativeSearch<'a> {\n    query: &'a Query,\n    query_str: &'a str,\n    cust_original: Option<&'a str>,\n    threshold: f32,\n}\n\nimpl<'a> NativeSearch<'a> {\n    #[inline]\n    pub(crate) fn new(query: &'a Query, query_str: &'a str) -> Self {\n        // Kanji queries are shorter so we need a lower threshold to not filter too many different words for short queries\n        let kana_count: usize = query_str.chars().filter(|i| i.is_kana()).count();\n        let kanji_count: usize = query_str.chars().filter(|i| i.is_kanji()).count();\n        let kanji_query = kanji_count >= (kana_count * 2);\n        let threshold = if kanji_query || (kanji_count + kana_count < 5) {\n            0.15\n        } else {\n            0.3\n        };\n\n        Self {\n            query,\n            query_str,\n            cust_original: None,\n            threshold,\n        }\n    }\n\n    pub fn with_custom_original_query(mut self, query: &'a str) -> Self {\n        self.cust_original = Some(query);\n        self\n    }\n\n    pub fn with_threshold(mut self, threshold: f32) -> Self {\n        self.threshold = threshold;\n        self\n    }\n\n    pub fn task(&self) -> SearchTask<'static, Engine> {\n        let filter = WordFilter::new(self.query.clone());\n        let original_query = self.original_query().to_string();\n\n        SearchTask::new(self.query_str)\n            .with_custom_order(NativeOrder::new(original_query))\n            .with_result_filter(move |item| !filter.filter_word(*item))\n            .with_threshold(self.threshold)\n    }\n\n    #[inline]\n    pub fn original_query(&self) -> &str {\n        self.cust_original\n            .as_ref()\n            
.unwrap_or(&self.query.raw_query.as_str())\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/k_reading.rs",
    "content": "use engine::{\n    pushable::FilteredMaxCounter,\n    pushable::{PushMod, Pushable},\n    relevance::item::RelItem,\n    task::SearchTask,\n};\nuse types::jotoba::{kanji::Kanji, words::Word};\n\nuse crate::{\n    engine::words::native::k_reading,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::Query,\n    word::{order::kanji_reading::KanjiReadingRelevance, Search},\n};\n\n/// Kanji reading search producer\npub struct KReadingProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> KReadingProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    /// Returns the kanji from the search. Returns `None` if kanji does not exist or doesn't\n    /// match the reading from the search\n    fn get_kanji(&self) -> Option<&'static Kanji> {\n        let reading = self.query.form.as_kanji_reading()?;\n        let kanji_storage = resources::get().kanji();\n\n        let kanji = kanji_storage.by_literal(reading.literal)?;\n        kanji.has_reading(&reading.reading).then(|| kanji)\n    }\n\n    /// Returns a query for the kanji reading index for the search query\n    fn kr_query(&self) -> Option<String> {\n        let kanji = self.get_kanji()?;\n        let reading = self.query.form.as_kanji_reading().unwrap();\n        Some(format!(\"{}{}\", kanji.literal, reading.reading))\n    }\n\n    fn find_to<P>(&self, out: &mut P)\n    where\n        P: Pushable<Item = RelItem<&'static Word>>,\n    {\n        let engine_query = match self.kr_query() {\n            Some(q) => q,\n            None => return,\n        };\n\n        SearchTask::<k_reading::Engine>::new(&engine_query)\n            .with_custom_order(KanjiReadingRelevance)\n            .find_to(out);\n    }\n}\n\nimpl<'a> Producer for KReadingProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            
<Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.find_to(out);\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.form.is_kanji_reading()\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        let mut m = PushMod::new(out, |i: RelItem<&Word>| i.item);\n        // TODO: use estimate_to here\n        self.find_to(&mut m);\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/mod.rs",
    "content": "pub mod foreign;\npub mod japanese;\npub mod k_reading;\npub mod regex;\npub mod sequence;\npub mod tag;\n"
  },
  {
    "path": "lib/search/src/word/producer/regex.rs",
    "content": "use itertools::Itertools;\nuse types::jotoba::words::Word;\n\nuse crate::{\n    engine::words::native::regex,\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{regex::RegexSQuery, Query},\n    word::{order::regex::regex_order, Search},\n};\nuse engine::{\n    pushable::FilteredMaxCounter,\n    pushable::{PushMod, Pushable},\n    relevance::item::RelItem,\n};\n\npub struct RegexProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> RegexProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn find_to_unsorted<P: Pushable<Item = RelItem<&'static Word>>>(\n        &self,\n        out: &mut P,\n    ) -> Option<()> {\n        let regex_query = self.query.as_regex_query()?;\n        search(&regex_query, |_, _| 0, out);\n        Some(())\n    }\n\n    fn find_to<P: Pushable<Item = RelItem<&'static Word>>>(&self, out: &mut P) -> Option<()> {\n        let regex_query = self.query.as_regex_query()?;\n        search(&regex_query, |w, r| regex_order(w, r, &regex_query), out);\n        Some(())\n    }\n}\n\nimpl<'a> Producer for RegexProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.find_to(out);\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.as_regex_query().is_some()\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        let mut mid = PushMod::new(out, |i: RelItem<&'static Word>| i.item);\n        self.find_to_unsorted(&mut mid);\n    }\n}\n\npub fn search<'a, F, P>(query: &'a RegexSQuery, sort: F, out: &mut P)\nwhere\n    F: Fn(&'a Word, &'a str) -> usize,\n    P: Pushable<Item = RelItem<&'static Word>>,\n{\n    let word_resources = resources::get().words();\n\n    let index 
= indexes::get().word().regex();\n    let possible_results = regex::find_words(index, &query.get_chars());\n\n    for seq_id in possible_results.into_iter().sorted() {\n        let word = word_resources.by_sequence(seq_id).unwrap();\n\n        let item_iter = word\n            .reading_iter(true)\n            .filter_map(|i| query.matches(&i.reading).then(|| (word, &i.reading)))\n            .map(|(word, reading)| {\n                let order = sort(word, reading) as f32;\n                RelItem::new(word, order)\n            });\n\n        for i in item_iter {\n            out.push(i);\n        }\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/sequence.rs",
    "content": "use crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::Query,\n    word::Search,\n};\nuse engine::{pushable::FilteredMaxCounter, pushable::Pushable, relevance::item::RelItem};\nuse types::jotoba::words::Word;\n\n/// Producer for a Word by its sequence id\npub struct SeqProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> SeqProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    // Find the word\n    pub fn word(&self) -> Option<&'static Word> {\n        let seq = *self.query.form.as_sequence()?;\n        resources::get().words().by_sequence(seq)\n    }\n}\n\nimpl<'a> Producer for SeqProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n            <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        if let Some(word) = self.word() {\n            out.push(RelItem::new(word, 0.0));\n        }\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        self.query.form.is_sequence()\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        if let Some(word) = self.word() {\n            out.push(word);\n        }\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/producer/tag.rs",
    "content": "use types::jotoba::words::Word;\n\nuse crate::{\n    executor::{out_builder::OutputBuilder, producer::Producer, searchable::Searchable},\n    query::{Query, Tag},\n    word::Search,\n};\nuse engine::{\n    pushable::FilteredMaxCounter,\n    pushable::{PushMod, Pushable},\n    relevance::item::RelItem,\n};\n\npub struct TagProducer<'a> {\n    query: &'a Query,\n}\n\nimpl<'a> TagProducer<'a> {\n    pub fn new(query: &'a Query) -> Self {\n        Self { query }\n    }\n\n    fn get_producer_tag(&self) -> Option<&Tag> {\n        self.query\n            .tags\n            .iter()\n            .filter(|i| i.is_producer() && !i.is_sentence_tag())\n            // Use tag with fewest items that it'll produce to reduce the amount of items that have to be filtered\n            .map(|i| (self.tag_len(i).unwrap_or(usize::MAX), i))\n            .min_by_key(|i| i.0)\n            .map(|i| i.1)\n    }\n\n    fn find_to<P>(&self, out: &mut P)\n    where\n        P: Pushable<Item = RelItem<&'static Word>>,\n    {\n        // Find first producer tag. 
All other tags are treated as filter\n        let producer_tag = self.get_producer_tag().unwrap();\n        self.find_words(out, producer_tag);\n    }\n\n    fn find_words<P>(&self, out: &mut P, tag: &Tag)\n    where\n        P: Pushable<Item = RelItem<&'static Word>>,\n    {\n        let words = resources::get().words();\n        match tag {\n            Tag::PartOfSpeech(pos) => self.push_iter(words.by_pos_simple(*pos), out),\n            Tag::Misc(m) => self.push_iter(words.by_misc(*m), out),\n            Tag::Jlpt(jlpt) => self.push_iter(words.by_jlpt(*jlpt), out),\n            Tag::Katakana => self.push_iter(words.katakana(), out),\n            Tag::IrregularIruEru => self.push_iter(words.irregular_ichidan(), out),\n            _ => (),\n        }\n    }\n\n    fn push_iter<P, I>(&self, iter: I, out: &mut P)\n    where\n        P: Pushable<Item = RelItem<&'static Word>>,\n        I: Iterator<Item = &'static Word> + DoubleEndedIterator,\n    {\n        let mut c = 0;\n        for w in iter.rev() {\n            let item = RelItem::new(w, (1000 - c) as f32);\n            if out.push(item) {\n                c += 1;\n                if c >= 1000 {\n                    break;\n                }\n            }\n        }\n    }\n\n    /// Returns the amount of words a given tag has assigned/indexed\n    #[inline]\n    fn tag_len(&self, tag: &Tag) -> Option<usize> {\n        let w_retr = resources::get().words();\n        match tag {\n            Tag::PartOfSpeech(p) => w_retr.pos_simple_len(p),\n            Tag::Misc(m) => w_retr.misc_len(m),\n            Tag::Jlpt(j) => w_retr.jlpt_len(*j),\n            Tag::IrregularIruEru => Some(w_retr.irregular_ichidan_len()),\n            Tag::Katakana => Some(w_retr.katakana_len()),\n            _ => None,\n        }\n    }\n}\n\nimpl<'a> Producer for TagProducer<'a> {\n    type Target = Search<'a>;\n\n    fn produce(\n        &self,\n        out: &mut OutputBuilder<\n            <Self::Target as Searchable>::Item,\n          
  <Self::Target as Searchable>::ResAdd,\n        >,\n    ) {\n        self.find_to(out);\n    }\n\n    fn should_run(&self, _already_found: usize) -> bool {\n        // Only run this producer if there is no query (except tags) and there are tags which can produce output\n        self.query.query_str.is_empty() && self.get_producer_tag().is_some()\n    }\n\n    fn estimate_to(&self, out: &mut FilteredMaxCounter<<Self::Target as Searchable>::Item>) {\n        let mut mid = PushMod::new(out, |i: RelItem<&Word>| i.item);\n        self.find_to(&mut mid);\n    }\n}\n"
  },
  {
    "path": "lib/search/src/word/result.rs",
    "content": "use types::jotoba::words::inflection::Inflection;\n\nuse crate::executor::out_builder::OutputAddable;\n\n#[derive(Default, Clone, Debug)]\npub struct AddResData {\n    pub sentence: Option<SentenceInfo>,\n    pub inflection: Option<InflectionInformation>,\n    pub raw_query: String,\n    pub number: Option<String>,\n}\n\nimpl OutputAddable for AddResData {\n    #[inline]\n    fn is_empty(&self) -> bool {\n        self.sentence.is_none() && self.inflection.is_none()\n    }\n}\n\n#[derive(Default, Clone, Debug)]\npub struct SentenceInfo {\n    pub parts: Option<sentence_reader::Sentence>,\n    pub index: usize,\n    pub query: String,\n}\n\n#[derive(Debug, Clone, PartialEq)]\npub struct InflectionInformation {\n    /// Normalized form of the word\n    pub lexeme: String,\n    /// All inflections\n    pub inflections: Vec<Inflection>,\n}\n\nimpl AddResData {\n    pub fn has_sentence(&self) -> bool {\n        self.sentence.is_some()\n    }\n\n    pub fn has_inflection(&self) -> bool {\n        self.inflection.is_some()\n    }\n\n    pub fn sentence_parts(&self) -> Option<&sentence_reader::Sentence> {\n        self.sentence.as_ref().and_then(|i| i.parts.as_ref())\n    }\n\n    pub fn sentence_index(&self) -> usize {\n        self.sentence.as_ref().map(|i| i.index).unwrap_or(0)\n    }\n}\n\nimpl InflectionInformation {\n    pub fn from_part(part: &sentence_reader::Part) -> Option<Self> {\n        if !part.has_inflections() {\n            return None;\n        }\n\n        Some(InflectionInformation {\n            lexeme: part.get_normalized(),\n            inflections: part.inflections().to_vec(),\n        })\n    }\n}\n\npub fn selected(curr: usize, selected: usize) -> &'static str {\n    if curr == selected {\n        \"selected\"\n    } else {\n        \"\"\n    }\n}\n"
  },
  {
    "path": "lib/search/tests/search_test.rs",
    "content": "use jp_utils::JapaneseExt;\nuse search::{\n    executor::search_result::SearchResult,\n    query::{parser::QueryParser, Query, UserSettings},\n    word::{kanji::load_word_kanji_info, result::AddResData},\n    SearchExecutor,\n};\nuse test_case::test_case;\nuse types::jotoba::{\n    language::Language,\n    search::SearchTarget,\n    words::{inflection::Inflection, part_of_speech::PosSimple, Word},\n};\n\nfn search(query: &Query) -> SearchResult<Word, AddResData> {\n    let search = search::word::Search::new(query);\n    SearchExecutor::new(search).run()\n}\n\n/// ----------- Inflections --------------- ///\n\n#[test_case(\"知らなかった\",&[Inflection::Past, Inflection::Negative])]\n#[test_case(\"わかりたい\",&[Inflection::Tai])]\n#[test_case(\"わかりたくない\",&[Inflection::Tai, Inflection::Negative])]\n#[test_case(\"わかりたくなかった\",&[Inflection::Tai, Inflection::Negative, Inflection::Past])]\n#[test_case(\"覚えてる\",&[Inflection::TeIru])]\n#[test_case(\"覚えてない\",&[Inflection::TeIru, Inflection::Negative])]\n#[test_case(\"覚えてなかった\",&[Inflection::TeIru, Inflection::Negative, Inflection::Past])]\n#[test_case(\"書いておく\",&[Inflection::TeOku])]\nfn inflections(query_str: &str, exp_infl: &[Inflection]) {\n    wait();\n    let query = parse_query(query_str, Language::English, SearchTarget::Words);\n    let res = search(&query);\n    assert!(res.inflection.is_some());\n    let infl_info = res.inflection.as_ref().unwrap();\n    assert!(utils::same_elements(&infl_info.inflections, exp_infl));\n}\n\n///\n/// ----------- Sentence reader --------------- ///\n\n#[test_case(\"日本語勉強したい\", &[\"日本語\",\"勉強\",\"したい\"])]\n#[test_case(\"音楽が聞きたい\", &[\"音楽\",\"が\",\"聞きたい\"])]\nfn sentence_reader_test(query_str: &str, exp_parts: &[&str]) {\n    wait();\n    //\n    let query = parse_query(query_str, Language::English, SearchTarget::Words);\n    let res = search(&query);\n    let sentence = res.sentence.clone();\n    assert!(sentence.is_some());\n    let sentence = sentence.unwrap();\n    let mut 
exp_iter = exp_parts.iter();\n    for part in sentence.parts.unwrap().iter() {\n        let exp = exp_iter.next().expect(\"Expected parts to short\");\n        assert_eq!(&part.get_inflected(), exp);\n    }\n}\n\n///\n/// ----------- Kanji (right) --------------- ///\n\n// called in 'word_search'\n#[test_case(\"音楽\")]\n#[test_case(\"買う\")]\n#[test_case(\"宇宙\")]\n#[test_case(\"宇宙人\")]\n#[test_case(\"覚える\")]\nfn correct_kanji_shown(query_str: &str) {\n    wait();\n    let query = make_query(query_str, Language::English);\n    let res = search(&query);\n\n    let mut exp_kanji: Vec<char> = Vec::new();\n    for word in &res.items {\n        for kanji in word\n            .get_reading()\n            .reading\n            .chars()\n            .filter(|i| i.is_kanji() && !i.is_roman_letter())\n        {\n            if !exp_kanji.contains(&kanji) {\n                exp_kanji.push(kanji);\n            }\n        }\n    }\n\n    let kanji = load_word_kanji_info(&res.items);\n    for (pos, kanji) in kanji.into_iter().enumerate() {\n        assert_eq!(exp_kanji[pos], kanji.literal);\n    }\n}\n\n/// ----------- Simple word search ------------- ///\n\n#[test_case(\"musik\", Language::German, \"音楽\")]\n#[test_case(\"音楽\", Language::German, \"音楽\")]\n#[test_case(\"バラバラ\", Language::German, \"バラバラ\")]\n#[test_case(\"ドイツ\", Language::German, \"ドイツ\")]\n#[test_case(\"ドイツ人\", Language::German, \"ドイツ人\")]\n#[test_case(\"to sleep\", Language::English, \"寝る\")]\n#[test_case(\"買う\", Language::English, \"買う\")]\n#[test_case(\"know\", Language::German, \"知る\"; \"Find in english too\")]\n#[test_case(\"remember\", Language::German, \"覚える\"; \"Find in english too 2\")]\n#[test_case(\"think\", Language::German, \"思う\"; \"Find in english too 3\")]\n#[test_case(\"especially\", Language::German, \"特に\"; \"Find in english too 4\")]\n// Regex\n#[test_case(\"宇宙*行士\", Language::German, \"宇宙飛行士\"; \"Regex 1\")]\n#[test_case(\"宇*\", Language::German, \"宇宙\"; \"Regex 2\")]\n#[test_case(\"宇宙*行士\", 
Language::English, \"宇宙飛行士\"; \"Regex 3\")]\n#[test_case(\"宇*\", Language::English, \"宇宙\"; \"Regex 4\")]\nfn word_search(query_str: &str, language: Language, first_res: &str) {\n    wait();\n\n    let query = parse_query(query_str, language, SearchTarget::Words);\n    let res = search(&query);\n    let word = match res.items.get(0) {\n        Some(n) => n,\n        None => return,\n    };\n\n    if !word.has_reading(first_res) {\n        panic!(\"Expected {query_str:?} ({language}) to return {first_res:?} as first result (but was: {:?})\", word.get_reading().reading);\n    }\n}\n\n/// ------------- Part of speech filter ----------- ///\n\n#[test_case(\"音楽 #adjective\", &[PosSimple::Adjective], &[\"音楽的\", \"標題音楽\", \"電子音楽\"]; \"Test single tag\")]\n#[test_case(\"speak #verb\", &[PosSimple::Verb], &[\"話す\",\"話せる\"]; \"Test foreign inp\")]\n#[test_case(\"speak #noun\", &[PosSimple::Noun], &[\"言葉\"]; \"Test unlikely\")]\nfn pos_tag_test(query_str: &str, exp_pos: &[PosSimple], exp_res: &[&str]) {\n    wait();\n\n    let query = parse_query(query_str, Language::English, SearchTarget::Words);\n    let res = search(&query);\n    let have_tag = res\n        .items\n        .iter()\n        .all(|i| exp_pos.iter().all(|j| i.has_pos(&[*j])));\n    assert!(have_tag);\n    assert!(exp_res\n        .iter()\n        .all(|j| res.items.iter().any(|w| w.has_reading(j))));\n}\n\n/// ----------- JP search Relevance ----------- ///\n\n#[test]\nfn test_jp_search() {\n    wait();\n\n    // Expect most important word on top\n    for word in resources::get().words().iter().step_by(317) {\n        let reading = &word.get_reading().reading;\n        word_search(reading, Language::Swedish, reading);\n    }\n}\n\n// ------------ Romaji search ---------------- ///\n\n#[test_case(\"kore\",&[\"これ\"])]\n#[test_case(\"tokasu\", &[\"溶かす\"])]\n#[test_case(\"kisuu\", &[\"奇数\"])]\n#[test_case(\"daijoubu\", &[\"大丈夫\"])]\n#[test_case(\"jikan\", &[\"時間\"])]\n#[test_case(\"kono\", 
&[\"この\"])]\n#[test_case(\"kanjiru\", &[\"感じる\"])]\n#[test_case(\"ongaku\", &[\"音楽\"])]\n#[test_case(\"kimi\", &[\"君\"])]\n#[test_case(\"jitensha\", &[\"自転車\"])]\n#[test_case(\"kiku\", &[\"聞く\"])]\n#[test_case(\"suki\", &[\"好き\"])]\nfn test_romaji(query_str: &str, expected: &[&str]) {\n    wait();\n\n    let res = search(&make_query(query_str, Language::English));\n    for exp in expected.iter() {\n        if !res.iter().take(3).any(|i| i.has_reading(exp)) {\n            panic!(\"Expected {:?} to find {exp:?} (Romaji search)\", query_str);\n        }\n    }\n}\n\nfn make_query(query_str: &str, language: Language) -> Query {\n    Query {\n        query_str: query_str.to_string(),\n        settings: UserSettings {\n            user_lang: language,\n            ..UserSettings::default()\n        },\n        ..Query::default()\n    }\n}\n\nfn parse_query(query_str: &str, language: Language, q_type: SearchTarget) -> Query {\n    let mut settings = UserSettings::default();\n    settings.user_lang = language;\n    QueryParser::new(query_str.to_string(), q_type, settings)\n        .parse()\n        .expect(\"Invaild query passed\")\n}\n\nfn load_data() {\n    if resources::is_loaded() || indexes::storage::is_loaded() {\n        return;\n    }\n    rayon::scope(|s| {\n        s.spawn(|_| {\n            resources::load(\"../../resources/storage_data\").unwrap();\n        });\n        s.spawn(|_| {\n            indexes::storage::load(\"../../resources/indexes\").unwrap();\n        });\n        s.spawn(|_| {\n            sentence_reader::load_parser(\"../../resources/unidic-mecab\");\n        })\n    });\n}\n\nfn wait() {\n    if !resources::is_loaded() && !indexes::storage::is_loaded() && !sentence_reader::is_loaded() {\n        load_data();\n        return;\n    }\n    indexes::storage::wait();\n    resources::wait();\n    sentence_reader::wait();\n}\n"
  },
  {
    "path": "lib/sentence_reader/Cargo.toml",
    "content": "[package]\nname = \"sentence_reader\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nigo-unidic = { git = \"https://github.com/JojiiOfficial/igo-unidic\" }\nonce_cell = { version = \"1.18.0\", default-features = false }\nlocalization = { path = \"../localization\", optional = true }\njapanese = { path = \"../japanese\" }\ntypes = { path = \"../types\" }\njp_utils = { git = \"https://github.com/JojiiOfficial/jp_utils\"}\n\n[features]\ndefault = []\n\n# This feature adds stuff required for Jotoba to work but not necessarily for extern crates, so its made optional\njotoba_intern = [\"localization\"]\n"
  },
  {
    "path": "lib/sentence_reader/src/analyzer.rs",
    "content": "use crate::grammar::{rule::Rule, rule_set::RuleSet, Analyzer};\nuse once_cell::sync::Lazy;\n\nstatic RULES: Lazy<Analyzer> = Lazy::new(|| Analyzer::new(get_rules()));\n\n/// Returns a grammar analyzer with a japanese inflection ruleset\npub(crate) fn get_grammar_analyzer() -> &'static Analyzer {\n    &RULES\n}\n\n/// Returns a set of rules for japanese text analyzing\nfn get_rules() -> RuleSet {\n    // Often used dest rules\n    let end = &[];\n    let te_ending = &[\n        \"て\",\n        \"てる\",\n        \"ます\",\n        \"しまう\",\n        \"ない\",\n        \"た\",\n        \"てみる\",\n        \"いる\",\n        \"ある\",\n        \"おく\",\n    ];\n    let ru_ending = &[\n        \"て\",\n        \"てる\",\n        \"ます\",\n        \"しまう\",\n        \"ない\",\n        \"た\",\n        \"たり\",\n        \"ちゃう\",\n        \"とく\",\n        \"たい\",\n        \"られる\",\n        \"れる\",\n        \"ば\",\n    ];\n    // \\ Often used dest rules\n    //\n\n    let mut rules = Vec::with_capacity(20);\n\n    // い rule\n    rules.push(Rule::new(\"た\", end));\n    rules.push(Rule::new(\"たり\", end));\n    rules.push(Rule::new(\"ない\", &[\"て\", \"た\"]));\n    rules.push(Rule::new(\"たい\", &[\"て\", \"ない\", \"た\"]));\n\n    // じゃない\n    rules.push(Rule::new(\"じゃ\", &[\"ない\"]));\n\n    // て\n    rules.push(Rule::new(\"て\", te_ending));\n    rules.push(Rule::new(\"てみる\", ru_ending));\n    rules.push(Rule::new(\"しまう\", ru_ending));\n    rules.push(Rule::new(\"おく\", ru_ending));\n    rules.push(Rule::new(\"てる\", ru_ending));\n\n    // いる/ある\n    rules.push(Rule::new(\"いる\", ru_ending));\n    rules.push(Rule::new(\"ある\", ru_ending));\n\n    // Masu\n    rules.push(Rule::new(\"ます\", &[\"た\", \"ん\"]));\n    rules.push(Rule::new(\"ん\", &[\"です\"]));\n    rules.push(Rule::new(\"です\", &[\"た\"]));\n\n    // passive / 可能形\n    rules.push(Rule::new(\"られる\", ru_ending));\n    rules.push(Rule::new(\"れる\", ru_ending));\n\n    // ちゃう / しまう\n    rules.push(Rule::new(\"ちゃう\", ru_ending));\n    
rules.push(Rule::new(\"しまう\", ru_ending));\n\n    // とく\n    rules.push(Rule::new(\"とく\", ru_ending));\n\n    // ば conditional\n    rules.push(Rule::new(\"ば\", end));\n\n    // される causative\n    rules.push(Rule::new(\"さ\", &[\"せる\", \"れる\"]));\n    rules.push(Rule::new(\"せる\", ru_ending));\n    rules.push(Rule::new(\"させる\", ru_ending));\n\n    // Exceptions\n    rules.push(Rule::new(\"いただき\", &[\"ます\"]));\n\n    // ぬ\n    //rules.push(Rule::new(\"V\", &[\"ます\"]));\n    rules.push(Rule::new(\"ん\", end));\n\n    // だった\n    rules.push(Rule::new(\"た\", &[\"た\"]));\n\n    // Generation/Root\n    rules.push(Rule::new(\n        \"V\",\n        &[\n            \"た\",\n            \"たり\",\n            \"ない\",\n            \"たい\",\n            \"て\",\n            \"てる\",\n            \"てみる\",\n            \"いる\",\n            \"ある\",\n            \"ます\",\n            \"られる\",\n            \"れる\",\n            \"ちゃう\",\n            \"しまう\",\n            \"とく\",\n            \"ば\",\n            \"せる\",\n            \"させる\",\n            // the さ of される\n            \"さ\",\n            // ぬ\n            \"ん\",\n        ],\n    ));\n\n    rules.push(Rule::new(\"AD\", &[\"ない\", \"た\", \"て\"]));\n    rules.push(Rule::new(\"NR\", &[\"NR\"]));\n\n    // generate ruleset\n    RuleSet::new(&rules)\n}\n"
  },
  {
    "path": "lib/sentence_reader/src/grammar/mod.rs",
    "content": "#![allow(dead_code)]\n\nuse self::{\n    rule::{Rule, ToRule},\n    rule_set::RuleSet,\n};\n\npub mod rule;\npub mod rule_set;\n\n/// A Grammar analyzer\n#[derive(Clone)]\npub struct Analyzer {\n    rules: RuleSet,\n}\n\nimpl Analyzer {\n    /// Creates a new Grammar analyzer\n    pub fn new(rules: RuleSet) -> Self {\n        Self { rules }\n    }\n\n    /// Checks if `inp` can be built with the given ruleset. Returns the index of the last rule\n    /// that was matching. In other words if the return value is equal to `inp.len()`, all input\n    /// rules were matching\n    pub fn check<T: ToRule>(&self, inp: &[T]) -> usize {\n        if inp.is_empty() {\n            return 0;\n        }\n\n        let mut pos = 0;\n\n        let mut last_rule = match self.resolve_to_rule(&inp[0]) {\n            Some(r) => r,\n            None => return pos,\n        };\n\n        pos += 1;\n\n        for part in &inp[pos..] {\n            let rule = match self.resolve_to_rule(part) {\n                Some(r) => r,\n                None => return pos,\n            };\n\n            if !last_rule.has_dst(rule.name()) {\n                return pos;\n            }\n\n            last_rule = rule;\n            pos += 1;\n        }\n\n        pos\n    }\n\n    /// Returns `true` if the analyzer has a given rule\n    #[inline]\n    pub fn has_rule(&self, rule: &str) -> bool {\n        self.rules.get_rule(rule).is_some()\n    }\n\n    /// Checks if a series of Rules can be built with the current set of Rules\n    #[inline]\n    pub fn check_full<T: ToRule>(&self, inp: &[T]) -> bool {\n        self.check(inp) == inp.len()\n    }\n\n    /// resolves a rule from `ToRule` to `&Rule`\n    #[inline]\n    fn resolve_to_rule<T: ToRule>(&self, tr: T) -> Option<&Rule> {\n        tr.to_rule().and_then(|i| self.rules.get_rule(i))\n    }\n\n    /// Get a reference to the analyzer's rules.\n    #[inline]\n    pub fn rules(&self) -> &RuleSet {\n        &self.rules\n    }\n}\n"
  },
  {
    "path": "lib/sentence_reader/src/grammar/rule.rs",
    "content": "use super::rule_set::ALL_WILDCARD;\n\n/// Represents a single rule describing a possible production\n/// of a grammar\n#[derive(Clone, Copy)]\npub struct Rule {\n    name: &'static str,\n    rhs: &'static [&'static str],\n}\n\nimpl Rule {\n    /// Creates a new rule\n    pub fn new(name: &'static str, rhs: &'static [&'static str]) -> Self {\n        Self { name, rhs }\n    }\n\n    /// Get the rule's name.\n    #[inline]\n    pub fn name(&self) -> &'static str {\n        self.name\n    }\n\n    /// Get the rule's destination rules\n    #[inline]\n    pub fn rhs(&self) -> &'static [&'static str] {\n        self.rhs\n    }\n\n    /// Returns `true` if the rule has a dst rule with `name`\n    #[inline]\n    pub fn has_dst(&self, name: &str) -> bool {\n        self.rhs.iter().any(|i| *i == name || *i == ALL_WILDCARD)\n    }\n}\n\npub trait ToRule {\n    fn to_rule(&self) -> Option<&str>;\n}\n\nimpl ToRule for &'static str {\n    #[inline]\n    fn to_rule(&self) -> Option<&str> {\n        Some(self)\n    }\n}\n\nimpl<T: ToRule> ToRule for &T {\n    #[inline]\n    fn to_rule(&self) -> Option<&str> {\n        (*self).to_rule()\n    }\n}\n"
  },
  {
    "path": "lib/sentence_reader/src/grammar/rule_set.rs",
    "content": "use super::rule::Rule;\nuse std::{collections::HashMap, fmt::Debug};\n\npub const ALL_WILDCARD: &str = \"*\";\n\n#[derive(Clone)]\npub struct RuleSet {\n    rules: HashMap<&'static str, Rule>,\n}\n\nimpl RuleSet {\n    /// Creates a new set of rules\n    pub fn new(rules: &[Rule]) -> Self {\n        let rules = rules\n            .iter()\n            .map(|i| (i.name(), *i))\n            .collect::<HashMap<_, _>>();\n        Self { rules }\n    }\n\n    /// Adds a Rule to the RuleSet\n    pub fn add(&mut self, rule: Rule) -> bool {\n        if self.has_rule(rule.name()) {\n            return false;\n        }\n\n        // add dummy rule to allow any dst rule\n        if rule.has_dst(ALL_WILDCARD) {\n            self.add_all_wildcard();\n        }\n\n        self.rules.insert(rule.name(), rule);\n        true\n    }\n\n    /// Returns `true` if ruleSet has a rule with `name`\n    pub fn has_rule(&self, name: &str) -> bool {\n        self.rules.contains_key(name)\n    }\n\n    /// Returns `true` if the RuleSet is complete\n    pub fn check(&self) -> bool {\n        // check that all used dst rules are reachable\n        for (_, rule) in self.rules.iter() {\n            for rhs in rule.rhs() {\n                if *rhs == ALL_WILDCARD {\n                    continue;\n                }\n                if !self.rules.contains_key(rhs) {\n                    return false;\n                }\n            }\n        }\n\n        true\n    }\n\n    /// Returns a rule with `name` or None when no such rule exists in RuleSet\n    #[inline]\n    pub fn get_rule(&self, name: &str) -> Option<&Rule> {\n        self.rules.get(name)\n    }\n\n    fn add_all_wildcard(&mut self) {\n        if self.has_rule(ALL_WILDCARD) {\n            return;\n        }\n\n        // add dummy rule that allows any production\n        self.rules\n            .insert(ALL_WILDCARD, Rule::new(ALL_WILDCARD, &[]));\n    }\n}\n\nimpl Debug for RuleSet {\n    fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result {\n        for (name, rule) in &self.rules {\n            let mut dst = String::new();\n            for (pos, d) in rule.rhs().iter().enumerate() {\n                if pos > 0 {\n                    dst.push_str(\" | \");\n                }\n                dst.push_str(*d);\n            }\n            if dst.is_empty() {\n                continue;\n            }\n            write!(f, \"{name} -> {dst}\\n\")?;\n        }\n\n        Ok(())\n    }\n}\n"
  },
  {
    "path": "lib/sentence_reader/src/lib.rs",
    "content": "mod analyzer;\nmod grammar;\npub mod output;\nmod sentence;\n\nuse std::path::Path;\n\nuse once_cell::sync::{Lazy, OnceCell};\nuse output::ParseResult;\nuse sentence::SentenceAnalyzer;\n\npub use igo_unidic;\n\npub use output::Sentence;\npub use sentence::part::{self, Part};\n\npub static JA_NL_PARSER: Lazy<OnceCell<igo_unidic::Parser>> = Lazy::new(|| OnceCell::new());\n\npub fn load_parser<P: AsRef<Path>>(path: P) {\n    let parser = igo_unidic::Parser::new(path.as_ref().to_str().unwrap()).unwrap();\n    JA_NL_PARSER.set(parser).ok();\n}\n\npub fn wait() {\n    JA_NL_PARSER.wait();\n}\n\npub fn is_loaded() -> bool {\n    JA_NL_PARSER.get().is_some()\n}\n\n/// Parser for sentence\npub struct Parser<'input> {\n    sentence_analyzer: SentenceAnalyzer<'input>,\n}\n\nimpl<'input> Parser<'input> {\n    /// Creates a new InputTextParser\n    pub fn new(original: &'input str) -> Self {\n        let sentence_analyzer = SentenceAnalyzer::new(\n            analyzer::get_grammar_analyzer(),\n            JA_NL_PARSER.get().unwrap().parse(original),\n        );\n\n        Self { sentence_analyzer }\n    }\n\n    /// Execute the parsing\n    pub fn parse(&self) -> ParseResult {\n        let mut sent_parse = self.sentence_analyzer.analyze::<Part>();\n\n        if sent_parse.is_empty() {\n            return ParseResult::None;\n        } else if sent_parse.len() == 1 {\n            let parsed = sent_parse.remove(0);\n            return parsed\n                .has_inflections()\n                .then(|| ParseResult::InflectedWord(parsed))\n                .unwrap_or(ParseResult::None);\n        }\n\n        ParseResult::Sentence(Sentence::new(sent_parse))\n    }\n}\n"
  },
  {
    "path": "lib/sentence_reader/src/output.rs",
    "content": "use crate::sentence::part::Part;\n\n/// Result of a sentence/inflection analysis\n#[derive(Debug, Clone)]\npub enum ParseResult {\n    Sentence(Sentence),\n    InflectedWord(Part),\n    None,\n}\n\nimpl ParseResult {\n    /// Returns `true` if the parse result is [`Sentence`].\n    ///\n    /// [`Sentence`]: ParseResult::Sentence\n    #[inline]\n    pub fn is_sentence(&self) -> bool {\n        matches!(self, Self::Sentence(..))\n    }\n\n    /// Returns `true` if the parse result is [`InflectedWord`].\n    ///\n    /// [`InflectedWord`]: ParseResult::InflectedWord\n    #[inline]\n    pub fn is_inflected_word(&self) -> bool {\n        matches!(self, Self::InflectedWord(..))\n    }\n\n    /// Returns `true` if the parse result is [`None`].\n    ///\n    /// [`None`]: ParseResult::None\n    #[inline]\n    pub fn is_none(&self) -> bool {\n        matches!(self, Self::None)\n    }\n\n    #[inline]\n    pub fn as_sentence(&self) -> Option<&Sentence> {\n        if let Self::Sentence(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n\n    #[inline]\n    pub fn as_inflected_word(&self) -> Option<&Part> {\n        if let Self::InflectedWord(v) = self {\n            Some(v)\n        } else {\n            None\n        }\n    }\n}\n\n/// A split sentence\n#[derive(Debug, Clone, PartialEq)]\npub struct Sentence {\n    parts: Vec<Part>,\n}\n\nimpl Sentence {\n    #[inline]\n    pub fn new(parts: Vec<Part>) -> Self {\n        Self { parts }\n    }\n\n    /// Returns word at `pos`\n    #[inline]\n    pub fn get_at(&self, pos: usize) -> Option<&Part> {\n        self.parts.get(pos)\n    }\n\n    /// Returns word at `pos`\n    #[inline]\n    pub fn get_at_mut(&mut self, pos: usize) -> Option<&mut Part> {\n        self.parts.get_mut(pos)\n    }\n\n    #[inline]\n    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut Part> {\n        self.parts.iter_mut()\n    }\n\n    #[inline]\n    pub fn iter(&self) -> impl Iterator<Item = 
&Part> {\n        self.parts.iter()\n    }\n\n    /// returns amount of words\n    #[inline]\n    pub fn word_count(&self) -> usize {\n        self.parts.len()\n    }\n\n    /// Returns all parts owned\n    #[inline]\n    pub fn into_parts(self) -> Vec<Part> {\n        self.parts\n    }\n}\n"
  },
  {
    "path": "lib/sentence_reader/src/sentence/inflection.rs",
    "content": "use super::FromMorphemes;\nuse crate::grammar::{rule::Rule, rule_set::RuleSet, Analyzer};\nuse crate::sentence::SentenceAnalyzer;\nuse igo_unidic::Morpheme;\nuse once_cell::sync::Lazy;\nuse types::jotoba::words::inflection::Inflection;\n\n/*\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum Inflection {\n    Negative,\n    Polite,\n    Present,\n    Past,\n    TeForm,\n    Potential,\n    Passive,\n    Causative,\n    CausativePassive,\n    PotentialOrPassive,\n    Imperative,\n    Tai,\n    TeIru,\n    TeAru,\n    TeMiru,\n    TeShimau,\n    Chau,\n    TeOku,\n    Toku,\n    Tara,\n    Tari,\n}\n*/\n\nimpl<'b> FromMorphemes<'static, 'b> for Inflection {\n    /// Parses an inflection from given morpheme(s)\n    fn from(parts: Vec<igo_unidic::Morpheme<'static, 'b>>, _pos: usize) -> Option<Self> {\n        let lexemes = parts.iter().map(|i| i.lexeme).collect::<Vec<_>>();\n\n        if lexemes.is_empty() {\n            None\n        } else if lexemes.len() == 1 {\n            if parts[0].surface == \"たら\" {\n                return Some(Self::Tara);\n            }\n\n            Some(match lexemes[0] {\n                \"ない\" | \"ぬ\" => Inflection::Negative,\n                \"ます\" => Inflection::Polite,\n                \"て\" | \"で\" => Inflection::TeForm,\n                \"だ\" | \"た\" => Inflection::Past,\n                \"れる\" => Inflection::Passive,\n                \"せる\" | \"させる\" => Inflection::Causative,\n                \"られる\" => Inflection::PotentialOrPassive,\n                \"たい\" => Inflection::Tai,\n                \"たり\" | \"だり\" => Inflection::Tari,\n                \"てる\" | \"でる\" => Inflection::TeIru,\n                \"とく\" | \"どく\" => Inflection::Toku,\n                \"ちゃう\" | \"じゃう\" => Inflection::Chau,\n                \"ば\" => Inflection::Ba,\n                _ => return None,\n            })\n        } else {\n            Some(match lexemes.as_slice() {\n                &[\"て\", \"いる\"] | &[\"で\", 
\"いる\"] => Inflection::TeIru,\n                &[\"て\", \"ある\"] | &[\"で\", \"ある\"] => Inflection::TeAru,\n                &[\"て\", \"みる\"] | &[\"で\", \"みる\"] => Inflection::TeMiru,\n                &[\"て\", \"しまう\"] | &[\"で\", \"しまう\"] => Inflection::TeShimau,\n                &[\"て\", \"おく\"] | &[\"で\", \"おく\"] => Inflection::TeOku,\n                &[\"さ\", \"せる\"] => Inflection::Causative,\n                // Fake する; The tokenizer tokenizes the さ of される as a form of する\n                &[\"する\", \"れる\"] => Inflection::CausativePassive,\n                _ => return None,\n            })\n        }\n    }\n}\n\npub(crate) fn parse_inflections(morph: &[Morpheme<'static, '_>]) -> Vec<Inflection> {\n    SentenceAnalyzer::new(&INFLECTION_RULES, morph.to_vec()).analyze::<Inflection>()\n}\n\nstatic INFLECTION_RULES: Lazy<Analyzer> = Lazy::new(|| Analyzer::new(get_rules()));\n\n/// Returns a set of rules for japanese text analyzing\nfn get_rules() -> RuleSet {\n    let mut rules = Vec::with_capacity(7);\n\n    rules.push(Rule::new(\"いる\", &[]));\n    rules.push(Rule::new(\"ある\", &[]));\n    rules.push(Rule::new(\"てみる\", &[]));\n    rules.push(Rule::new(\"しまう\", &[]));\n    rules.push(Rule::new(\"おく\", &[]));\n    rules.push(Rule::new(\"れる\", &[]));\n\n    rules.push(Rule::new(\n        \"て\",\n        &[\"いる\", \"ある\", \"てみる\", \"しまう\", \"おく\"],\n    ));\n    rules.push(Rule::new(\"さ\", &[\"れる\"]));\n\n    RuleSet::new(&rules)\n}\n"
  },
  {
    "path": "lib/sentence_reader/src/sentence/mod.rs",
    "content": "#![allow(dead_code)]\n\npub mod inflection;\npub mod owned_morpheme;\npub mod part;\n\nuse crate::grammar;\nuse igo_unidic::{Morpheme, WordClass};\n\npub trait FromMorphemes<'a, 'b>: Sized {\n    fn from(parts: Vec<Morpheme<'a, 'b>>, pos: usize) -> Option<Self>;\n}\n\nimpl<'b> FromMorphemes<'static, 'b> for (Vec<&'static str>, usize) {\n    #[inline]\n    fn from(parts: Vec<Morpheme<'static, 'b>>, pos: usize) -> Option<Self> {\n        let parts = parts.iter().map(|i| i.lexeme).collect::<Vec<_>>();\n        if parts.is_empty() {\n            return None;\n        }\n        Some((parts, pos))\n    }\n}\n\n/// An analyzer for sentences/text to portion morphemes together based on rules\npub struct SentenceAnalyzer<'input> {\n    grammar: &'input grammar::Analyzer,\n    morphemes: Vec<Morpheme<'static, 'input>>,\n}\n\nimpl<'input> SentenceAnalyzer<'input> {\n    /// Create a new SentenceAnalyer\n    pub fn new(\n        grammar: &'input grammar::Analyzer,\n        morphemes: Vec<Morpheme<'static, 'input>>,\n    ) -> Self {\n        Self { grammar, morphemes }\n    }\n\n    /// Returns `true` if SentenceAnalyer would yield no words\n    pub fn is_empty(&self) -> bool {\n        self.morphemes.is_empty()\n    }\n\n    /// Executes the analyzation and returns a set of Words which are built out of 1..n morphemes\n    pub fn analyze<O: FromMorphemes<'static, 'input>>(&self) -> Vec<O> {\n        let morphs = &self.morphemes;\n\n        let mut out = Vec::new();\n        let mut pos = 0;\n\n        loop {\n            let curr = match morphs.get(pos) {\n                Some(n) => n,\n                None => break,\n            };\n\n            // Collect rules of next n morphemes\n            let rules: Vec<_> = morphs[pos..]\n                .iter()\n                .enumerate()\n                .map(|(pos, m)| map_morph_to_rule(pos, m))\n                // if a morphemes does not have a rule, we can stop\n                // collecting all rules since the 
analyzer would stop\n                // at a `None` rule anyways\n                .take_while(|i| i.is_some())\n                .map(|i| i.unwrap())\n                .collect();\n\n            let n_matching = self.grammar.check(&rules);\n            let mut parts = (0..n_matching).map(|i| morphs[pos + i]).collect::<Vec<_>>();\n\n            if parts.is_empty() {\n                parts.push(*curr);\n                pos += 1;\n            }\n\n            pos += n_matching;\n\n            let word_position = out.len();\n            if let Some(word) = O::from(parts, word_position) {\n                out.push(word);\n            }\n        }\n\n        out\n    }\n\n    /// Returns the raw morphemes of the sentence\n    pub fn morphemes(&self) -> &Vec<Morpheme<'_, '_>> {\n        &self.morphemes\n    }\n\n    pub fn debug(&self) {\n        for i in self.morphemes.iter() {\n            println!(\"{}\\t({})({:?})\", i.surface, i.lexeme, i.word_class);\n        }\n\n        println!();\n\n        for i in self.analyze::<part::Part>() {\n            print!(\n                \"{}|\",\n                i.morphemes()\n                    .iter()\n                    .map(|i| i.surface.as_str())\n                    .collect::<String>()\n            );\n        }\n        println!();\n    }\n}\n\npub(crate) fn map_morph_to_rule(pos: usize, morph: &Morpheme<'_, '_>) -> Option<&'static str> {\n    if morph.surface == \"じゃ\" {\n        return Some(\"じゃ\");\n    }\n\n    if morph.lexeme == \"ない\" {\n        return Some(\"ない\");\n    }\n\n    if morph.lexeme == \"たい\" {\n        return Some(\"たい\");\n    }\n\n    if (morph.lexeme == \"た\" || morph.lexeme == \"だ\") && morph.surface != \"に\" {\n        return Some(\"た\");\n    }\n\n    if morph.lexeme == \"たり\" || morph.lexeme == \"だり\" {\n        return Some(\"たり\");\n    }\n\n    if morph.lexeme == \"てる\" || morph.lexeme == \"でる\" {\n        return Some(\"てる\");\n    }\n\n    if morph.lexeme == \"て\" || morph.lexeme == \"で\" {\n   
     return Some(\"て\");\n    }\n\n    if morph.lexeme == \"ある\" {\n        return Some(\"ある\");\n    }\n\n    if morph.lexeme == \"いる\" {\n        return Some(\"いる\");\n    }\n\n    if morph.lexeme == \"ます\" {\n        return Some(\"ます\");\n    }\n\n    if morph.lexeme == \"られる\" {\n        return Some(\"られる\");\n    }\n\n    if morph.lexeme == \"れる\" {\n        return Some(\"れる\");\n    }\n\n    if morph.lexeme == \"しまう\" {\n        return Some(\"しまう\");\n    }\n\n    if morph.lexeme == \"ちゃう\" || morph.lexeme == \"じゃう\" {\n        return Some(\"ちゃう\");\n    }\n\n    if morph.lexeme == \"おく\" {\n        return Some(\"おく\");\n    }\n\n    if morph.lexeme == \"とく\" || morph.lexeme == \"どく\" {\n        return Some(\"とく\");\n    }\n\n    if morph.lexeme == \"ば\" {\n        return Some(\"ば\");\n    }\n\n    if morph.lexeme == \"ぬ\" {\n        return Some(\"ん\");\n    }\n\n    if morph.lexeme == \"です\" {\n        return Some(\"です\");\n    }\n\n    if morph.surface == \"さ\" && morph.lexeme == \"する\" {\n        return Some(\"さ\");\n    }\n\n    if morph.lexeme == \"させる\" {\n        return Some(\"させる\");\n    }\n\n    if morph.lexeme == \"せる\" {\n        return Some(\"せる\");\n    }\n\n    if morph.lexeme == \"頂\" && morph.surface == \"頂\" && morph.reading == \"イタダキ\" {\n        return Some(\"いただき\");\n    }\n\n    // てみる form. Can only be applied if not pos==0. 
If pos == 0, the word 見る is being used\n    // which does not go with the みる rule\n    if (morph.surface == \"み\" || morph.lexeme == \"みる\") && pos > 0 {\n        return Some(\"てみる\");\n    }\n\n    if let WordClass::Noun(noun_type) = morph.word_class {\n        return Some(match noun_type {\n            igo_unidic::NounType::Numeral => \"NR\",\n            _ => \"N\",\n        });\n    }\n\n    if morph.word_class.is_adjective() {\n        return Some(\"AD\");\n    }\n\n    if morph.word_class.is_verb() {\n        return Some(\"V\");\n    }\n\n    None\n}\n\n/*\n * TODO: fix Parser not being static\n#[cfg(test)]\nmod test {\n    use crate::grammar::Analyzer;\n    use igo_unidic::Parser;\n\n    use super::*;\n\n    fn get_parser() -> Parser {\n        igo_unidic::Parser::new(\"../../unidic-mecab\").unwrap()\n    }\n\n    fn get_g_analyzer() -> &'static Analyzer {\n        crate::analyzer::get_grammar_analyzer()\n    }\n\n    #[test]\n    pub fn test_analyzer() {\n        let analyzer = get_g_analyzer();\n        assert!(analyzer.rules().check());\n        assert_eq!(analyzer.check(&[\"ない\", \"て\"]), 2);\n        assert_eq!(analyzer.check(&[\"ない\", \"abc\"]), 1);\n        assert_eq!(analyzer.check(&[\"い\", \"い\"]), 0);\n        assert_eq!(analyzer.check(&[\"V\", \"たい\", \"ない\"]), 3);\n    }\n\n    #[test]\n    pub fn test_single_words() {\n        let words = &[\n            \"見たくない\",\n            \"見る\",\n            \"見ます\",\n            \"見たい\",\n            \"見たくない\",\n            \"見たくなくて\",\n            \"見たくなかった\",\n            \"見て\",\n            \"見ている\",\n            \"見ています\",\n            \"見てある\",\n            \"見てあります\",\n            \"見ない\",\n            \"見なくて\",\n            \"見なかった\",\n            \"見ません\",\n            \"見ませんでした\",\n            \"見られる\",\n            \"見られて\",\n            \"見られている\",\n            \"見られない\",\n            \"見られなくて\",\n            \"見られなかった\",\n            \"見ちゃう\",\n            \"見てしまう\",\n            
\"持っていない\",\n            \"美味しい\",\n            \"美味しかった\",\n            \"美味しくない\",\n            \"美味しくなくて\",\n            \"美味しくなかった\",\n            \"美味しくて\",\n            \"便利\",\n            \"じゃない\",\n            \"じゃなかった\",\n            \"じゃなくて\",\n            \"いちゃう\",\n            \"いてしまう\",\n            \"行ってしまう\",\n            \"行っちゃう\",\n        ];\n\n        let analyzer = get_g_analyzer();\n        let parser = get_parser();\n\n        for word in words {\n            let sentenec_parser = SentenceAnalyer::new(&analyzer, parser.parse(word));\n            let analyzed = sentenec_parser.analyze();\n            if analyzed.len() != 1 {\n                println!(\"{word}\");\n                panic!(\"Word split to much\");\n            }\n            if analyzed[0].get_inflected() != *word {\n                println!(\"{word} != {}\", analyzed[0].get_inflected());\n                panic!(\"word is not equal to surface\");\n            }\n        }\n    }\n\n    #[test]\n    pub fn test_long_texts() {\n        let analyzer = get_g_analyzer();\n        let parser = get_parser();\n\n        let inp = &[\n            \"１８日午後０時５５分頃、札幌市中央区の２２階建てホテルの１４階にある屋外スペースで、女優の神田沙也加さん（３５）が意識不明の状態で倒れているのが見つかり、約９時間後に搬送先の病院で死亡した\",\n            \"北海道警は、宿泊していた高層階の部屋の窓から転落した可能性があるとみている。ホテル関係者によると、窓は縦、横とも約１メートル。全開できないよう安全装置が取り付けられていたという\",\n            \"神田沙也加さん死亡、ホテル高層階の部屋から転落か…連絡つかず事務所が警察に通報\",\n            \"昨今「ウケる」は、面白いという意味で頻繁に使用されています。如何なる面白さにも用いることができ、「この芸人さん超ウケるよね」とか「この遊びウケる」、「この蛇の動き超ウケる」というように使われます。\",\n            \"しかし、「ウケる」の定義の幅が多いため、「君の顔ウケるよね～」なんて言うと、言った本人に悪気がなくても、言われた側は気に障ってしまうかもしれません。「ウケる」という言葉は便利ですが、時と場所、相手を選んで使うようにしましょう。\",\n            \"「ウケる」という単語には「超ウケる」というような表現も存在します。これは「超面白い」と同様に「ウケる」に「超」が付いただけのものでありますが、「ウケる」の意味を強調して、本当に面白いさまを表します。\",\n                \"また「大ウケ」という言葉もあります。こちらは主観的に用いられがちな「ウケる」と違い、客観的な評価を表してしばしば使用されます。たとえば「二次会で披露したギャグが大ウケだった」という場合、自身の芸に観客が大盛り上がりしたという意味になります。\",\n        ];\n\n        for i in inp {\n            let sentenec_parser = 
SentenceAnalyer::new(&analyzer, parser.parse(i));\n            let analyzed = sentenec_parser.analyze();\n            let mut out = String::new();\n            for a in analyzed {\n                out.push_str(&a.get_inflected());\n            }\n            assert_eq!(*i, out);\n        }\n    }\n}\n*/\n"
  },
  {
    "path": "lib/sentence_reader/src/sentence/owned_morpheme.rs",
    "content": "use igo_unidic::{Conjungation, Morpheme, WordClass};\n\n#[derive(Clone, Debug, PartialEq)]\npub struct OwnedMorpheme<'dict> {\n    pub surface: String,\n    pub basic: &'dict str,\n    pub word_class: WordClass<'dict>,\n    pub conjungation: Conjungation,\n    pub reading: &'dict str,\n    pub lexeme: &'dict str,\n    pub start: usize,\n}\n\nimpl<'dict> From<Morpheme<'dict, '_>> for OwnedMorpheme<'dict> {\n    #[inline]\n    fn from(m: Morpheme<'dict, '_>) -> Self {\n        Self {\n            surface: m.surface.to_string(),\n            basic: m.basic,\n            word_class: m.word_class,\n            conjungation: m.conjungation,\n            reading: m.reading,\n            lexeme: m.lexeme,\n            start: m.start,\n        }\n    }\n}\n\nimpl<'dict> OwnedMorpheme<'dict> {\n    /// Gets the main lexeme. Falls back on surface if lexeme is empty\n    pub fn reading(&self) -> &str {\n        if !self.lexeme.is_empty() {\n            self.lexeme\n        } else {\n            &self.surface\n        }\n    }\n}\n"
  },
  {
    "path": "lib/sentence_reader/src/sentence/part.rs",
    "content": "use super::{inflection, owned_morpheme::OwnedMorpheme, FromMorphemes};\nuse igo_unidic::{Morpheme, WordClass};\nuse jp_utils::{\n    furi::{\n        segment::{kanji::as_kanji::AsKanjiSegment, AsSegment},\n        Furigana,\n    },\n    JapaneseExt,\n};\nuse types::{\n    api::app::search::responses::words::SentencePart,\n    jotoba::words::{inflection::Inflection, part_of_speech::PosSimple},\n};\n\n/// A single word within a sentence. This already contains all inflection parts\n#[derive(Debug, Clone, PartialEq)]\npub struct Part {\n    /// All morphemes building the (inflected) word\n    morphemes: Vec<OwnedMorpheme<'static>>,\n    inflections: Vec<Inflection>,\n    pos: usize,\n    furigana: Option<String>,\n}\n\nimpl Part {\n    /// Creates a new sentence part. Automatically parses additional morphemes to inflections\n    pub fn new(morphemes: Vec<Morpheme<'static, '_>>, pos: usize) -> Option<Self> {\n        if morphemes.len() == 0 {\n            return None;\n        }\n\n        // parse inflections\n        let inflections = inflection::parse_inflections(&morphemes[1..]);\n\n        // get them owned\n        let morphemes = morphemes.into_iter().map(|i| i.into()).collect::<Vec<_>>();\n\n        Some(Self {\n            furigana: None,\n            inflections,\n            pos,\n            morphemes,\n        })\n    }\n\n    /// Returns `true` if the part has at least one inflection\n    pub fn has_inflections(&self) -> bool {\n        !self.inflections().is_empty()\n    }\n\n    /// Get a reference to the parts morphemes.\n    pub fn morphemes(&self) -> &[OwnedMorpheme] {\n        &self.morphemes\n    }\n\n    /// Get a reference to the word's inflections.\n    pub fn inflections(&self) -> &[Inflection] {\n        &self.inflections\n    }\n\n    /// Returns the full surface of the part. If it has inflections, this surface represents the\n    /// word written with all inflections. 
If there are no inflections, this method returns the\n    /// same as `get_normalized()`\n    pub fn get_inflected(&self) -> String {\n        self.morphemes\n            .iter()\n            .map(|i| i.surface.as_str())\n            .collect::<String>()\n    }\n\n    /// Returns the normalized form of the word. All inflections are removed and the dictionary\n    /// form of the word is returned\n    pub fn get_normalized(&self) -> String {\n        self.get_main_morpheme().lexeme.to_string()\n    }\n\n    /// Get the part's pos.\n    pub fn pos(&self) -> usize {\n        self.pos\n    }\n\n    /// Sets the furigana\n    pub fn set_furigana<F>(&mut self, add_fn: F)\n    where\n        F: Fn(&str) -> Option<String>,\n    {\n        let mut out = String::new();\n        let mut has_furigana = false;\n\n        for morpheme in &self.morphemes {\n            if !morpheme.surface.has_kanji() {\n                out.push_str(&morpheme.surface);\n                continue;\n            }\n\n            if let Some(furi) = add_fn(morpheme.reading()) {\n                let surface = &morpheme.surface;\n\n                // check if `furi` really contains furigana. 
If this is not the case but\n                // `has_furigana` is true, the text will be rendered weird\n                if !furi.contains('|') || !can_merge_furi(surface, &furi) {\n                    out.push_str(&furi);\n                } else if let Some(furi) = merge_furigana(surface, &furi) {\n                    has_furigana = true;\n                    out.push_str(&furi);\n                }\n\n                continue;\n            }\n\n            out.push_str(&morpheme.surface);\n        }\n\n        if has_furigana {\n            self.furigana = Some(out);\n        }\n    }\n\n    /// Returns furigana of the word\n    pub fn furigana(&self) -> Option<&str> {\n        self.furigana.as_deref()\n    }\n\n    /// returns msgid for the current word_class or None if no word_class is set\n    pub fn word_class(&self) -> Option<&'static str> {\n        let main_morph = self.get_main_morpheme();\n        let main_morph_wc = main_morph.word_class;\n\n        if main_morph_wc.is_symbol() && !self.main_lexeme().is_symbol() {\n            return Some(\"Undetected\");\n        }\n\n        Some(match main_morph_wc {\n            WordClass::Particle(_) => \"Particle\",\n            WordClass::Verb(_) => \"Verb\",\n            WordClass::Adjective(_) => \"Adjective\",\n            WordClass::Adverb => \"Adverb\",\n            WordClass::Noun(_) => \"Noun\",\n            WordClass::Pronoun => \"Pronoun\",\n            WordClass::Interjection => \"Interjection\",\n            WordClass::Symbol => \"Symbol\",\n            WordClass::Conjungtion => \"Conjungtion\",\n            WordClass::Suffix => \"Suffix\",\n            WordClass::Prefix => \"Prefix\",\n            WordClass::PreNoun => \"Pre-noun\",\n            WordClass::Space => \"Space\",\n        })\n    }\n\n    pub fn word_class_raw(&self) -> &WordClass<'_> {\n        &self.get_main_morpheme().word_class\n    }\n\n    /// Gets wordclass in lowercase\n    pub fn word_class_lower(&self) -> Option<String> {\n       
 self.word_class().map(|i| i.to_lowercase())\n    }\n\n    /// Returns the morpheme containing the actual 'word' without any inflections\n    fn get_main_morpheme(&self) -> &OwnedMorpheme {\n        &self.morphemes[0]\n    }\n\n    /// Gets the main lexeme. Falls back on surface if lexeme is empty\n    fn main_lexeme(&self) -> &str {\n        self.get_main_morpheme().reading()\n    }\n}\n\nimpl<'b> FromMorphemes<'static, 'b> for Part {\n    #[inline]\n    fn from(parts: Vec<Morpheme<'static, 'b>>, pos: usize) -> Option<Self> {\n        Self::new(parts, pos)\n    }\n}\n\n/// Merges a reading with its given furigana. This is required for cases where `furi` does not\n/// represent he same kana reading as `src`.\n///\n/// Example:\n/// src: \"行った\" furi: \"[行|い]く\" => [行|い]った\nfn merge_furigana(src: &str, furi: &str) -> Option<String> {\n    let mut out_buf = String::new();\n\n    // All Kanji parts\n    // let mut kanji_furis = furigana::parse::from_str(furi)\n    let furi = Furigana(furi);\n    let mut kanji_furis = furi\n        .segments()\n        // .filter_map(|i| i.as_ref().map(|i| i.is_kanji()).unwrap_or(false).then(|| i))\n        .filter(|i| i.is_kanji())\n        .collect::<Vec<_>>()\n        .into_iter();\n\n    for src_part in jp_utils::tokenize::by_alphabet(src, true) {\n        if !src_part.is_kanji() {\n            out_buf.push_str(src_part);\n            continue;\n        }\n\n        let kanji_furi = kanji_furis.next()?;\n        if src_part != *kanji_furi.as_kanji().unwrap().literals() {\n            return None;\n        }\n        out_buf.push_str(&kanji_furi.encode());\n    }\n\n    Some(out_buf)\n}\n\n/// Returns `true` if the given src word can be merged with the given furigana\nfn can_merge_furi(src: &str, furi: &str) -> bool {\n    if !src.has_kanji() {\n        return false;\n    }\n\n    let furigana = Furigana(furi);\n    let kanji_furis = furigana\n        .segments()\n        .filter(|i| i.is_kanji())\n        .collect::<Vec<_>>();\n    
let mut kanji_furis = kanji_furis.into_iter();\n\n    for src_part in jp_utils::tokenize::by_alphabet(src, true) {\n        if !src_part.is_kanji() {\n            continue;\n        }\n\n        let kanji_furi = match kanji_furis.next() {\n            Some(v) => v,\n            None => return false,\n        };\n        if src_part != *kanji_furi.as_kanji().unwrap().literals() {\n            return false;\n        }\n    }\n\n    true\n}\n\nimpl Into<SentencePart> for Part {\n    #[inline]\n    fn into(self) -> SentencePart {\n        let furigana = self.furigana().map(|i| i.to_string());\n        let position = self.pos();\n        let inflected = self.get_inflected();\n        let word_class = self.word_class();\n        SentencePart::new(furigana, position, inflected, word_class)\n    }\n}\n\n/// Converts WordClass to simple part of speech\npub fn wc_to_simple_pos(wc: &WordClass) -> Option<PosSimple> {\n    Some(match wc {\n        WordClass::Particle(_) => PosSimple::Particle,\n        WordClass::Verb(_) => PosSimple::Verb,\n        WordClass::Adjective(_) => PosSimple::Adjective,\n        WordClass::Adverb => PosSimple::Adverb,\n        WordClass::Noun(_) => PosSimple::Noun,\n        WordClass::Pronoun => PosSimple::Pronoun,\n        WordClass::Interjection => PosSimple::Interjection,\n        WordClass::Conjungtion => PosSimple::Conjunction,\n        WordClass::Suffix => PosSimple::Suffix,\n        WordClass::Prefix => PosSimple::Prefix,\n        _ => return None,\n    })\n}\n"
  },
  {
    "path": "lib/types/Cargo.toml",
    "content": "[package]\nname = \"types\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\njapanese = { path = \"../japanese\", optional = true}\nlocalization = { path = \"../localization\", optional = true }\n### Note: This sub-crate is not allowed to have dependencies to other Jotoba crates, unless its only used if `jotoba_intern` is enabled.\njp_inflections = { git = \"https://github.com/JojiiOfficial/Japanese_Inflections\", optional=true }\n#jp_inflections = { path =\"../../../jp_inflections\", optional=true}\njp_utils = { git = \"https://github.com/JojiiOfficial/jp_utils\", features = [\"furigana\"] }\nstrum = { version = \"0.25.0\", features = [\"derive\"] }\nstrum_macros = \"0.25.1\"\nserde = { version = \"1.0.171\", features = [\"derive\"] }\nbitflags = { git = \"https://github.com/JojiiOfficial/BitFlags\" }\nitertools = \"0.11.0\"\n\n[features]\ndefault = [\"api\"]\n\n# This feature adds stuff required for Jotoba to work but not necessarily for extern crates, so its made optional\njotoba_intern = [\"localization\", \"api\", \"jp_inflections\", \"japanese\"]\n\n# Contains API types, and can be used as rust wrapper around the Jotoba API\napi = []\n\nraw_types = []\n\n[dev-dependencies]\ntest-case = \"3.1.0\"\n"
  },
  {
    "path": "lib/types/src/api/app/completions/mod.rs",
    "content": "use crate::jotoba::search::SearchTarget;\nuse serde::{Deserialize, Serialize};\n\n/// Request payload structure for suggestion endpoint\n#[derive(Deserialize, Debug)]\npub struct Request {\n    /// The search query to find suggestions for\n    pub input: String,\n\n    /// The user configured language\n    #[serde(default)]\n    pub lang: String,\n\n    /// The search type the input is designed for\n    #[serde(default)]\n    #[serde(rename = \"search_type\")]\n    pub search_target: SearchTarget,\n\n    #[serde(default)]\n    pub radicals: Vec<char>,\n\n    #[serde(default)]\n    pub hashtag: bool,\n}\n\n/// Response struct for suggestion endpoint\n#[derive(Serialize, Deserialize, Default)]\npub struct Response {\n    pub suggestions: Vec<WordPair>,\n    pub suggestion_type: SuggestionType,\n}\n\nimpl Response {\n    #[inline]\n    pub fn new(suggestions: Vec<WordPair>) -> Self {\n        Self {\n            suggestions,\n            suggestion_type: SuggestionType::Default,\n        }\n    }\n\n    #[inline]\n    pub fn with_type(suggestions: Vec<WordPair>, suggestion_type: SuggestionType) -> Self {\n        Self {\n            suggestions,\n            suggestion_type,\n        }\n    }\n}\n\n/// The type of suggestion. 
`Default` in most cases\n#[derive(Deserialize, Serialize, Default)]\n#[serde(rename_all = \"snake_case\")]\npub enum SuggestionType {\n    /// Default suggestion type\n    #[default]\n    Default,\n    /// Special suggestion type for kanji readings\n    KanjiReading,\n    /// Hash tag suggestions\n    Hashtag,\n}\n\n/// A word with kana and kanji reading used within [`SuggestionResponse`]\n#[derive(Serialize, Deserialize, Default, PartialEq, Eq, Debug, Hash, Clone)]\npub struct WordPair {\n    pub primary: String,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub secondary: Option<String>,\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl WordPair {\n    #[inline]\n    pub fn new(primary: String) -> Self {\n        Self {\n            primary,\n            secondary: None,\n        }\n    }\n\n    #[inline]\n    pub fn with_secondary(primary: String, secondary: String) -> Self {\n        Self {\n            primary,\n            secondary: Some(secondary),\n        }\n    }\n\n    /// Returns true if [`self`] contains [`reading`]\n    #[inline]\n    pub fn has_reading(&self, reading: &str) -> bool {\n        self.primary == reading\n            || self\n                .secondary\n                .as_ref()\n                .map(|i| i == reading)\n                .unwrap_or_default()\n    }\n\n    #[inline]\n    pub fn secondary_preferred(&self) -> &String {\n        self.secondary.as_ref().unwrap_or(&self.primary)\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl From<&crate::jotoba::words::Word> for WordPair {\n    #[inline]\n    fn from(word: &crate::jotoba::words::Word) -> Self {\n        let main_reading = word.get_reading().reading.to_owned();\n        if word.reading.kanji.is_some() {\n            WordPair {\n                secondary: Some(main_reading),\n                primary: word.reading.kana.reading.clone(),\n            }\n        } else {\n            WordPair {\n                primary: main_reading,\n                secondary: 
None,\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/details/mod.rs",
    "content": "pub mod query;\npub mod sentence;\npub mod word;\n"
  },
  {
    "path": "lib/types/src/api/app/details/query.rs",
    "content": "use crate::{\n    api::app::deserialize_lang,\n    jotoba::language::{LangParam, Language},\n};\nuse serde::Deserialize;\n\n#[derive(Deserialize)]\npub struct DetailsPayload {\n    pub sequence: u32,\n    #[serde(deserialize_with = \"deserialize_lang\")]\n    pub language: Language,\n    pub show_english: bool,\n}\n\nimpl DetailsPayload {\n    #[inline]\n    pub fn lang_param(&self) -> LangParam {\n        LangParam::with_en_raw(self.language, self.show_english)\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/details/sentence.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse crate::api::app::search::responses::{kanji::Kanji, sentences::Sentence, words::Word};\n\n#[derive(Serialize, Deserialize)]\npub struct Details {\n    sentence: Sentence,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    words: Vec<Word>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    kanji: Vec<Kanji>,\n}\n\nimpl Details {\n    pub fn new(sentence: Sentence, words: Vec<Word>, kanji: Vec<Kanji>) -> Self {\n        Self {\n            sentence,\n            words,\n            kanji,\n        }\n    }\n}\n\n"
  },
  {
    "path": "lib/types/src/api/app/details/word.rs",
    "content": "use serde::Serialize;\n\nuse crate::{\n    api::{app::search::responses::kanji::Kanji, app::search::responses::words::Word},\n    jotoba::words::inflection::Inflections,\n};\n\n#[derive(Serialize)]\npub struct Details {\n    word: Word,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    kanji: Vec<Kanji>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    conjugations: Option<Inflections>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    collocations: Vec<Word>,\n    has_sentence: bool,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    transitivity_pair: Option<TransitivityPair>,\n}\n\n#[derive(Serialize)]\n#[serde(tag = \"t\", content = \"w\")]\npub enum TransitivityPair {\n    Transitive(u32),\n    Intransitive(u32),\n}\n\nimpl Details {\n    #[inline]\n    pub fn new(\n        word: Word,\n        kanji: Vec<Kanji>,\n        conjugations: Option<Inflections>,\n        collocations: Vec<Word>,\n        has_sentence: bool,\n        transitivity_pair: Option<TransitivityPair>,\n    ) -> Self {\n        Self {\n            word,\n            kanji,\n            conjugations,\n            collocations,\n            has_sentence,\n            transitivity_pair,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/image/mod.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n/// Scan endpoint response\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    pub text: String,\n}\n\n/// Scan endpoint request\n#[derive(Deserialize)]\npub struct Request {\n    /// The min amount of confidence the image scan resulted in. Everything below will be treated\n    /// as fail\n    #[serde(default = \"default_conf_threshold\")]\n    pub threshold: i32,\n}\n\n/// Default min threshold value for detection confidence\n#[inline]\nfn default_conf_threshold() -> i32 {\n    55\n}\n"
  },
  {
    "path": "lib/types/src/api/app/kanji/ids_tree.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n#[derive(Deserialize, Serialize)]\npub struct Request {\n    pub literal: char,\n    pub full: bool,\n}\n\n#[derive(Deserialize, Serialize)]\npub struct Response {\n    tree: OutObject,\n    has_big: bool,\n}\n\nimpl Response {\n    pub fn new(tree: OutObject, has_big: bool) -> Self {\n        Self { tree, has_big }\n    }\n}\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]\npub struct OutObject {\n    name: char,\n    literal_available: bool,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    children: Vec<OutObject>,\n}\n\nimpl OutObject {\n    #[inline]\n    pub fn new(name: char) -> Self {\n        Self {\n            name,\n            children: vec![],\n            literal_available: false,\n        }\n    }\n\n    #[inline]\n    pub fn with_children(name: char, children: Vec<OutObject>) -> Self {\n        Self {\n            name,\n            children,\n            literal_available: false,\n        }\n    }\n\n    #[inline]\n    pub fn add_child(&mut self, child: Self) {\n        self.children.push(child)\n    }\n\n    #[inline]\n    pub fn set_literal_available(&mut self, literal_available: bool) {\n        self.literal_available = literal_available;\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/kanji/mod.rs",
    "content": "pub mod ids_tree;\n"
  },
  {
    "path": "lib/types/src/api/app/mod.rs",
    "content": "pub mod completions;\npub mod details;\npub mod image;\npub mod kanji;\npub mod news;\npub mod radical;\npub mod search;\n\nuse crate::jotoba::language::Language;\nuse serde::{Deserialize, Deserializer};\nuse std::str::FromStr;\n\n/// Deserializes a field into an Option<Language>. None if invalid lang-str, empty or Deserializing str\n/// failed\n#[inline]\npub fn deserialize_lang_option<'de, D>(s: D) -> Result<Option<Language>, D::Error>\nwhere\n    D: Deserializer<'de>,\n{\n    let s = String::deserialize(s)?;\n    if s.trim().is_empty() {\n        return Ok(None);\n    }\n    return Ok(Language::from_str(&s).ok());\n}\n\n/// Deserializes a field into a Language. Falls back to the default language if the lang-str\n/// is invalid or deserializing failed\n#[inline]\npub fn deserialize_lang<'de, D>(s: D) -> Result<Language, D::Error>\nwhere\n    D: Deserializer<'de>,\n{\n    let lang = Language::from_str(&String::deserialize(s)?).unwrap_or_default();\n    return Ok(lang);\n}\n"
  },
  {
    "path": "lib/types/src/api/app/news/long.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse super::NewsEntry;\n\n#[derive(Deserialize)]\npub struct Request {\n    pub id: u32,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    pub entry: NewsEntry,\n}\n"
  },
  {
    "path": "lib/types/src/api/app/news/mod.rs",
    "content": "pub mod long;\npub mod short;\n\nuse serde::{Deserialize, Serialize};\n\n#[derive(Serialize, Deserialize, Clone)]\npub struct NewsEntry {\n    pub id: u32,\n    pub title: String,\n    pub html: String,\n    pub creation_time: u64,\n    pub trimmed: bool,\n}\n"
  },
  {
    "path": "lib/types/src/api/app/news/short.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse super::NewsEntry;\n\n#[derive(Deserialize)]\npub struct Request {\n    pub after: u64,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    pub entries: Vec<NewsEntry>,\n}\n"
  },
  {
    "path": "lib/types/src/api/app/radical/find_kanji.rs",
    "content": "use serde::{Deserialize, Serialize};\nuse std::collections::HashMap;\n\n/// Request struct for kanji_by_radicals endpoint\n#[derive(Deserialize)]\npub struct Request {\n    pub radicals: Vec<char>,\n}\n\n/// Response struct for kanji_by_radicals endpoint\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    pub kanji: HashMap<u32, Vec<char>>,\n    pub possible_radicals: HashMap<u32, Vec<char>>,\n}\n"
  },
  {
    "path": "lib/types/src/api/app/radical/mod.rs",
    "content": "pub mod find_kanji;\npub mod search;\n"
  },
  {
    "path": "lib/types/src/api/app/radical/search.rs",
    "content": "use serde::{Deserialize, Serialize};\nuse std::collections::{BTreeSet, HashMap};\n\n/// Request struct for kanji_by_radicals endpoint\n#[derive(Deserialize)]\npub struct Request {\n    pub query: String,\n}\n\n/// Response struct for kanji_by_radicals endpoint\n#[derive(Serialize, Deserialize, Default)]\npub struct Response {\n    pub radicals: HashMap<u8, BTreeSet<char>>,\n    pub kanji: Vec<KanjiRads>,\n}\n\n/// Kanji literal with radicals\n#[derive(Serialize, Deserialize, Default, PartialEq, Eq)]\npub struct KanjiRads {\n    pub kanji: char,\n    pub rads: HashMap<u32, Vec<char>>,\n}\n\nimpl KanjiRads {\n    #[inline]\n    pub fn new(kanji: char, rads: HashMap<u32, Vec<char>>) -> Self {\n        Self { kanji, rads }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/mod.rs",
    "content": "pub mod query;\npub mod responses;\n"
  },
  {
    "path": "lib/types/src/api/app/search/query.rs",
    "content": "use crate::{\n    api::app::{deserialize_lang, deserialize_lang_option},\n    jotoba::language::{LangParam, Language},\n};\nuse serde::Deserialize;\n\n#[derive(Debug, Clone, Deserialize)]\npub struct SearchPayload {\n    pub settings: UserSettings,\n\n    /// Searched query text\n    pub query_str: String,\n\n    /// Result page\n    #[serde(default)]\n    pub page: Option<u32>,\n\n    /// Index in sentence reader\n    #[serde(default)]\n    pub word_index: Option<usize>,\n\n    /// Language overwrite\n    #[serde(default, deserialize_with = \"deserialize_lang_option\")]\n    pub lang_overwrite: Option<Language>,\n}\n\nimpl SearchPayload {\n    /// Returns language parameters for the query\n    #[inline]\n    pub fn lang_param(&self) -> LangParam {\n        self.settings.lang_param()\n    }\n}\n\n/// APP settings\n#[derive(Debug, Clone, Copy, Deserialize)]\npub struct UserSettings {\n    #[serde(deserialize_with = \"deserialize_lang\")]\n    pub user_lang: Language,\n    pub show_english: bool,\n    pub page_size: u32,\n    pub show_example_sentences: bool,\n    pub sentence_furigana: bool,\n}\n\nimpl UserSettings {\n    /// Returns language parameters for user settings\n    #[inline]\n    pub fn lang_param(&self) -> LangParam {\n        LangParam::with_en_raw(self.user_lang, self.show_english)\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/k_compounds.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n/// Response for kanji compound request\n#[derive(Deserialize, Serialize)]\npub struct CompoundResponse {\n    pub compounds: Vec<CompoundSet>,\n}\n\nimpl CompoundResponse {\n    #[inline]\n    pub fn new(compounds: Vec<CompoundSet>) -> Self {\n        Self { compounds }\n    }\n}\n\n/// Set of compounds for a single kanji\n#[derive(Deserialize, Serialize)]\npub struct CompoundSet {\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub on: Vec<CompoundWord>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub kun: Vec<CompoundWord>,\n}\n\nimpl CompoundSet {\n    #[inline]\n    pub fn new(on: Vec<CompoundWord>, kun: Vec<CompoundWord>) -> Self {\n        Self { on, kun }\n    }\n}\n\n/// A word used in kanji compounds\n#[derive(Clone, Debug, Serialize, Deserialize)]\npub struct CompoundWord {\n    pub jp: String,\n    pub kana: String,\n    pub translations: Vec<String>,\n}\n\nimpl CompoundWord {\n    /// Create a new CompoundWord\n    pub fn new(jp: String, kana: String, translations: Vec<String>) -> Self {\n        Self {\n            jp,\n            kana,\n            translations,\n        }\n    }\n\n    /// Converts a Word to a CompoundWord. Takes ALL senses and ALL glosses. If you only want\n    /// some of the glosses, filter them first\n    pub fn from_word(word: &crate::jotoba::words::Word) -> Self {\n        let jp = word.get_reading().reading.clone();\n        let kana = word.reading.kana.reading.clone();\n        let translations = word\n            .senses\n            .iter()\n            .map(|i| i.glosses.clone())\n            .flatten()\n            .map(|i| i.gloss)\n            .collect::<Vec<String>>();\n        Self::new(jp, kana, translations)\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/kanji.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse crate::jotoba::kanji::radical::DetailedRadical;\n\n/// Kanji API response. Contains all kanji\n#[derive(Clone, Debug, Serialize)]\npub struct KanjiResponse {\n    kanji: Vec<Kanji>,\n}\n\nimpl KanjiResponse {\n    #[inline]\n    pub fn new(kanji: Vec<Kanji>) -> Self {\n        Self { kanji }\n    }\n}\n\n/// Kanji information\n#[derive(Clone, Debug, Serialize, Deserialize)]\npub struct Kanji {\n    pub literal: char,\n    pub stroke_count: u8,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub grade: Option<u8>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub frequency: Option<u16>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub jlpt: Option<u8>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub onyomi: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub kunyomi: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub variant: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub chinese: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub korean_romaji: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub korean_hangul: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub nanori: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub similar_kanji: Vec<char>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub meanings: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub parts: Vec<char>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub vietnamese: Vec<String>,\n    pub has_compounds: bool,\n    pub radical: DetailedRadical,\n}\n\nimpl From<crate::jotoba::kanji::Kanji> for Kanji {\n    #[inline]\n    fn from(k: crate::jotoba::kanji::Kanji) -> Self {\n        let has_compounds = !k.on_dicts.is_empty() || 
!k.kun_dicts.is_empty();\n        Self {\n            literal: k.literal,\n            stroke_count: k.stroke_count,\n            grade: k.grade,\n            frequency: k.frequency,\n            jlpt: k.jlpt,\n            onyomi: k.onyomi,\n            kunyomi: k.kunyomi,\n            variant: k.variant,\n            chinese: k.chinese,\n            korean_romaji: k.korean_r,\n            korean_hangul: k.korean_h,\n            nanori: k.nanori,\n            similar_kanji: k.similar_kanji,\n            meanings: k.meanings,\n            parts: k.parts,\n            radical: k.radical,\n            vietnamese: k.vietnamese,\n            has_compounds,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/mod.rs",
    "content": "pub mod k_compounds;\npub mod kanji;\npub mod names;\npub mod sentences;\npub mod words;\n\nuse serde::Serialize;\n\nuse crate::jotoba::{pagination::page::Page, search::help::SearchHelp};\n\n#[derive(Serialize)]\npub struct Response<T: Serialize> {\n    #[serde(flatten)]\n    inner: Page<T>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    search_help: Option<SearchHelp>,\n}\n\nimpl<T: Serialize> Response<T> {\n    pub fn new(inner: Page<T>) -> Self {\n        Self {\n            inner,\n            search_help: None,\n        }\n    }\n\n    pub fn with_help(inner: Page<T>, search_help: SearchHelp) -> Self {\n        Self {\n            inner,\n            search_help: Some(search_help),\n        }\n    }\n\n    pub fn with_help_fn<S>(inner: Page<T>, help_fn: S) -> Self\n    where\n        S: Fn(&Page<T>) -> Option<SearchHelp>,\n    {\n        Self {\n            search_help: help_fn(&inner),\n            inner,\n        }\n    }\n\n    pub fn set_search_help(&mut self, search_help: SearchHelp) -> &mut Self {\n        self.search_help = Some(search_help);\n        self\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/names.rs",
    "content": "use serde::Serialize;\n\nuse crate::jotoba::names::Name;\n\n/// Names API response. Contains all Names\n#[derive(Clone, Debug, Serialize)]\npub struct Response {\n    names: Vec<Name>,\n}\n\nimpl Response {\n    #[inline]\n    pub fn new(names: Vec<Name>) -> Self {\n        Self { names }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/sentences.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n/// Names API response. Contains all Names\n#[derive(Serialize, Deserialize, Clone)]\npub struct Response {\n    sentences: Vec<Sentence>,\n}\n\nimpl Response {\n    #[inline]\n    pub fn new(sentences: Vec<Sentence>) -> Self {\n        Self { sentences }\n    }\n}\n\n#[derive(Serialize, Deserialize, Clone)]\npub struct Sentence {\n    sequence: u32,\n    content: String,\n    translation: String,\n}\n\nimpl Sentence {\n    /// Create a new sentence\n    #[inline]\n    pub fn new(sequence: u32, content: String, translation: String) -> Self {\n        Self {\n            sequence,\n            content,\n            translation,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/words/inflection.rs",
    "content": "use serde::Serialize;\n\nuse crate::jotoba::words::inflection::Inflection;\n\n#[derive(Clone, Serialize)]\npub struct InflectionInfo {\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    inflections: Vec<Inflection>,\n    /// The \"uninflected\" version\n    lexeme: String,\n}\n\nimpl InflectionInfo {\n    /// Create a new InflectionInfo\n    #[inline]\n    pub fn new(inflection: Vec<Inflection>, lexeme: String) -> Self {\n        Self {\n            inflections: inflection,\n            lexeme,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/words/mod.rs",
    "content": "mod inflection;\nmod sentence;\nmod word;\n\npub use inflection::*;\npub use sentence::*;\npub use word::*;\n\nuse super::kanji::Kanji;\nuse serde::Serialize;\n\n/// A word search response\n#[derive(Clone, Serialize)]\npub struct Response {\n    /// All word results for the current search\n    words: Vec<Word>,\n\n    /// Several kanji for the given words\n    kanji: Vec<Kanji>,\n\n    /// Parsed number from query\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    number: Option<String>,\n\n    /// Inflection information of the current word\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    infl_info: Option<InflectionInfo>,\n\n    /// Sentence reader data\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    sentence: Option<Sentence>,\n\n    /// Query that has actually been used for search\n    original_query: String,\n}\n\nimpl Response {\n    /// Create a new Response\n    pub fn new(\n        words: Vec<Word>,\n        kanji: Vec<Kanji>,\n        infl_info: Option<InflectionInfo>,\n        sentence: Option<Sentence>,\n        original_query: String,\n        number: Option<String>,\n    ) -> Self {\n        Self {\n            words,\n            kanji,\n            infl_info,\n            sentence,\n            original_query,\n            number,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/words/sentence.rs",
    "content": "use serde::Serialize;\n\n#[derive(Clone, Serialize)]\npub struct Sentence {\n    /// Currently selected part\n    curr_index: usize,\n    /// All Parts of the sentence\n    parts: Vec<SentencePart>,\n}\n\nimpl Sentence {\n    #[inline]\n    pub fn new(curr_index: usize, parts: Vec<SentencePart>) -> Self {\n        Self { curr_index, parts }\n    }\n}\n\n#[derive(Clone, Serialize)]\npub struct SentencePart {\n    /// Original inflected word\n    inflected: String,\n    /// Furigana of the inflected word. None if can't be\n    /// calculated or word is completely in kana\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    furigana: Option<String>,\n    /// Position of the sentence_part in the sentence\n    position: usize,\n    /// Part of Speech\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    word_class: Option<&'static str>,\n}\n\nimpl SentencePart {\n    #[inline]\n    pub fn new(\n        furigana: Option<String>,\n        position: usize,\n        inflected: String,\n        word_class: Option<&'static str>,\n    ) -> Self {\n        Self {\n            furigana,\n            position,\n            inflected,\n            word_class,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/app/search/responses/words/word.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse crate::jotoba::{\n    language::Language,\n    words::{\n        dialect::Dialect, field::Field, misc::Misc, part_of_speech::PartOfSpeech, pitch::Pitch,\n        sense::Gairaigo,\n    },\n};\n\n/// A single word item\n#[derive(Clone, Serialize, Deserialize)]\npub struct Word {\n    pub sequence: u32,\n    pub is_common: bool,\n    pub reading: String,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub alt_readings: Vec<String>,\n    pub senses: Vec<Sense>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub audio: Option<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub accents: Vec<Pitch>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub furigana: Option<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub jlpt_lvl: Option<u8>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub transive_version: Option<u32>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub intransive_version: Option<u32>,\n    pub sentences_available: u16,\n}\n\n#[derive(Clone, Serialize, Deserialize)]\npub struct Sense {\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub misc: Option<Misc>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub field: Option<Field>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub dialect: Option<Dialect>,\n    pub glosses: Vec<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub xref: Option<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub antonym: Option<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub information: Option<String>,\n    pub part_of_speech: Vec<PartOfSpeech>,\n    pub language: Language,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub example_sentence: Option<(String, String)>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n  
  pub gairaigo: Option<Gairaigo>,\n}\n"
  },
  {
    "path": "lib/types/src/api/internal/info/mod.rs",
    "content": "pub mod words;\n"
  },
  {
    "path": "lib/types/src/api/internal/info/words.rs",
    "content": "use crate::{\n    api::app::deserialize_lang,\n    jotoba::{\n        language::{LangParam, Language},\n        sentences::Sentence,\n        words::{part_of_speech::PosSimple, Word},\n    },\n};\nuse serde::{Deserialize, Serialize};\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct Request {\n    pub ids: Vec<u32>,\n    #[serde(deserialize_with = \"deserialize_lang\")]\n    pub language: Language,\n    pub show_english: bool,\n}\n\nimpl Request {\n    #[inline]\n    pub fn new(ids: Vec<u32>, language: Language, show_english: bool) -> Self {\n        Self {\n            ids,\n            language,\n            show_english,\n        }\n    }\n\n    #[inline]\n    pub fn lang_param(&self) -> LangParam {\n        LangParam::with_en_raw(self.language, self.show_english)\n    }\n}\n\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    pub items: Vec<WordItem>,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct WordItem {\n    pub word: Word,\n    pub sentences: Vec<Sentence>,\n    pub audio: Option<String>,\n    pub pos: Vec<PosSimple>,\n}\n\nimpl WordItem {\n    pub fn new(\n        word: Word,\n        sentences: Vec<Sentence>,\n        audio: Option<String>,\n        pos: Vec<PosSimple>,\n    ) -> Self {\n        Self {\n            word,\n            sentences,\n            audio,\n            pos,\n        }\n    }\n}\n\nimpl Response {\n    #[inline]\n    pub fn new(items: Vec<WordItem>) -> Self {\n        Self { items }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/internal/mod.rs",
    "content": "pub mod info;\n"
  },
  {
    "path": "lib/types/src/api/mod.rs",
    "content": "pub mod app;\npub mod internal;\npub mod search;\n"
  },
  {
    "path": "lib/types/src/api/search/kanji.rs",
    "content": "use std::path::Path;\n\nuse serde::{Deserialize, Serialize};\n\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    pub kanji: Vec<Kanji>,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct Kanji {\n    literal: String,\n    meanings: Vec<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    grade: Option<u8>,\n    stroke_count: u8,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    frequency: Option<u16>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    jlpt: Option<u8>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    variant: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    onyomi: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    kunyomi: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    chinese: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    korean_r: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    korean_h: Vec<String>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    parts: Vec<String>,\n    radical: String,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    stroke_frames: Option<String>,\n}\n\nimpl Kanji {\n    pub fn from<P: AsRef<Path>>(kanji: &crate::jotoba::kanji::Kanji, assets_path: P) -> Self {\n        let frames = kanji\n            .has_stroke_frames(assets_path)\n            .then(|| kanji.get_stroke_frames_url());\n\n        Self {\n            literal: kanji.literal.to_string(),\n            meanings: kanji.meanings.clone(),\n            grade: kanji.grade,\n            stroke_count: kanji.stroke_count,\n            frequency: kanji.frequency,\n            jlpt: kanji.jlpt,\n            variant: kanji.variant.clone(),\n            onyomi: kanji.onyomi.clone(),\n            kunyomi: kanji.kunyomi.clone(),\n            chinese: kanji.chinese.clone(),\n            korean_r: kanji.korean_r.clone(),\n            korean_h: 
kanji.korean_h.clone(),\n            parts: kanji.parts.iter().map(|i| i.to_string()).collect(),\n            radical: kanji.radical.literal.to_string(),\n            stroke_frames: frames,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/search/mod.rs",
    "content": "pub mod kanji;\npub mod name;\npub mod sentence;\npub mod word;\n\nuse serde::Deserialize;\n\nuse crate::jotoba::language::Language;\n\n/// A Search API payload\n#[derive(Deserialize)]\npub struct SearchRequest {\n    #[serde(rename = \"query\")]\n    pub query_str: String,\n\n    #[serde(default)]\n    pub language: Language,\n\n    #[serde(default)]\n    pub no_english: bool,\n}\n"
  },
  {
    "path": "lib/types/src/api/search/name.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse crate::jotoba::names::name_type::NameType;\n\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    names: Vec<Name>,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct Name {\n    pub kana: String,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub kanji: Option<String>,\n    pub transcription: String,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub name_type: Option<Vec<NameType>>,\n}\n\nimpl From<&crate::jotoba::names::Name> for Name {\n    #[inline]\n    fn from(name: &crate::jotoba::names::Name) -> Self {\n        Self {\n            kana: name.kana.clone(),\n            kanji: name.kanji.clone(),\n            transcription: name.transcription.clone(),\n            name_type: name.name_type.clone(),\n        }\n    }\n}\n\nimpl From<Vec<&crate::jotoba::names::Name>> for Response {\n    #[inline]\n    fn from(name: Vec<&crate::jotoba::names::Name>) -> Self {\n        let names: Vec<Name> = name.into_iter().map(Name::from).collect();\n        Self { names }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/search/sentence.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse crate::jotoba::language::Language;\n\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    sentences: Vec<Sentence>,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct Sentence {\n    pub content: String,\n    pub furigana: String,\n    pub translation: String,\n    pub language: Language,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub eng: Option<String>,\n}\n\nimpl From<Vec<Sentence>> for Response {\n    #[inline]\n    fn from(sentences: Vec<Sentence>) -> Self {\n        Self { sentences }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/api/search/word.rs",
    "content": "use std::path::Path;\n\nuse crate::{\n    api::search::kanji::Kanji,\n    jotoba::{\n        language::Language,\n        words::{\n            dialect::Dialect, field::Field, misc::Misc, part_of_speech::PartOfSpeech,\n            pitch::PitchPart,\n        },\n    },\n};\n\nuse serde::{Deserialize, Serialize};\n\n/// The API response struct for a word search\n#[derive(Serialize, Deserialize)]\npub struct Response {\n    kanji: Vec<Kanji>,\n    words: Vec<Word>,\n}\n\nimpl Response {\n    pub fn new(words: Vec<Word>, kanji: Vec<Kanji>) -> Self {\n        Self { kanji, words }\n    }\n\n    #[cfg(feature = \"jotoba_intern\")]\n    pub fn from<P: AsRef<Path>>(\n        wres: (\n            Vec<&crate::jotoba::words::Word>,\n            Vec<&crate::jotoba::kanji::Kanji>,\n        ),\n        assets_path: P,\n    ) -> Self {\n        let kanji = convert_kanji(wres.1, assets_path);\n        let words = convert_words(wres.0);\n\n        Self { kanji, words }\n    }\n}\n\n/// Represents a single Word result with 1 (main) Japanese reading and n glosses\n#[derive(Serialize, Deserialize)]\npub struct Word {\n    reading: Reading,\n    common: bool,\n    senses: Vec<Sense>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    alt_readings: Option<Vec<Reading>>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    audio: Option<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pitch: Option<Vec<PitchPart>>,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct Reading {\n    kana: String,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    kanji: Option<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    furigana: Option<String>,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct Sense {\n    glosses: Vec<String>,\n    pos: Vec<PartOfSpeech>,\n    language: Language,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    dialect: Option<Dialect>,\n    #[serde(skip_serializing_if = 
\"Option::is_none\")]\n    field: Option<Field>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    information: Option<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    antonym: Option<String>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    misc: Option<Misc>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    xref: Option<String>,\n}\n\nimpl From<&crate::jotoba::words::sense::Sense> for Sense {\n    fn from(sense: &crate::jotoba::words::sense::Sense) -> Self {\n        let pos = sense.part_of_speech.clone();\n\n        let glosses = sense\n            .glosses\n            .iter()\n            .map(|i| i.gloss.clone())\n            .collect::<Vec<_>>();\n\n        Self {\n            glosses,\n            pos,\n            language: sense.language,\n            dialect: sense.dialect,\n            field: sense.field,\n            information: sense.information.as_ref().cloned(),\n            antonym: sense.antonym.as_ref().cloned(),\n            misc: sense.misc,\n            xref: sense.xref.as_ref().cloned(),\n        }\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl From<&crate::jotoba::words::Word> for Word {\n    #[inline]\n    fn from(word: &crate::jotoba::words::Word) -> Self {\n        let kanji = word.reading.kanji.as_ref().map(|i| i.reading.clone());\n        let kana = word.reading.kana.clone().reading;\n        let furigana = word.furigana.clone();\n\n        let senses = word.senses.iter().map(|i| Sense::from(i)).collect();\n\n        let pitch = word.get_first_pitch().map(|i| i.parts.clone());\n\n        Self {\n            common: word.is_common(),\n            reading: Reading {\n                kanji,\n                kana,\n                furigana,\n            },\n            senses,\n            alt_readings: None,\n            audio: word.audio_file_name(),\n            pitch,\n        }\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\n#[inline]\nfn convert_kanji<P: 
AsRef<std::path::Path>>(\n    wres: Vec<&crate::jotoba::kanji::Kanji>,\n    assets_path: P,\n) -> Vec<Kanji> {\n    wres.into_iter()\n        .map(|i| Kanji::from(i, assets_path.as_ref()))\n        .collect()\n}\n\n#[cfg(feature = \"jotoba_intern\")]\n#[inline]\nfn convert_words(wres: Vec<&crate::jotoba::words::Word>) -> Vec<Word> {\n    wres.into_iter().map(|i| i.into()).collect()\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/indexes/hashtag.rs",
    "content": "use std::str::FromStr;\n\nuse crate::jotoba::search::SearchTarget;\nuse serde::{Deserialize, Serialize};\n\n#[derive(Debug, Serialize, Deserialize)]\npub struct RawHashtag {\n    pub tag: String,\n    pub s_targets: Vec<SearchTarget>,\n    pub freq: f32,\n}\n\nimpl RawHashtag {\n    pub fn new(tag: String, s_targets: Vec<SearchTarget>, freq: f32) -> Self {\n        Self {\n            tag,\n            s_targets,\n            freq,\n        }\n    }\n}\n\nimpl FromStr for RawHashtag {\n    type Err = ();\n\n    fn from_str(s: &str) -> Result<Self, Self::Err> {\n        if s.is_empty() {\n            return Err(());\n        }\n\n        let mut split = s.trim().split(' ');\n        let tag = split.next().ok_or(())?.to_string();\n        let freq = split.next().and_then(|i| i.parse::<f32>().ok()).ok_or(())?;\n        let s_targets = split\n            .map(|o| {\n                o.parse::<u8>()\n                    .ok()\n                    .and_then(|i| SearchTarget::try_from(i).ok())\n                    .unwrap()\n            })\n            .collect::<Vec<_>>();\n\n        Ok(RawHashtag::new(tag, s_targets, freq))\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/indexes/mod.rs",
    "content": "pub mod hashtag;\n"
  },
  {
    "path": "lib/types/src/jotoba/kanji/mod.rs",
    "content": "pub mod radical;\npub mod reading;\n\nuse self::{\n    radical::DetailedRadical,\n    reading::{Reading, ReadingType},\n};\nuse serde::{Deserialize, Serialize};\nuse std::{\n    char,\n    path::{Path, PathBuf},\n};\n\n/// A Kanji representing structure containing all available information about a single kanji\n/// character.\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\npub struct Kanji {\n    pub literal: char,\n    pub grade: Option<u8>,\n    pub stroke_count: u8,\n    pub frequency: Option<u16>,\n    pub jlpt: Option<u8>,\n    pub variant: Vec<String>,\n    pub onyomi: Vec<String>,\n    /// Japanese name readings\n    pub nanori: Vec<String>,\n    pub kunyomi: Vec<String>,\n    pub chinese: Vec<String>,\n    pub korean_r: Vec<String>,\n    pub korean_h: Vec<String>,\n    pub vietnamese: Vec<String>,\n    pub kun_dicts: Vec<u32>,\n    pub on_dicts: Vec<u32>,\n    pub similar_kanji: Vec<char>,\n    pub meanings: Vec<String>,\n    pub radical: DetailedRadical,\n    pub parts: Vec<char>,\n}\n\nimpl Kanji {\n    /// Returns the `ReadingType` of `reading` within readings of a kanji\n    pub fn get_reading_type(&self, reading: &str) -> Option<ReadingType> {\n        let in_on = self.in_on_reading(reading);\n        let in_kun = self.in_kun_reading(reading);\n\n        if in_on && !in_kun {\n            return Some(ReadingType::Onyomi);\n        } else if !in_on && in_kun {\n            return Some(ReadingType::Kunyomi);\n        }\n\n        None\n    }\n\n    /// Returns `true` if the kanji has `reading` within the `kunyomi`\n    #[inline]\n    pub fn in_kun_reading(&self, reading: &str) -> bool {\n        self.kunyomi.iter().any(|i| i.as_str() == reading)\n    }\n\n    /// Returns `true` if the kanji has `reading` within the `onyomi`\n    #[inline]\n    pub fn in_on_reading(&self, reading: &str) -> bool {\n        self.onyomi.iter().any(|i| i.as_str() == reading)\n    }\n\n    /// Tries to find the given reading in the kanjis readings 
and returns a `Reading` value if\n    /// found\n    pub fn find_reading(&self, reading: &str) -> Option<Reading> {\n        let on = self.onyomi.iter().find(|i| i == &reading);\n        let kun = self.kunyomi.iter().find(|i| i == &reading);\n\n        let r = on.or(kun)?;\n\n        let rt = if on.is_some() {\n            ReadingType::Onyomi\n        } else {\n            ReadingType::Kunyomi\n        };\n\n        Some(Reading::new(rt, self.literal, r.to_string()))\n    }\n\n    /// Returns an iterator over all readings\n    pub fn reading_iter(&self) -> impl Iterator<Item = (&String, u32)> {\n        self.kunyomi\n            .iter()\n            .chain(self.onyomi.iter())\n            .enumerate()\n            .map(|i| (i.1, i.0 as u32))\n    }\n\n    pub fn reading_from_pos(&self, pos: usize) -> Option<Reading> {\n        if pos < self.kunyomi.len() {\n            let r = self.kunyomi.get(pos).unwrap();\n            Some(Reading::new(\n                ReadingType::Kunyomi,\n                self.literal,\n                r.to_string(),\n            ))\n        } else {\n            let k_len = self.kunyomi.len();\n            let r = self.onyomi.get(pos - k_len)?;\n            Some(Reading::new(\n                ReadingType::Onyomi,\n                self.literal,\n                r.to_string(),\n            ))\n        }\n    }\n\n    #[deprecated(note = \"use find_reading instead\")]\n    #[inline]\n    pub fn get_literal_reading(&self, reading: &str) -> Option<String> {\n        Some(match self.get_reading_type(reading)? 
{\n            ReadingType::Kunyomi => literal_kun_reading(reading),\n            ReadingType::Onyomi => format_reading(reading),\n        })\n    }\n\n    /// Returns true if kanji has a given reading\n    #[inline]\n    pub fn has_reading(&self, reading: &str) -> bool {\n        self.in_on_reading(reading) || self.in_kun_reading(reading)\n    }\n\n    /// Returns `true` if the kanji has stroke frames\n    #[inline]\n    pub fn has_stroke_frames<P: AsRef<Path>>(&self, assets_path: P) -> bool {\n        self.get_animation_path(assets_path).exists()\n    }\n\n    /// Returns the url to stroke-frames svg\n    #[inline]\n    pub fn get_stroke_frames_url(&self) -> String {\n        format!(\"/assets/svg/kanji/{}_frames.svg\", self.literal)\n    }\n\n    /// Returns the local path of the stroke-frames\n    #[inline]\n    pub fn get_stroke_frames_path<P: AsRef<Path>>(&self, assets_path: P) -> PathBuf {\n        let frame_path = format!(\"svg/kanji/{}_frames.svg\", self.literal);\n        let frame_path = Path::new(&frame_path);\n        assets_path.as_ref().join(frame_path)\n        //format!(\"html/assets/svg/kanji/{}_frames.svg\", self.literal)\n    }\n\n    /// Returns the local path of the kanjis stroke-animation\n    #[inline]\n    pub fn get_animation_path<P: AsRef<Path>>(&self, assets_path: P) -> PathBuf {\n        //format!(\"html/assets/svg/kanji/{}.svg\", self.literal)\n        let frame_path = format!(\"svg/kanji/{}.svg\", self.literal);\n        let frame_path = Path::new(&frame_path);\n        assets_path.as_ref().join(frame_path)\n    }\n\n    /// Returns `true` if the kanji has a stroke animation file\n    #[inline]\n    pub fn has_animation_file<P: AsRef<Path>>(&self, assets_path: P) -> bool {\n        //Path::new(&self.get_animation_path()).exists()\n        self.get_animation_path(assets_path).exists()\n    }\n\n    /// Returns `true` if kanji has on or kun compounds (or both)\n    #[inline]\n    pub fn has_compounds(&self) -> bool {\n        
(!self.on_dicts.is_empty()) || (!self.kun_dicts.is_empty())\n    }\n}\n\n/// Formats a kun/on reading to a kana entry\n#[inline]\npub fn format_reading(reading: &str) -> String {\n    reading.replace('-', \"\").replace('.', \"\")\n}\n\n/// Returns the reading of a kanjis literal, given the kun reading\n#[inline]\npub fn literal_kun_reading(kun: &str) -> String {\n    kun.replace('-', \"\").split('.').next().unwrap().to_string()\n}\n\n/// Formats `literal` with `reading`, based on `ReadingType`\n///\n/// Example:\n///\n/// literal: 捗\n/// reading: はかど.る\n/// r_type: ReadingType::Kunyomi\n/// returns: 捗る\npub fn format_reading_with_literal(literal: char, reading: &str, r_type: ReadingType) -> String {\n    match r_type {\n        ReadingType::Kunyomi => {\n            let r = if reading.contains('.') {\n                let right = reading.split('.').nth(1).unwrap_or_default();\n                format!(\"{}{}\", literal, right)\n            } else {\n                literal.to_string()\n            };\n            r.replace(\"-\", \"\")\n        }\n        ReadingType::Onyomi => literal.to_string(),\n    }\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    fn reading_on1() -> Reading {\n        Reading::new(ReadingType::Onyomi, '長', \"ちょう\".to_string())\n    }\n\n    fn reading_kun() -> Reading {\n        Reading::new(ReadingType::Kunyomi, '長', \"なが.い\".to_string())\n    }\n\n    fn reading_kun2() -> Reading {\n        Reading::new(ReadingType::Kunyomi, '車', \"くるま\".to_string())\n    }\n\n    fn reading_kun3() -> Reading {\n        Reading::new(ReadingType::Kunyomi, '大', \"-おお.いに\".to_string())\n    }\n\n    #[test]\n    fn test_reading() {\n        let on1 = reading_on1();\n        let kun1 = reading_kun();\n        let kun2 = reading_kun2();\n        let kun3 = reading_kun3();\n        let readings = &[on1, kun1, kun2, kun3];\n\n        let formatted = &[\"長\", \"長い\", \"車\", \"大いに\"];\n        for (i, r) in readings.iter().enumerate() {\n            
assert_eq!(r.format_reading_with_literal(), formatted[i]);\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/kanji/radical.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n/// A single radical representing structure\n#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]\npub struct DetailedRadical {\n    pub id: u16,\n    pub literal: char,\n    pub alternative: Option<char>,\n    pub stroke_count: u8,\n    pub readings: Vec<String>,\n    pub translations: Option<Vec<String>>,\n}\n\n#[derive(Clone, Serialize, Deserialize)]\npub struct SearchRadicalInfo {\n    pub literal: char,\n    pub frequency: u16,\n    pub meanings: Vec<String>,\n}\n\n/// Represents a radical which gets used for kanji-searches\n#[derive(Debug, Clone, PartialEq)]\npub struct SearchRadical {\n    pub radical: char,\n    pub stroke_count: i32,\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/kanji/reading.rs",
    "content": "#[cfg(feature = \"jotoba_intern\")]\nuse japanese::ToKanaExt;\n\nuse super::Kanji;\n\n/// ReadingType of a kanji's reading. `Kunyomi` represents japanese readings and `Onyomi`\n/// represents original chinese readings.\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum ReadingType {\n    Kunyomi,\n    Onyomi,\n}\n\n#[derive(Clone, Debug)]\npub struct Reading {\n    r_type: ReadingType,\n    literal: char,\n    inner: String,\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Reading {\n    /// Returns a string with the reading and literal merged. If the reading is an onyomi reading,\n    /// this is equal to the literal. For kunyomi readings this can be an example: (inner: \"だま.る\") => \"黙る\".\n    /// This also formats the reading to hiragana\n    pub fn format_reading_with_literal(&self) -> String {\n        match self.r_type {\n            ReadingType::Kunyomi => {\n                let r = if self.inner.contains('.') {\n                    let right = self.inner.split('.').nth(1).unwrap_or_default();\n                    format!(\"{}{}\", self.literal, right)\n                } else {\n                    self.literal.to_string()\n                };\n                r.replace(\"-\", \"\")\n            }\n            ReadingType::Onyomi => self.literal.to_hiragana(),\n        }\n    }\n}\n\nimpl Reading {\n    pub(crate) fn new(r_type: ReadingType, literal: char, inner: String) -> Self {\n        Reading {\n            r_type,\n            literal,\n            inner,\n        }\n    }\n\n    /// Get the reading's r type.\n    #[inline]\n    pub fn get_type(&self) -> ReadingType {\n        self.r_type\n    }\n\n    /// Get a mutable reference to the reading's literal.\n    #[inline]\n    pub fn get_literal(&self) -> &char {\n        &self.literal\n    }\n\n    /// Get a reference to the reading's inner.\n    #[inline]\n    pub fn get_raw(&self) -> &str {\n        self.inner.as_ref()\n    }\n\n    /// Returns `true` if `kanji` has this reading\n    
#[inline]\n    pub fn matches_kanji(&self, kanji: &Kanji) -> bool {\n        self.literal == kanji.literal && kanji.has_reading(&self.inner)\n    }\n\n    /// Returns the literal as newly allocated `String`\n    #[inline]\n    pub fn get_lit_str(&self) -> String {\n        self.get_literal().to_string()\n    }\n\n    /// Returns `true` if the reading captures the entire literal\n    #[inline]\n    pub fn is_full_reading(&self) -> bool {\n        !self.inner.contains('-') && !self.inner.contains('.')\n    }\n}\n\nimpl PartialEq<ReadingType> for &Reading {\n    #[inline]\n    fn eq(&self, other: &ReadingType) -> bool {\n        self.r_type == *other\n    }\n}\n\n/// A kanji-reading search item\n#[derive(Debug, Clone, PartialEq, Hash)]\npub struct ReadingSearch {\n    /// The provided kanji literal\n    pub literal: char,\n    /// The provided kanji reading\n    pub reading: String,\n}\n\nimpl ReadingSearch {\n    #[inline]\n    pub fn new(literal: &str, reading: &str) -> Self {\n        ReadingSearch {\n            literal: literal.chars().next().unwrap(),\n            reading: reading.to_string(),\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/language/mod.rs",
    "content": "pub mod param;\n\npub use param::LangParam;\n\n#[cfg(feature = \"jotoba_intern\")]\nuse localization::traits::Translatable;\n\nuse serde::{Deserialize, Serialize};\nuse std::{array::IntoIter, convert::TryFrom};\nuse strum_macros::{AsRefStr, Display, EnumString};\n\n#[derive(\n    Debug, Display, PartialEq, Eq, Clone, Copy, AsRefStr, EnumString, Hash, Deserialize, Serialize,\n)]\n#[repr(u8)]\npub enum Language {\n    #[strum(serialize = \"eng\", serialize = \"en-US\")]\n    English,\n    #[strum(serialize = \"ger\", serialize = \"de-DE\", serialize = \"deu\")]\n    German,\n    #[strum(serialize = \"rus\", serialize = \"ru\")]\n    Russian,\n    #[strum(serialize = \"spa\", serialize = \"es-ES\")]\n    Spanish,\n    #[strum(serialize = \"swe\", serialize = \"sv-SE\")]\n    Swedish,\n    #[strum(serialize = \"fre\", serialize = \"fr-FR\", serialize = \"fra\")]\n    French,\n    #[strum(serialize = \"dut\", serialize = \"nl-NL\", serialize = \"nld\")]\n    Dutch,\n    #[strum(serialize = \"hun\", serialize = \"hu\")]\n    Hungarian,\n    #[strum(serialize = \"slv\", serialize = \"sl-SL\", serialize = \"svl\")]\n    Slovenian,\n    #[strum(serialize = \"jpn\", serialize = \"ja\", serialize = \"jp\")]\n    Japanese,\n}\n\nimpl Language {\n    /// Returns an iterator over all Languages\n    #[inline]\n    pub fn iter() -> IntoIter<Language, 10> {\n        [\n            Language::English,\n            Language::German,\n            Language::Russian,\n            Language::Spanish,\n            Language::Swedish,\n            Language::French,\n            Language::Dutch,\n            Language::Hungarian,\n            Language::Slovenian,\n            Language::Japanese,\n        ]\n        .into_iter()\n    }\n\n    /// Returns an iterator over all Languages which have words with this language\n    #[inline]\n    pub fn iter_word() -> IntoIter<Language, 9> {\n        [\n            Language::English,\n            Language::German,\n            
Language::Russian,\n            Language::Spanish,\n            Language::Swedish,\n            Language::French,\n            Language::Dutch,\n            Language::Hungarian,\n            Language::Slovenian,\n        ]\n        .into_iter()\n    }\n\n    pub fn to_query_format(&self) -> &'static str {\n        match *self {\n            Language::English => \"eng\",\n            Language::German => \"ger\",\n            Language::Russian => \"rus\",\n            Language::Spanish => \"spa\",\n            Language::Swedish => \"swe\",\n            Language::French => \"fre\",\n            Language::Dutch => \"dut\",\n            Language::Hungarian => \"hun\",\n            Language::Slovenian => \"slv\",\n            Language::Japanese => \"jpn\",\n        }\n    }\n}\n\nimpl Default for Language {\n    #[inline]\n    fn default() -> Self {\n        Self::English\n    }\n}\n\nimpl TryFrom<i32> for Language {\n    type Error = ();\n    #[inline]\n    fn try_from(i: i32) -> Result<Self, Self::Error> {\n        Ok(match i {\n            0 => Self::English,\n            1 => Self::German,\n            2 => Self::Russian,\n            3 => Self::Spanish,\n            4 => Self::Swedish,\n            5 => Self::French,\n            6 => Self::Dutch,\n            7 => Self::Hungarian,\n            8 => Self::Slovenian,\n            9 => Self::Japanese,\n            _ => return Err(()),\n        })\n    }\n}\n\nimpl Into<i32> for Language {\n    #[inline]\n    fn into(self) -> i32 {\n        match self {\n            Self::English => 0,\n            Self::German => 1,\n            Self::Russian => 2,\n            Self::Spanish => 3,\n            Self::Swedish => 4,\n            Self::French => 5,\n            Self::Dutch => 6,\n            Self::Hungarian => 7,\n            Self::Slovenian => 8,\n            Self::Japanese => 9,\n        }\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for Language {\n    #[inline]\n    fn get_id(&self) -> &'static 
str {\n        match self {\n            Language::English => \"English\",\n            Language::German => \"German\",\n            Language::Russian => \"Russian\",\n            Language::Spanish => \"Spanish\",\n            Language::Swedish => \"Swedish\",\n            Language::French => \"French\",\n            Language::Dutch => \"Dutch\",\n            Language::Hungarian => \"Hungarian\",\n            Language::Slovenian => \"Slovenian\",\n            Language::Japanese => \"Japanese\",\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/language/param.rs",
    "content": "use super::Language;\nuse serde::{Deserialize, Serialize};\nuse std::ops::Deref;\n\n/// Language parameter that contains a Language and whether English should be used as fallback\n#[derive(Clone, Copy, Debug, Deserialize, Serialize)]\npub struct LangParam {\n    lang: Language,\n    use_en: bool,\n}\n\nimpl LangParam {\n    /// Creates a new LangParam with English fallback disabled\n    #[inline]\n    pub fn new(lang: Language) -> Self {\n        Self::with_en_raw(lang, false)\n    }\n\n    /// Creates a new LangParam with English fallback enabled\n    #[inline]\n    pub fn with_en(lang: Language) -> Self {\n        Self::with_en_raw(lang, true)\n    }\n\n    /// Creates a new LangParam with English fallback as custom parameter\n    #[inline]\n    pub fn with_en_raw(lang: Language, use_en: bool) -> Self {\n        Self { lang, use_en }\n    }\n\n    /// Returns `true` whether English can be used\n    #[inline]\n    pub fn en_fallback(&self) -> bool {\n        self.use_en\n    }\n\n    /// Returns `true` if the language is `Language::English`\n    #[inline]\n    pub fn is_english(&self) -> bool {\n        self.lang == Language::English\n    }\n\n    /// Returns the params language\n    #[inline]\n    pub fn language(&self) -> Language {\n        self.lang\n    }\n\n    /// Returns `true` if the language param matches the given language. 
This also uses `use_en`\n    /// for the comparison\n    #[inline]\n    pub fn eq_to_lang(&self, lang: &Language) -> bool {\n        self.lang == *lang || (self.en_fallback() && *lang == Language::English)\n    }\n}\n\nimpl Deref for LangParam {\n    type Target = Language;\n\n    #[inline]\n    fn deref(&self) -> &Self::Target {\n        &self.lang\n    }\n}\n\n// Little shortcut to make trait bounds easier to read\npub trait AsLangParam: Copy {\n    fn as_lang(self) -> LangParam;\n}\n\nimpl<T: Into<LangParam> + Copy> AsLangParam for T {\n    #[inline]\n    fn as_lang(self) -> LangParam {\n        self.into()\n    }\n}\n\nimpl From<&Language> for LangParam {\n    #[inline]\n    fn from(lang: &Language) -> Self {\n        Self::new(*lang)\n    }\n}\n\nimpl From<Language> for LangParam {\n    #[inline]\n    fn from(lang: Language) -> Self {\n        Self::new(lang)\n    }\n}\n\nimpl From<(Language, bool)> for LangParam {\n    #[inline]\n    fn from(lang: (Language, bool)) -> Self {\n        Self::with_en_raw(lang.0, lang.1)\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/mod.rs",
    "content": "/// Contains all structures and enums for Jotoba kanji\npub mod kanji;\npub mod language;\n/// Contains all structures and enums for Jotoba names\npub mod names;\n/// Contains structures used for pagination\npub mod pagination;\n/// Contains search related structures and enums\npub mod search;\npub mod sentences;\n/// Contains all structures and enums for Jotoba words\npub mod words;\n\n/// Types used in indexes\npub mod indexes;\n"
  },
  {
    "path": "lib/types/src/jotoba/names/mod.rs",
    "content": "pub mod name_type;\n\nuse name_type::NameType;\nuse serde::{Deserialize, Serialize};\nuse std::hash::{Hash, Hasher};\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\npub struct Name {\n    pub sequence: u32,\n    pub kana: String,\n    pub kanji: Option<String>,\n    pub transcription: String,\n    pub name_type: Option<Vec<NameType>>,\n    pub xref: Option<String>,\n}\n\nimpl Name {\n    /// Return `true` if name is gendered\n    pub fn is_gendered(&self) -> bool {\n        self.name_type\n            .as_ref()\n            .map(|i| i.iter().any(|i| i.is_gender()))\n            .unwrap_or(false)\n    }\n\n    /// Get the gender name-type if exists\n    pub fn get_gender(&self) -> Option<NameType> {\n        self.name_type\n            .as_ref()\n            .and_then(|i| i.iter().find(|i| i.is_gender()).copied())\n    }\n\n    /// Returns `true` if name has at least one non-gender tag\n    pub fn has_non_gender_tags(&self) -> bool {\n        self.name_type\n            .as_ref()\n            .map(|i| i.iter().any(|j| !j.is_gender()))\n            .unwrap_or(false)\n    }\n\n    #[inline]\n    pub fn get_reading(&self) -> &str {\n        self.kanji.as_ref().unwrap_or(&self.kana)\n    }\n\n    #[inline]\n    pub fn has_kanji(&self) -> bool {\n        self.kanji.is_some()\n    }\n}\n\nimpl PartialEq for Name {\n    #[inline]\n    fn eq(&self, other: &Self) -> bool {\n        self.sequence == other.sequence\n    }\n}\n\nimpl Eq for Name {}\n\nimpl Hash for Name {\n    #[inline]\n    fn hash<H: Hasher>(&self, state: &mut H) {\n        self.sequence.hash(state);\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/names/name_type.rs",
    "content": "#[cfg(feature = \"jotoba_intern\")]\nuse localization::traits::Translatable;\n\nuse serde::{Deserialize, Serialize};\nuse strum_macros::EnumString;\n\n#[derive(Debug, Clone, Copy, EnumString, Serialize, Deserialize, PartialEq, Hash)]\n#[repr(u8)]\npub enum NameType {\n    #[strum(serialize = \"company\")]\n    Company,\n    #[strum(serialize = \"fem\")]\n    Female,\n    #[strum(serialize = \"masc\")]\n    Male,\n    #[strum(serialize = \"given\")]\n    Given,\n    #[strum(serialize = \"organization\")]\n    Organization,\n    #[strum(serialize = \"person\")]\n    Person,\n    #[strum(serialize = \"place\")]\n    Place,\n    #[strum(serialize = \"product\")]\n    Product,\n    #[strum(serialize = \"station\")]\n    RailwayStation,\n    #[strum(serialize = \"surname\")]\n    Surname,\n    #[strum(serialize = \"unclass\")]\n    Unclassified,\n    #[strum(serialize = \"work\")]\n    Work,\n    #[strum(serialize = \"char\")]\n    Character,\n    #[strum(serialize = \"creat\")]\n    Creature,\n    #[strum(serialize = \"dei\")]\n    Deity,\n    #[strum(serialize = \"doc\")]\n    Document,\n    #[strum(serialize = \"ev\")]\n    Event,\n    #[strum(serialize = \"fict\")]\n    Fiction,\n    #[strum(serialize = \"group\")]\n    Group,\n    #[strum(serialize = \"leg\")]\n    Legend,\n    #[strum(serialize = \"myth\")]\n    Mythology,\n    #[strum(serialize = \"obj\")]\n    Object,\n    #[strum(serialize = \"oth\")]\n    Other,\n    #[strum(serialize = \"relig\")]\n    Religion,\n    #[strum(serialize = \"serv\")]\n    Service,\n    #[strum(serialize = \"ship\")]\n    Ship,\n}\n\nimpl NameType {\n    #[inline]\n    pub fn is_gender(&self) -> bool {\n        matches!(self, Self::Female | Self::Male)\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for NameType {\n    #[inline]\n    fn get_id(&self) -> &'static str {\n        match self {\n            NameType::Company => \"Company\",\n            NameType::Female => \"Female\",\n            
NameType::Male => \"Male\",\n            NameType::Given => \"Given name\",\n            NameType::Organization => \"Organization\",\n            NameType::Person => \"Persons name\",\n            NameType::Place => \"Place\",\n            NameType::Product => \"Product\",\n            NameType::RailwayStation => \"(Railway)Station\",\n            NameType::Surname => \"Surname\",\n            NameType::Unclassified => \"Unknown\",\n            NameType::Work => \"Art work\",\n            NameType::Character => \"Character\",\n            NameType::Creature => \"Creature\",\n            NameType::Deity => \"Deity\",\n            NameType::Document => \"Document\",\n            NameType::Event => \"Event\",\n            NameType::Fiction => \"Fiction\",\n            NameType::Group => \"Group\",\n            NameType::Legend => \"Legend\",\n            NameType::Mythology => \"Mythology\",\n            NameType::Object => \"Object\",\n            NameType::Other => \"Other\",\n            NameType::Religion => \"Religion\",\n            NameType::Service => \"Service\",\n            NameType::Ship => \"Ship\",\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/pagination/mod.rs",
    "content": "pub mod page;\n\nuse page::Page;\nuse serde::Serialize;\nuse std::cmp::min;\n\n/// The amount of buttons the paginator should display max.\nconst BUTTONS_TO_DISPLAY: u8 = 5;\n\n/// A Pagination structure holding information about page\n#[derive(Clone, Copy, Default, Debug)]\npub struct Pagination {\n    pub curr_page: u32,\n    pub items: u32,\n    pub items_per_page: u32,\n    pub max_pages: u32,\n}\n\nimpl Pagination {\n    #[inline]\n    pub fn new(curr_page: u32, items: u32, items_per_page: u32, max_pages: u32) -> Self {\n        Self {\n            curr_page,\n            items,\n            items_per_page,\n            max_pages,\n        }\n    }\n\n    #[inline]\n    pub fn new_page<T: Serialize + Clone>(\n        v: T,\n        curr_page: u32,\n        items: u32,\n        items_per_page: u32,\n        max_pages: u32,\n    ) -> Page<T> {\n        Self::new(curr_page, items, items_per_page, max_pages).with_value(v)\n    }\n\n    /// Returns the number of the last page\n    #[inline]\n    pub fn get_last(&self) -> u32 {\n        ((self.items as f32 / self.items_per_page as f32).ceil() as u32).min(self.max_pages)\n    }\n\n    /// Returns `true` if the current page is the first page\n    #[inline]\n    pub fn is_first(&self) -> bool {\n        self.curr_page == 1\n    }\n\n    /// Returns `true` if the current page is the last page\n    #[inline]\n    pub fn is_last(&self) -> bool {\n        self.curr_page == self.get_last()\n    }\n\n    pub fn with_value<T: Serialize + Clone>(&self, v: T) -> Page<T> {\n        // always show at least one page. 
Otherwise it would panic\n        let last = self.get_last().max(1);\n        let curr = self.curr_page.min(last);\n        Page::with_pages(v, curr, last)\n    }\n\n    /// Generates the pagination buttons\n    pub fn gen_page_buttons(&self) -> impl Iterator<Item = PaginationButton> + '_ {\n        let btn_count = min(BUTTONS_TO_DISPLAY as u32, self.get_last());\n        let h_btns = btn_count / 2;\n\n        let right_btns_inv = h_btns - (self.get_last() - self.curr_page).min(h_btns);\n        let start = self\n            .curr_page\n            .saturating_sub(h_btns + right_btns_inv)\n            // Don't show 0 pages if only one exists\n            .max(1);\n\n        let end = min(start + btn_count, self.get_last() + 1);\n\n        (start..end).map(move |page| PaginationButton::new(page, page == self.curr_page))\n    }\n}\n\n/// Data for a single frontend pagination button.\n#[derive(Copy, Clone)]\npub struct PaginationButton {\n    pub page_nr: u32,\n    pub active: bool,\n}\n\nimpl PaginationButton {\n    /// Create a new `PaginationButton`\n    #[inline]\n    fn new(page: u32, active: bool) -> PaginationButton {\n        PaginationButton {\n            page_nr: page,\n            active,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/pagination/page.rs",
    "content": "use serde::Serialize;\n\n/// A generic API Response type implementing Serialize that can be used for any kind of Response\n/// that can be a part of multiple pages\n#[derive(Serialize, Clone)]\npub struct Page<T: Serialize> {\n    /// Paginator content\n    content: T,\n\n    /// Total amount of Pages\n    pages: u32,\n\n    /// Current page\n    current_page: u32,\n}\n\nimpl<T: Serialize> Page<T> {\n    /// Creates a new Paginator with default values\n    pub fn new(content: T) -> Self {\n        Self {\n            content,\n            pages: 1,\n            current_page: 1,\n        }\n    }\n\n    /// Creates a new Paginator with non default page values\n    ///\n    /// # Panics\n    ///\n    /// Panics if `current_page` > `pages`\n    pub fn with_pages(content: T, current_page: u32, pages: u32) -> Self {\n        assert!(current_page <= pages);\n        Self {\n            content,\n            current_page,\n            pages,\n        }\n    }\n\n    /// Set the paginator's current page.\n    ///\n    /// # Panics\n    ///\n    /// Panics if `current_page` > `pages`\n    pub fn set_current_page(&mut self, current_page: u32) {\n        assert!(current_page <= self.pages);\n        self.current_page = current_page;\n    }\n\n    /// Set the paginator's pages.\n    ///\n    /// # Panics\n    ///\n    /// Panics if `current_page` > `pages`\n    pub fn set_pages(&mut self, pages: u32) {\n        assert!(self.current_page <= pages);\n        self.pages = pages;\n    }\n\n    /// Get the paginator's pages.\n    pub fn pages(&self) -> u32 {\n        self.pages\n    }\n\n    /// Returns `true` if the page is blank\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.pages == 0\n    }\n\n    /// Get the paginator's current page.\n    pub fn current_page(&self) -> u32 {\n        self.current_page\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/search/guess.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n/// A guess representing structure. Gives some vague information about the relation to the\n/// actual value i.e if its likely to be exact, less, etc..\n#[derive(Clone, Copy, Debug, Serialize, Deserialize)]\npub struct Guess {\n    pub value: u32,\n    pub guess_type: GuessType,\n}\n\n/// Vague guess relation to a guesses actual value\n#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)]\npub enum GuessType {\n    Accurate,\n    MoreThan,\n    LessThan,\n    Undefined,\n}\n\nimpl Guess {\n    /// Creates a new `Guess`\n    #[inline]\n    pub fn new(value: u32, guess_type: GuessType) -> Self {\n        Self { value, guess_type }\n    }\n\n    /// Creates a new guess value with a given limit. If `value` exceeds the with_limit\n    /// `GuessType::MoreThan` will be used. Otherwise GuessType::Accurate\n    pub fn with_limit(value: u32, limit: u32) -> Self {\n        let gt;\n        if value > limit {\n            gt = GuessType::MoreThan;\n        } else {\n            gt = GuessType::Accurate;\n        }\n\n        Self {\n            value: value.min(limit),\n            guess_type: gt,\n        }\n    }\n\n    /// Formats the guess to a human readable string\n    pub fn format(&self) -> String {\n        let prefix = self.guess_type.get_prefix();\n        format!(\"{}{}\", prefix, self.value)\n    }\n}\n\nimpl GuessType {\n    #[inline]\n    pub fn get_prefix(&self) -> &'static str {\n        match self {\n            GuessType::Accurate => \"\",\n            GuessType::Undefined => \"\",\n            GuessType::MoreThan => \">\",\n            GuessType::LessThan => \"<\",\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/search/help.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse crate::jotoba::language::Language;\n\nuse super::{guess::Guess, SearchTarget};\n\n/// Structure containing information for better search help in case no item was\n/// found in a search\n#[derive(Clone, Default, Debug, Serialize, Deserialize)]\npub struct SearchHelp {\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub words: Option<Guess>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub names: Option<Guess>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub sentences: Option<Guess>,\n    #[serde(skip_serializing_if = \"Option::is_none\")]\n    pub kanji: Option<Guess>,\n    #[serde(skip_serializing_if = \"Vec::is_empty\")]\n    pub other_langs: Vec<Language>,\n}\n\nimpl SearchHelp {\n    pub fn new(\n        words: Option<Guess>,\n        names: Option<Guess>,\n        sentences: Option<Guess>,\n        kanji: Option<Guess>,\n        other_langs: Vec<Language>,\n    ) -> Self {\n        Self {\n            words,\n            names,\n            sentences,\n            kanji,\n            other_langs,\n        }\n    }\n\n    /// Returns `true` if `SearchHelp` is not helpful at all (empty)\n    pub fn is_empty(&self) -> bool {\n        self.iter_items().next().is_none()\n    }\n\n    /// Returns an iterator over all (QueryType, Guess) pairs that have a value\n    pub fn iter_items(&self) -> impl Iterator<Item = (SearchTarget, Guess)> {\n        let types = &[\n            (self.words, SearchTarget::Words),\n            (self.names, SearchTarget::Names),\n            (self.sentences, SearchTarget::Sentences),\n            (self.kanji, SearchTarget::Kanji),\n        ];\n\n        types\n            .iter()\n            .filter_map(|i| i.0.is_some().then(|| (i.1, i.0.unwrap())))\n            .filter(|i| i.1.value != 0)\n            .collect::<Vec<_>>()\n            .into_iter()\n    }\n\n    pub fn iter_langs(&self) -> impl Iterator<Item = (Language, &'static 
str)> + '_ {\n        self.other_langs\n            .iter()\n            .map(|lang| (*lang, lang.to_query_format()))\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/search/mod.rs",
    "content": "pub mod guess;\npub mod help;\npub mod query_type;\n\npub use query_type::SearchTarget;\n"
  },
  {
    "path": "lib/types/src/jotoba/search/query_type.rs",
    "content": "#[cfg(feature = \"jotoba_intern\")]\nuse localization::{language::Language, traits::Translatable, TranslationDict};\n\nuse serde::{Deserialize, Serialize};\n\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Hash, Default)]\npub enum SearchTarget {\n    #[serde(rename = \"1\")]\n    Kanji,\n    #[serde(rename = \"2\")]\n    Sentences,\n    #[serde(rename = \"3\")]\n    Names,\n    #[default]\n    #[serde(rename = \"0\", other)]\n    Words,\n}\n\nimpl SearchTarget {\n    /// Iterate over all query types\n    #[inline]\n    pub fn iterate() -> impl Iterator<Item = Self> {\n        [Self::Kanji, Self::Sentences, Self::Names, Self::Words].into_iter()\n    }\n\n    #[cfg(feature = \"jotoba_intern\")]\n    pub fn get_translated<'a>(\n        &self,\n        dict: &'a TranslationDict,\n        language: Option<Language>,\n    ) -> &'a str {\n        dict.gettext(self.get_id(), language)\n    }\n\n    #[inline]\n    pub fn get_type_id(&self) -> u8 {\n        match self {\n            SearchTarget::Kanji => 1,\n            SearchTarget::Sentences => 2,\n            SearchTarget::Names => 3,\n            SearchTarget::Words => 0,\n        }\n    }\n}\n\nimpl TryFrom<u8> for SearchTarget {\n    type Error = ();\n\n    #[inline]\n    fn try_from(value: u8) -> Result<Self, Self::Error> {\n        Ok(match value {\n            0 => Self::Words,\n            1 => Self::Kanji,\n            2 => Self::Sentences,\n            3 => Self::Names,\n            _ => return Err(()),\n        })\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for SearchTarget {\n    #[inline]\n    fn get_id(&self) -> &'static str {\n        match self {\n            SearchTarget::Kanji => \"Kanji\",\n            SearchTarget::Sentences => \"Sentences\",\n            SearchTarget::Names => \"Names\",\n            SearchTarget::Words => \"Words\",\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/sentences/mod.rs",
    "content": "pub mod tag;\npub mod translation;\n\npub use self::tag::Tag;\n\nuse super::language::{param::AsLangParam, Language};\nuse bitflags::BitFlag;\nuse jp_utils::furi::Furigana;\nuse serde::{Deserialize, Serialize};\nuse std::{\n    hash::{Hash, Hasher},\n    num::{NonZeroI8, NonZeroU8},\n};\nuse translation::Translation;\n\n/// A single Sentence with multiple translations.\n#[derive(Clone, Deserialize, Serialize, Default)]\npub struct Sentence {\n    pub id: u32,\n    pub japanese: String,\n    pub furigana: String,\n    pub translations: Vec<Translation>,\n    pub jlpt_guess: Option<NonZeroU8>,\n    pub level: Option<NonZeroI8>,\n    pub tags: Vec<Tag>,\n}\n\nimpl Sentence {\n    /// Create a new sentence\n    #[inline]\n    pub fn new(\n        id: u32,\n        japanese: String,\n        furigana: String,\n        translations: Vec<Translation>,\n        tags: Vec<Tag>,\n    ) -> Self {\n        Sentence {\n            id,\n            japanese,\n            furigana,\n            translations,\n            jlpt_guess: None,\n            level: None,\n            tags,\n        }\n    }\n\n    /// Returns `true` if the sentence has the given tag\n    #[inline]\n    pub fn has_tag(&self, tag: &Tag) -> bool {\n        self.tags.iter().any(|i| i == tag)\n    }\n\n    /// Returns `true` if the sentence contains a translation for `language`\n    #[inline]\n    pub fn has_translation(&self, lang: impl AsLangParam) -> bool {\n        let lang = lang.as_lang();\n        self.translations\n            .iter()\n            .any(|tr| lang.eq_to_lang(&tr.language))\n    }\n\n    /// Returns the translation for a given language if exists\n    #[inline]\n    pub fn translation_for(&self, language: Language) -> Option<&str> {\n        self.translations\n            .iter()\n            .find(|i| i.language == language)\n            .map(|i| i.text.as_str())\n    }\n\n    pub fn get_translation(&self, lang: impl AsLangParam) -> Option<&str> {\n        let lang = 
lang.as_lang();\n\n        if let Some(s) = self.translation_for(lang.language()) {\n            return Some(s);\n        }\n\n        if lang.en_fallback() {\n            return self.translation_for(Language::English);\n        }\n\n        None\n    }\n\n    pub fn set_jlpt_guess(&mut self, guess: u8) {\n        if !(1..=5).contains(&guess) {\n            return;\n        }\n\n        self.jlpt_guess = Some(NonZeroU8::new(guess).unwrap())\n    }\n\n    /// Calculates a bitmask to efficiently determine the supported languages of a sentence\n    pub fn calc_lang_mask(&self) -> u16 {\n        lang_mask(self.translations.iter().map(|i| i.language))\n    }\n\n    #[inline]\n    pub fn level(&self) -> Option<i8> {\n        // We add 10 to each value in preprocessing to prevent it reaching 0 which\n        // we want to be able to use NonZeroI8\n        self.level.map(|i| i.get() - 10)\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Sentence {\n    /// Returns the kana reading of a sentence\n    #[inline]\n    pub fn get_kana(&self) -> String {\n        Furigana(&self.furigana).kana_str()\n    }\n}\n\npub fn lang_mask<I>(langs: I) -> u16\nwhere\n    I: Iterator<Item = Language>,\n{\n    let mut lang_mask = BitFlag::<u16>::new();\n    for lang in langs {\n        let lang: i32 = lang.into();\n        lang_mask.set_unchecked(lang as u16, true);\n    }\n    lang_mask.raw()\n}\n\npub fn parse_lang_mask(mask: u16) -> Vec<Language> {\n    let mut langs = Vec::new();\n    for i in 0..10 {\n        if mask & (1 << i) == 0 {\n            continue;\n        }\n        if let Ok(lang) = Language::try_from(i as i32) {\n            langs.push(lang);\n        }\n    }\n    langs\n}\n\nimpl Eq for Sentence {}\n\nimpl PartialEq for Sentence {\n    #[inline]\n    fn eq(&self, other: &Self) -> bool {\n        self.id == other.id\n    }\n}\n\nimpl Hash for Sentence {\n    #[inline]\n    fn hash<H: Hasher>(&self, state: &mut H) {\n        self.id.hash(state);\n    }\n}\n\nimpl 
std::fmt::Debug for Sentence {\n    #[inline]\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"{}\", self.japanese)\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/sentences/tag.rs",
    "content": "use serde::{Deserialize, Serialize};\nuse strum::IntoEnumIterator;\nuse strum_macros::{AsRefStr, EnumIter, EnumString};\n\n#[derive(\n    Debug, PartialEq, Clone, Copy, AsRefStr, Serialize, Deserialize, EnumString, EnumIter, Hash, Eq,\n)]\n#[repr(u8)]\npub enum Tag {\n    #[strum(serialize = \"casual\")]\n    Casual,\n    #[strum(serialize = \"formal\")]\n    Formal,\n    #[strum(serialize = \"humble\")]\n    Humble,\n    #[strum(serialize = \"kansai\", serialize = \"kansai dialect\")]\n    Kansai,\n    #[strum(serialize = \"female\", serialize = \"female speaker\")]\n    Female,\n    #[strum(serialize = \"male\", serialize = \"male speaker\")]\n    Male,\n    #[strum(serialize = \"proverb\")]\n    Proverb,\n    #[strum(serialize = \"translatedproverb\")]\n    TranslatedProverb,\n    #[strum(serialize = \"quote\")]\n    Quote,\n    #[strum(serialize = \"pun\", serialize = \"japanese puns\")]\n    Pun,\n    #[strum(serialize = \"ok\")]\n    Ok,\n    #[strum(serialize = \"japanglish\")]\n    Japanglish,\n    #[strum(serialize = \"haiku\")]\n    Haiku,\n    #[strum(serialize = \"vulgar\")]\n    Vulgar,\n    #[strum(serialize = \"conversation\")]\n    Conversation,\n    #[strum(serialize = \"slang\")]\n    Slang,\n    #[strum(serialize = \"meme\")]\n    Meme,\n\n    #[strum(serialize = \"bungo\")]\n    /// 文語\n    Bungo,\n\n    #[strum(serialize = \"dialectal\")]\n    Dialectal,\n    #[strum(serialize = \"poetry\")]\n    Poetry,\n    #[strum(serialize = \"game\")]\n    Game,\n    #[strum(serialize = \"manga\")]\n    Manga,\n    #[strum(serialize = \"lie\")]\n    Lie,\n}\n\nimpl Tag {\n    #[inline]\n    pub fn iter() -> impl Iterator<Item = Tag> {\n        <Tag as IntoEnumIterator>::iter()\n    }\n\n    #[inline]\n    pub fn as_str(&self) -> &str {\n        self.as_ref()\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/sentences/translation.rs",
    "content": "use crate::jotoba::language::Language;\nuse serde::{Deserialize, Serialize};\n\n/// A Translation for a sentence\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct Translation {\n    pub text: String,\n    pub language: Language,\n}\n\nimpl From<(String, Language)> for Translation {\n    #[inline]\n    fn from((text, language): (String, Language)) -> Self {\n        Self { text, language }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/dialect.rs",
    "content": "use std::fmt::Display;\n\n#[cfg(feature = \"jotoba_intern\")]\nuse localization::{language::Language, traits::Translatable, TranslationDict};\n\nuse serde::{Deserialize, Serialize};\nuse strum_macros::EnumString;\n\n#[derive(Debug, PartialEq, Clone, Copy, EnumString, Serialize, Deserialize, Hash)]\n#[repr(u8)]\npub enum Dialect {\n    #[strum(serialize = \"bra\")]\n    Brazilian,\n    #[strum(serialize = \"hob\")]\n    Hokkaido,\n    #[strum(serialize = \"ksb\")]\n    Kansai,\n    #[strum(serialize = \"ktb\")]\n    Kantou,\n    #[strum(serialize = \"kyb\")]\n    Kyoto,\n    #[strum(serialize = \"kyu\")]\n    Kyuushuu,\n    #[strum(serialize = \"nab\")]\n    Nagano,\n    #[strum(serialize = \"osb\")]\n    Osaka,\n    #[strum(serialize = \"rkb\")]\n    Ryuukyuu,\n    #[strum(serialize = \"thb\")]\n    Touhoku,\n    #[strum(serialize = \"tsb\")]\n    Tosa,\n    #[strum(serialize = \"tsug\")]\n    Tsugaru,\n}\n\nimpl Display for Dialect {\n    #[inline]\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"{:?}\", self)\n    }\n}\n\nimpl Into<&'static str> for Dialect {\n    #[inline]\n    fn into(self) -> &'static str {\n        match self {\n            Dialect::Hokkaido => \"Hokkaido\",\n            Dialect::Brazilian => \"Brazilian\",\n            Dialect::Kansai => \"Kansai\",\n            Dialect::Kantou => \"Kantou\",\n            Dialect::Kyoto => \"Kyoto\",\n            Dialect::Kyuushuu => \"Kyuushuu\",\n            Dialect::Nagano => \"Nagano\",\n            Dialect::Osaka => \"Osaka\",\n            Dialect::Ryuukyuu => \"Ryuukyuu\",\n            Dialect::Touhoku => \"Touhoku\",\n            Dialect::Tosa => \"Tosa\",\n            Dialect::Tsugaru => \"Tsugaru\",\n        }\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for Dialect {\n    #[inline]\n    fn get_id(&self) -> &'static str {\n        (*self).into()\n    }\n\n    #[inline]\n    fn gettext_custom(&self, dict: 
&TranslationDict, language: Option<Language>) -> String {\n        dict.gettext_fmt(\"{} dialect\", &[self.gettext(dict, language)], language)\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/dict.rs",
    "content": "use serde::{Deserialize, Serialize};\n\nuse super::{information::Information, priority::Priority};\n\n/// A single dictionary entry representing a words reading\n#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, Hash, Eq)]\npub struct Dict {\n    pub reading: String,\n    pub kanji: bool,\n    pub no_kanji: bool,\n    pub priorities: Option<Vec<Priority>>,\n    pub reading_info: Option<Vec<Information>>,\n    pub is_main: bool,\n}\n\nimpl Dict {\n    /// Returns the length of the dictionaries reading\n    #[inline]\n    pub fn len(&self) -> usize {\n        // TODO: use proper len calculation here\n        self.reading.chars().count()\n    }\n\n    /// Returns `true` if the reading has a length of zero\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.reading.is_empty()\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/field.rs",
    "content": "#[cfg(feature = \"jotoba_intern\")]\nuse localization::{language::Language, traits::Translatable, TranslationDict};\n\nuse serde::{Deserialize, Serialize};\nuse strum_macros::{AsRefStr, EnumString};\n\n#[derive(Debug, PartialEq, Clone, Copy, AsRefStr, EnumString, Serialize, Deserialize, Hash)]\n#[repr(u8)]\npub enum Field {\n    #[strum(serialize = \"ski\")]\n    Ski,\n    #[strum(serialize = \"psyanal\")]\n    Psyanal,\n    #[strum(serialize = \"agric\")]\n    Agriculture,\n    #[strum(serialize = \"anat\")]\n    Anatomy,\n    #[strum(serialize = \"archeol\")]\n    Archeology,\n    #[strum(serialize = \"archit\")]\n    Architecture,\n    #[strum(serialize = \"art\")]\n    ArtAesthetics,\n    #[strum(serialize = \"astron\")]\n    Astronomy,\n    #[strum(serialize = \"audvid\")]\n    AudioVisual,\n    #[strum(serialize = \"aviat\")]\n    Aviation,\n    #[strum(serialize = \"baseb\")]\n    Baseball,\n    #[strum(serialize = \"biochem\")]\n    Biochemistry,\n    #[strum(serialize = \"biol\")]\n    Biology,\n    #[strum(serialize = \"bot\")]\n    Botany,\n    #[strum(serialize = \"Buddh\")]\n    Buddhism,\n    #[strum(serialize = \"bus\")]\n    Business,\n    #[strum(serialize = \"cards\")]\n    Cards,\n    #[strum(serialize = \"chem\")]\n    Chemistry,\n    #[strum(serialize = \"Christn\")]\n    Christianity,\n    #[strum(serialize = \"comp\")]\n    Computing,\n    #[strum(serialize = \"cloth\")]\n    Clothing,\n    #[strum(serialize = \"cryst\")]\n    Crystallography,\n    #[strum(serialize = \"dent\")]\n    Dentistry,\n    #[strum(serialize = \"ecol\")]\n    Ecology,\n    #[strum(serialize = \"econ\")]\n    Economics,\n    #[strum(serialize = \"elec\")]\n    Electricity,\n    #[strum(serialize = \"electr\")]\n    Electronics,\n    #[strum(serialize = \"embryo\")]\n    Embryology,\n    #[strum(serialize = \"engr\")]\n    Engineering,\n    #[strum(serialize = \"ent\")]\n    Entomology,\n    #[strum(serialize = \"finc\")]\n    Finance,\n    
#[strum(serialize = \"film\")]\n    Film,\n    #[strum(serialize = \"fish\")]\n    Fishing,\n    #[strum(serialize = \"food\")]\n    FoodCooking,\n    #[strum(serialize = \"gardn\")]\n    Gardening,\n    #[strum(serialize = \"genet\")]\n    Genetics,\n    #[strum(serialize = \"geogr\")]\n    Geography,\n    #[strum(serialize = \"geol\")]\n    Geology,\n    #[strum(serialize = \"geom\")]\n    Geometry,\n    #[strum(serialize = \"go\")]\n    GoGame,\n    #[strum(serialize = \"golf\")]\n    Golf,\n    #[strum(serialize = \"gramm\")]\n    Grammar,\n    #[strum(serialize = \"grmyth\")]\n    GreekMythology,\n    #[strum(serialize = \"hanaf\")]\n    Hanafuda,\n    #[strum(serialize = \"horse\")]\n    Horseracing,\n    #[strum(serialize = \"law\")]\n    Law,\n    #[strum(serialize = \"kabuki\")]\n    Kabuki,\n    #[strum(serialize = \"ling\")]\n    Linguistics,\n    #[strum(serialize = \"logic\")]\n    Logic,\n    #[strum(serialize = \"MA\")]\n    MartialArts,\n    #[strum(serialize = \"mahj\")]\n    Mahjong,\n    #[strum(serialize = \"manga\")]\n    Manga,\n    #[strum(serialize = \"math\")]\n    Mathematics,\n    #[strum(serialize = \"mech\")]\n    MechanicalEngineering,\n    #[strum(serialize = \"med\")]\n    Medicine,\n    #[strum(serialize = \"met\")]\n    ClimateWeather,\n    #[strum(serialize = \"mining\")]\n    Mining,\n    #[strum(serialize = \"mil\")]\n    Military,\n    #[strum(serialize = \"noh\")]\n    Noh,\n    #[strum(serialize = \"music\")]\n    Music,\n    #[strum(serialize = \"ornith\")]\n    Ornithology,\n    #[strum(serialize = \"paleo\")]\n    Paleontology,\n    #[strum(serialize = \"pathol\")]\n    Pathology,\n    #[strum(serialize = \"pharm\")]\n    Pharmacy,\n    #[strum(serialize = \"phil\")]\n    Philosophy,\n    #[strum(serialize = \"photo\")]\n    Photography,\n    #[strum(serialize = \"physics\")]\n    Physics,\n    #[strum(serialize = \"physiol\")]\n    Physiology,\n    #[strum(serialize = \"politics\")]\n    Politics,\n    #[strum(serialize = 
\"print\")]\n    Printing,\n    #[strum(serialize = \"psych\")]\n    Psychology,\n    #[strum(serialize = \"psy\")]\n    Psychitatry,\n    #[strum(serialize = \"Shinto\")]\n    Shinto,\n    #[strum(serialize = \"rail\")]\n    Railway,\n    #[strum(serialize = \"rommyth\")]\n    RomanMythology,\n    #[strum(serialize = \"stockm\")]\n    StockMarket,\n    #[strum(serialize = \"shogi\")]\n    Shogi,\n    #[strum(serialize = \"sports\")]\n    Sports,\n    #[strum(serialize = \"stat\")]\n    Statistics,\n    #[strum(serialize = \"sumo\")]\n    Sumo,\n    #[strum(serialize = \"telec\")]\n    Telecommunications,\n    #[strum(serialize = \"tradem\")]\n    Trademark,\n    #[strum(serialize = \"tv\")]\n    TV,\n    #[strum(serialize = \"vidg\")]\n    Videogame,\n    #[strum(serialize = \"zool\")]\n    Zoology,\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for Field {\n    fn get_id(&self) -> &'static str {\n        match self {\n            Field::Agriculture => \"Agriculture\",\n            Field::Anatomy => \"Anatomy\",\n            Field::Archeology => \"Archeology\",\n            Field::Architecture => \"Architecture\",\n            Field::ArtAesthetics => \"Art aesthetics\",\n            Field::Astronomy => \"Astronomy\",\n            Field::AudioVisual => \"Audio/visual\",\n            Field::Aviation => \"Aviation\",\n            Field::Baseball => \"Baseball\",\n            Field::Biochemistry => \"Biochemistry\",\n            Field::Biology => \"Biology\",\n            Field::Botany => \"Botany\",\n            Field::Buddhism => \"Buddhism\",\n            Field::Business => \"Business\",\n            Field::Cards => \"Cards\",\n            Field::Chemistry => \"Chemistry\",\n            Field::Christianity => \"Christianity\",\n            Field::Computing => \"Computing\",\n            Field::Crystallography => \"Crystallography\",\n            Field::Ecology => \"Ecology\",\n            Field::Economics => \"Economics\",\n            
Field::Electricity => \"Electricity\",\n            Field::Electronics => \"Electronics\",\n            Field::Embryology => \"Embryology\",\n            Field::Engineering => \"Engineering\",\n            Field::Entomology => \"Entomology\",\n            Field::Film => \"Film\",\n            Field::Finance => \"Finance\",\n            Field::Fishing => \"Fishing\",\n            Field::FoodCooking => \"FoodCooking\",\n            Field::Gardening => \"Gardening\",\n            Field::Genetics => \"Genetics\",\n            Field::Geography => \"Geography\",\n            Field::Geology => \"Geology\",\n            Field::Geometry => \"Geometry\",\n            Field::GoGame => \"Go (game)\",\n            Field::Golf => \"Golf\",\n            Field::Grammar => \"Grammar\",\n            Field::GreekMythology => \"Greek mythology\",\n            Field::Hanafuda => \"Hanafuda\",\n            Field::Horseracing => \"Horseracing\",\n            Field::Kabuki => \"Kabuki\",\n            Field::Law => \"Law\",\n            Field::Linguistics => \"Linguistics\",\n            Field::Logic => \"Logic\",\n            Field::MartialArts => \"Martial arts\",\n            Field::Mahjong => \"Mahjong\",\n            Field::Mathematics => \"Mathematics\",\n            Field::MechanicalEngineering => \"MechanicalEngineering\",\n            Field::Medicine => \"Medicine\",\n            Field::Mining => \"Mining\",\n            Field::ClimateWeather => \"Climate/weather\",\n            Field::Manga => \"Manga\",\n            Field::Military => \"Military\",\n            Field::Music => \"Music\",\n            Field::Ornithology => \"Ornithology\",\n            Field::Paleontology => \"Paleontology\",\n            Field::Pathology => \"Pathology\",\n            Field::Pharmacy => \"Pharmacy\",\n            Field::Philosophy => \"Philosophy\",\n            Field::Photography => \"Photography\",\n            Field::Physics => \"Physics\",\n            Field::Physiology => \"Physiology\",\n  
          Field::Printing => \"Printing\",\n            Field::Psychology => \"Psychology\",\n            Field::Psychitatry => \"Psychiatry\",\n            Field::Railway => \"Railway\",\n            Field::RomanMythology => \"Roman Mythology\",\n            Field::StockMarket => \"Stock market\",\n            Field::Shinto => \"Shinto\",\n            Field::Shogi => \"Shogi\",\n            Field::Sports => \"Sports\",\n            Field::Statistics => \"Statistics\",\n            Field::Sumo => \"Sumo\",\n            Field::Telecommunications => \"Telecommunications\",\n            Field::Trademark => \"Trademark\",\n            Field::TV => \"TV\",\n            Field::Videogame => \"Videogame\",\n            Field::Zoology => \"Zoology\",\n            Field::Clothing => \"Clothing\",\n            Field::Dentistry => \"Dentistry\",\n            Field::Politics => \"Politics\",\n            Field::Noh => \"Noh\",\n            Field::Psyanal => \"Psyanal\",\n            Field::Ski => \"Ski\",\n        }\n    }\n\n    // Translate to eg \"Zoology term\"\n    fn gettext_custom(&self, dict: &TranslationDict, language: Option<Language>) -> String {\n        dict.gettext_fmt(\"{} term\", &[self.gettext(dict, language)], language)\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/foreign_language.rs",
    "content": "#[cfg(feature = \"jotoba_intern\")]\nuse localization::traits::Translatable;\n\nuse serde::{Deserialize, Serialize};\nuse strum_macros::{AsRefStr, EnumString};\n\n#[derive(Debug, PartialEq, Clone, Copy, AsRefStr, EnumString, Serialize, Deserialize, Hash)]\n#[repr(u8)]\npub enum ForeignLanguage {\n    #[strum(serialize = \"eng\")]\n    English,\n    #[strum(serialize = \"geo\")]\n    Georgian,\n    #[strum(serialize = \"ger\")]\n    German,\n    #[strum(serialize = \"chi\")]\n    Chinese,\n    #[strum(serialize = \"may\")]\n    Manchu,\n    #[strum(serialize = \"kur\")]\n    Kurdish,\n    #[strum(serialize = \"mnc\")]\n    ChinookJargon,\n    #[strum(serialize = \"ita\")]\n    Italian,\n    #[strum(serialize = \"mal\")]\n    Malayalam,\n    #[strum(serialize = \"tib\")]\n    Tibetian,\n    #[strum(serialize = \"m\")]\n    Mongolian,\n    #[strum(serialize = \"ru\")]\n    Romanian,\n    #[strum(serialize = \"b\")]\n    Bantu,\n    #[strum(serialize = \"nor\")]\n    Norwegian,\n    #[strum(serialize = \"gr\", serialize = \"grc\")]\n    Greek,\n    #[strum(serialize = \"ice\")]\n    Icelandic,\n    #[strum(serialize = \"br\")]\n    Breton,\n    #[strum(serialize = \"mao\")]\n    Maori,\n    #[strum(serialize = \"lat\")]\n    Latin,\n    #[strum(serialize = \"amh\")]\n    Amharic,\n    #[strum(serialize = \"khm\")]\n    Khmer,\n    #[strum(serialize = \"swa\")]\n    Swahili,\n    #[strum(serialize = \"heb\")]\n    Hebrew,\n    #[strum(serialize = \"glg\")]\n    Galician,\n    #[strum(serialize = \"kor\")]\n    Korean,\n    #[strum(serialize = \"tam\")]\n    Tamil,\n    #[strum(serialize = \"vie\")]\n    Viatnamese,\n    #[strum(serialize = \"pol\")]\n    Polish,\n    #[strum(serialize = \"san\")]\n    Sanskrit,\n    #[strum(serialize = \"per\")]\n    Persian,\n    #[strum(serialize = \"fil\")]\n    Filipino,\n    #[strum(serialize = \"mol\")]\n    Moldavian,\n    #[strum(serialize = \"scr\")]\n    Croatian,\n    #[strum(serialize = \"tha\")]\n    Thai,\n 
   #[strum(serialize = \"bur\")]\n    Burmese,\n    #[strum(serialize = \"slo\")]\n    Slovak,\n    #[strum(serialize = \"cze\")]\n    Czech,\n    #[strum(serialize = \"hin\")]\n    Hindi,\n    #[strum(serialize = \"arn\")]\n    Mapudungun,\n    #[strum(serialize = \"tur\")]\n    Turkish,\n    #[strum(serialize = \"haw\")]\n    Hawaiian,\n    #[strum(serialize = \"afr\")]\n    Afrikaans,\n    #[strum(serialize = \"epo\")]\n    Esperanto,\n    #[strum(serialize = \"yid\")]\n    Yiddish,\n    #[strum(serialize = \"som\")]\n    Somali,\n    #[strum(serialize = \"tah\")]\n    Tahitian,\n    #[strum(serialize = \"urd\")]\n    Urdu,\n    #[strum(serialize = \"ind\")]\n    Indonesian,\n    #[strum(serialize = \"est\")]\n    Estonian,\n    #[strum(serialize = \"bul\")]\n    Bulgarian,\n    #[strum(serialize = \"ara\")]\n    Arabic,\n    #[strum(serialize = \"dan\")]\n    Danish,\n    #[strum(serialize = \"por\")]\n    Portuguese,\n    #[strum(serialize = \"fin\")]\n    Finnish,\n    #[strum(serialize = \"ain\")]\n    Ainu,\n    #[strum(serialize = \"alg\")]\n    Algonquian,\n    #[strum(serialize = \"fre\")]\n    French,\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for ForeignLanguage {\n    fn get_id(&self) -> &'static str {\n        match self {\n            ForeignLanguage::English => \"English\",\n            ForeignLanguage::Georgian => \"Georgian\",\n            ForeignLanguage::German => \"German\",\n            ForeignLanguage::Chinese => \"Chinese\",\n            ForeignLanguage::Manchu => \"Manchu\",\n            ForeignLanguage::Kurdish => \"Kurdish\",\n            ForeignLanguage::ChinookJargon => \"ChinookJargon\",\n            ForeignLanguage::Italian => \"Italian\",\n            ForeignLanguage::Malayalam => \"Malayalam\",\n            ForeignLanguage::Tibetian => \"Tibetian\",\n            ForeignLanguage::Mongolian => \"Mongolian\",\n            ForeignLanguage::Romanian => \"Romanian\",\n            ForeignLanguage::Bantu => \"Bantu\",\n   
         ForeignLanguage::Norwegian => \"Norwegian\",\n            ForeignLanguage::Greek => \"Greek\",\n            ForeignLanguage::Icelandic => \"Icelandic\",\n            ForeignLanguage::Breton => \"Breton\",\n            ForeignLanguage::Maori => \"Maori\",\n            ForeignLanguage::Latin => \"Latin\",\n            ForeignLanguage::Amharic => \"Amharic\",\n            ForeignLanguage::Khmer => \"Khmer\",\n            ForeignLanguage::Swahili => \"Swahili \",\n            ForeignLanguage::Hebrew => \"Hebrew\",\n            ForeignLanguage::Galician => \"Galician\",\n            ForeignLanguage::Korean => \"Korean\",\n            ForeignLanguage::Tamil => \"Tamil\",\n            ForeignLanguage::Viatnamese => \"Viatnamese\",\n            ForeignLanguage::Polish => \"Polish\",\n            ForeignLanguage::Sanskrit => \"Sanskrit\",\n            ForeignLanguage::Persian => \"Persian\",\n            ForeignLanguage::Filipino => \"Filipino\",\n            ForeignLanguage::Moldavian => \"Moldavian\",\n            ForeignLanguage::Croatian => \"Croatian\",\n            ForeignLanguage::Thai => \"Thai\",\n            ForeignLanguage::Burmese => \"Burmese\",\n            ForeignLanguage::Slovak => \"Slovak\",\n            ForeignLanguage::Czech => \"Czech\",\n            ForeignLanguage::Hindi => \"Hindi\",\n            ForeignLanguage::Mapudungun => \"Mapudungun\",\n            ForeignLanguage::Turkish => \"Turkish\",\n            ForeignLanguage::Hawaiian => \"Hawaiian\",\n            ForeignLanguage::Afrikaans => \"Afrikaans\",\n            ForeignLanguage::Esperanto => \"Esperanto\",\n            ForeignLanguage::Yiddish => \"Yiddish\",\n            ForeignLanguage::Somali => \"Somali\",\n            ForeignLanguage::Tahitian => \"Tahitian\",\n            ForeignLanguage::Urdu => \"Urdu\",\n            ForeignLanguage::Indonesian => \"Indonesian\",\n            ForeignLanguage::Estonian => \"Estonian\",\n            ForeignLanguage::Bulgarian => 
\"Bulgarian\",\n            ForeignLanguage::Arabic => \"Arabic\",\n            ForeignLanguage::Danish => \"Danish\",\n            ForeignLanguage::Portuguese => \"Portuguese\",\n            ForeignLanguage::Finnish => \"Finnish\",\n            ForeignLanguage::Ainu => \"Ainu\",\n            ForeignLanguage::Algonquian => \"Algonquian\",\n            ForeignLanguage::French => \"French\",\n        }\n    }\n}\n\nimpl Default for ForeignLanguage {\n    #[inline]\n    fn default() -> Self {\n        Self::English\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/gtype.rs",
    "content": "use serde::{Deserialize, Serialize};\nuse std::convert::TryFrom;\nuse strum_macros::{AsRefStr, EnumString};\n\n#[derive(Debug, PartialEq, Clone, Copy, AsRefStr, EnumString, Serialize, Deserialize, Hash)]\n#[repr(u8)]\npub enum GType {\n    #[strum(serialize = \"lit\")]\n    Literal,\n    #[strum(serialize = \"fig\")]\n    Figurative,\n    #[strum(serialize = \"expl\")]\n    Explanation,\n}\n\nimpl TryFrom<i32> for GType {\n    type Error = ();\n\n    #[inline]\n    fn try_from(i: i32) -> Result<Self, Self::Error> {\n        Ok(match i {\n            0 => Self::Literal,\n            1 => Self::Figurative,\n            2 => Self::Explanation,\n            _ => return Err(()),\n        })\n    }\n}\n\nimpl Into<i32> for GType {\n    #[inline]\n    fn into(self) -> i32 {\n        match self {\n            Self::Literal => 0,\n            Self::Figurative => 1,\n            Self::Explanation => 2,\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/inflection.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n#[cfg(feature = \"jotoba_intern\")]\nuse jp_inflections::{Verb, VerbType, WordForm};\n\n/// A single Inflection\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize)]\npub enum Inflection {\n    Negative,\n    Polite,\n    Present,\n    Past,\n    TeForm,\n    Potential,\n    Passive,\n    Causative,\n    CausativePassive,\n    PotentialOrPassive,\n    Imperative,\n    Tai,\n    TeIru,\n    TeAru,\n    TeMiru,\n    TeShimau,\n    Chau,\n    TeOku,\n    Toku,\n    Tara,\n    Tari,\n    Ba,\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl localization::traits::Translatable for Inflection {\n    fn get_id(&self) -> &'static str {\n        match self {\n            Inflection::Negative => \"Negative\",\n            Inflection::Polite => \"Polite\",\n            Inflection::Present => \"Present\",\n            Inflection::Past => \"Past\",\n            Inflection::TeForm => \"TeForm\",\n            Inflection::Potential => \"Potential\",\n            Inflection::Passive => \"Passive\",\n            Inflection::Causative => \"Causative\",\n            Inflection::CausativePassive => \"CausativePassive\",\n            Inflection::PotentialOrPassive => \"PotentialOrPassive\",\n            Inflection::Imperative => \"Imperative\",\n            Inflection::Tai => \"Tai\",\n            Inflection::TeIru => \"TeIru\",\n            Inflection::TeAru => \"TeAru\",\n            Inflection::TeMiru => \"TeMiru\",\n            Inflection::TeShimau => \"TeShimau\",\n            Inflection::TeOku => \"TeOku\",\n            Inflection::Chau => \"Chau\",\n            Inflection::Toku => \"Toku\",\n            Inflection::Tara => \"Tara\",\n            Inflection::Tari => \"Tari\",\n            Inflection::Ba => \"Ba\",\n        }\n    }\n\n    fn gettext<'a>(\n        &self,\n        dict: &'a localization::TranslationDict,\n        language: Option<localization::language::Language>,\n    ) -> &'a str {\n        
self.pgettext(dict, \"inflection\", language)\n    }\n}\n\n/// A set of different inflections which will be displayed for vebs\n#[derive(Serialize, Deserialize)]\npub struct Inflections {\n    pub present: InflectionPair,\n    pub present_polite: InflectionPair,\n\n    pub past: InflectionPair,\n    pub past_polite: InflectionPair,\n\n    pub te_form: InflectionPair,\n\n    pub potential: InflectionPair,\n    pub passive: InflectionPair,\n    pub causative: InflectionPair,\n\n    pub causative_passive: InflectionPair,\n    pub imperative: InflectionPair,\n}\n\n#[derive(Serialize, Deserialize)]\npub struct InflectionPair {\n    #[serde(rename = \"p\")]\n    pub positive: String,\n    #[serde(rename = \"n\")]\n    pub negative: String,\n}\n\npub fn build_inflections(\n    verb: &Verb,\n    is_exception: bool,\n) -> Result<Inflections, jp_inflections::error::Error> {\n    return Ok(Inflections {\n        present: InflectionPair {\n            positive: verb.dictionary(WordForm::Short)?.try_kana(is_exception),\n            negative: verb.negative(WordForm::Short)?.try_kana(is_exception),\n        },\n        present_polite: InflectionPair {\n            positive: verb.dictionary(WordForm::Long)?.try_kana(is_exception),\n            negative: verb.negative(WordForm::Long)?.try_kana(is_exception),\n        },\n\n        past: InflectionPair {\n            positive: verb.past(WordForm::Short)?.try_kana(is_exception),\n            negative: verb.negative_past(WordForm::Short)?.try_kana(is_exception),\n        },\n        past_polite: InflectionPair {\n            positive: verb.past(WordForm::Long)?.try_kana(is_exception),\n            negative: verb.negative_past(WordForm::Long)?.try_kana(is_exception),\n        },\n        te_form: InflectionPair {\n            positive: verb.te_form()?.try_kana(is_exception),\n            negative: verb.negative_te_form()?.try_kana(is_exception),\n        },\n        potential: InflectionPair {\n            positive: 
verb.potential(WordForm::Short)?.try_kana(is_exception),\n            negative: verb\n                .negative_potential(WordForm::Short)?\n                .try_kana(is_exception),\n        },\n        passive: InflectionPair {\n            positive: verb.passive()?.try_kana(is_exception),\n            negative: verb.negative_passive()?.try_kana(is_exception),\n        },\n        causative: InflectionPair {\n            positive: verb.causative()?.try_kana(is_exception),\n            negative: verb.negative_causative()?.try_kana(is_exception),\n        },\n        causative_passive: InflectionPair {\n            positive: verb.causative_passive()?.try_kana(is_exception),\n            negative: verb.negative_causative_passive()?.try_kana(is_exception),\n        },\n        imperative: InflectionPair {\n            positive: verb.imperative()?.try_kana(is_exception),\n            negative: verb.imperative_negative()?.try_kana(is_exception),\n        },\n    });\n}\n\n/// Returns the inflections of `word` if its a verb\n#[cfg(feature = \"jotoba_intern\")]\npub fn of_word(word: &super::Word) -> Option<Inflections> {\n    let verb = get_jp_verb(word)?;\n    let is_exception = word\n        .reading\n        .kanji\n        .as_ref()\n        .map(|kanji| kanji.reading == \"為る\" || kanji.reading == \"来る\")\n        .unwrap_or(false);\n    build_inflections(&verb, is_exception).ok()\n}\n\n/// Returns a jp_inflections::Verb if [`self`] is a verb\n#[cfg(feature = \"jotoba_intern\")]\npub fn get_jp_verb(word: &super::Word) -> Option<Verb> {\n    use super::part_of_speech::PartOfSpeech;\n    use crate::jotoba::words::part_of_speech::{self, IrregularVerb};\n\n    let is_exception = word.get_pos().any(|i| match i {\n        PartOfSpeech::Verb(v) => match v {\n            part_of_speech::VerbType::Irregular(i) => match i {\n                IrregularVerb::Suru => true,\n                _ => false,\n            },\n            part_of_speech::VerbType::Kuru => true,\n            
_ => false,\n        },\n        _ => false,\n    });\n\n    let verb_type = if word.get_pos().any(|i| i.is_ichidan()) {\n        VerbType::Ichidan\n    } else if word.get_pos().any(|i| i.is_godan()) {\n        VerbType::Godan\n    } else if is_exception {\n        VerbType::Exception\n    } else {\n        return None;\n    };\n\n    let verb = Verb::new(\n        jp_inflections::Word::new(\n            &word.reading.kana.reading,\n            word.reading.kanji.as_ref().map(|i| &i.reading),\n        ),\n        verb_type,\n    );\n\n    // Check if [`verb`] really is a valid verb in dictionary form\n    verb.word.is_verb().then(|| verb)\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/information.rs",
    "content": "#[cfg(feature = \"jotoba_intern\")]\nuse localization::traits::Translatable;\nuse strum_macros::{AsRefStr, EnumString};\n\nuse serde::{Deserialize, Serialize};\n\n#[derive(Debug, PartialEq, Clone, Copy, AsRefStr, EnumString, Serialize, Deserialize, Hash, Eq)]\n#[repr(u8)]\npub enum Information {\n    #[strum(serialize = \"ateji\")]\n    Ateji,\n    #[strum(serialize = \"ik\")]\n    IrregularKana,\n    #[strum(serialize = \"iK\")]\n    IrregularKanji,\n    #[strum(serialize = \"io\")]\n    IrregularOkurigana,\n    #[strum(serialize = \"oK\")]\n    OutdatedKanji,\n    #[strum(serialize = \"ok\")]\n    OutdatedKana,\n    #[strum(serialize = \"gikun\")]\n    Gikun,\n    #[strum(serialize = \"uK\")]\n    UsuallyKana,\n    #[strum(serialize = \"rK\")]\n    RarelyUsedKanjiForm,\n    #[strum(serialize = \"sK\")]\n    SearchOnlyKanji,\n    #[strum(serialize = \"sk\")]\n    SearchOnlyKana,\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for Information {\n    fn get_id(&self) -> &'static str {\n        match self {\n            Information::Ateji => \"ateji\",\n            Information::IrregularKana => \"irregular kana\",\n            Information::IrregularKanji => \"irregular kanji\",\n            Information::IrregularOkurigana => \"irregular okurigana\",\n            Information::OutdatedKanji => \"outdated kanji\",\n            Information::OutdatedKana => \"outdated kana\",\n            Information::Gikun => \"gikun\",\n            Information::UsuallyKana => \"usually written in kana\",\n            Information::RarelyUsedKanjiForm => \"rarely used kanji form\",\n            Information::SearchOnlyKanji => \"Seach only kanji form\",\n            Information::SearchOnlyKana => \"Seach only kana form\",\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/misc.rs",
    "content": "#[cfg(feature = \"jotoba_intern\")]\nuse localization::traits::Translatable;\nuse strum::IntoEnumIterator;\nuse strum_macros::{AsRefStr, EnumIter, EnumString};\n\nuse serde::{Deserialize, Serialize};\n\n#[derive(\n    Debug, PartialEq, Clone, Copy, AsRefStr, EnumString, Serialize, Deserialize, Hash, EnumIter,\n)]\n#[repr(u8)]\npub enum Misc {\n    #[strum(serialize = \"ship\")]\n    Ship,\n    #[strum(serialize = \"abbr\", serialize = \"abbreviation\")]\n    Abbreviation,\n    #[strum(serialize = \"arch\", serialize = \"archaism\")]\n    Archaism,\n    #[strum(serialize = \"char\")]\n    Character,\n    #[strum(serialize = \"chn\", serialize = \"childrenslanguage\")]\n    ChildrensLanguage,\n    #[strum(serialize = \"col\", serialize = \"colloquialism\")]\n    Colloquialism,\n    #[strum(serialize = \"company\")]\n    CompanyName,\n    #[strum(serialize = \"creat\")]\n    Creature,\n    #[strum(serialize = \"dated\")]\n    DatedTerm,\n    #[strum(serialize = \"dei\")]\n    Deity,\n    #[strum(serialize = \"derog\", serialize = \"derogatory\")]\n    Derogatory,\n    #[strum(serialize = \"doc\")]\n    Document,\n    #[strum(serialize = \"ev\")]\n    Event,\n    #[strum(serialize = \"euph\")]\n    Euphemistic,\n    #[strum(serialize = \"fam\", serialize = \"familiarlanguage\")]\n    FamiliarLanguage,\n    #[strum(serialize = \"fem\", serialize = \"femaleterm\")]\n    FemaleTermOrLanguage,\n    #[strum(serialize = \"fict\", serialize = \"fiction\")]\n    Fiction,\n    #[strum(serialize = \"given\")]\n    GivenName,\n    #[strum(serialize = \"group\")]\n    Group,\n    #[strum(serialize = \"hist\", serialize = \"Historical\")]\n    HistoricalTerm,\n    #[strum(serialize = \"hon\", serialize = \"honorific\")]\n    HonorificLanguage,\n    #[strum(serialize = \"hum\", serialize = \"humblelanguage\")]\n    HumbleLanguage,\n    #[strum(serialize = \"id\", serialize = \"idomatic\")]\n    IdiomaticExpression,\n    #[strum(serialize = \"joc\")]\n    
JocularHumorousTerm,\n    #[strum(serialize = \"leg\", serialize = \"legend\")]\n    Legend,\n    #[strum(serialize = \"form\", serialize = \"formal\")]\n    LiteraryOrFormalTerm,\n    #[strum(serialize = \"m-sl\", serialize = \"mangaslang\")]\n    MangaSlang,\n    #[strum(serialize = \"male\", serialize = \"maleterm\")]\n    MaleTermOrLanguage,\n    #[strum(serialize = \"myth\")]\n    Mythology,\n    #[strum(serialize = \"net-sl\", serialize = \"internetslang\")]\n    InternetSlang,\n    #[strum(serialize = \"obj\", serialize = \"object\")]\n    Object,\n    #[strum(serialize = \"obs\", serialize = \"obsolete\")]\n    ObsoleteTerm,\n    #[strum(serialize = \"obsc\", serialize = \"obscure\")]\n    ObscureTerm,\n    #[strum(serialize = \"on-mim\", serialize = \"onomatopoeic\")]\n    OnomatopoeicOrMimeticWord,\n    #[strum(serialize = \"organization\")]\n    OrganizationName,\n    #[strum(serialize = \"oth\", serialize = \"other\")]\n    Other,\n    #[strum(serialize = \"person\", serialize = \"personname\")]\n    Personname,\n    #[strum(serialize = \"place\", serialize = \"placename\")]\n    PlaceName,\n    #[strum(serialize = \"poet\", serialize = \"poeticalterm\")]\n    PoeticalTerm,\n    #[strum(serialize = \"pol\", serialize = \"politelanguage\")]\n    PoliteLanguage,\n    #[strum(serialize = \"product\", serialize = \"productname\")]\n    ProductName,\n    #[strum(serialize = \"proverb\")]\n    Proverb,\n    #[strum(serialize = \"quote\", serialize = \"quotation\")]\n    Quotation,\n    #[strum(serialize = \"rare\")]\n    Rare,\n    #[strum(serialize = \"relig\")]\n    Religion,\n    #[strum(serialize = \"sens\", serialize = \"sensitive\")]\n    Sensitive,\n    #[strum(serialize = \"serv\")]\n    Service,\n    #[strum(serialize = \"sl\", serialize = \"slang\")]\n    Slang,\n    #[strum(serialize = \"station\")]\n    RailwayStation,\n    #[strum(serialize = \"surname\")]\n    FamilyOrSurname,\n    #[strum(serialize = \"uk\", serialize = \"usuallykana\")]\n    
UsuallyWrittenInKana,\n    #[strum(serialize = \"unclass\")]\n    UnclassifiedName,\n    #[strum(serialize = \"vulg\", serialize = \"vulgar\")]\n    VulgarExpressionOrWord,\n    #[strum(serialize = \"work\", serialize = \"artwork\")]\n    ArtWork,\n    #[strum(serialize = \"X\", serialize = \"rude\")]\n    RudeOrXRatedTerm,\n    #[strum(serialize = \"yoji\", serialize = \"yojijukugo\")]\n    Yojijukugo,\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for Misc {\n    fn get_id(&self) -> &'static str {\n        match self {\n            Misc::Abbreviation => \"Abbreviation\",\n            Misc::Archaism => \"Archaism\",\n            Misc::Character => \"Character\",\n            Misc::ChildrensLanguage => \"Childrens language\",\n            Misc::Colloquialism => \"Colloquialism\",\n            Misc::CompanyName => \"Company name\",\n            Misc::Creature => \"Creature\",\n            Misc::DatedTerm => \"Dated term\",\n            Misc::Deity => \"Deity\",\n            Misc::Derogatory => \"Derogatory\",\n            Misc::Document => \"Document\",\n            Misc::Event => \"Event\",\n            Misc::Euphemistic => \"Euphemistic\",\n            Misc::FamiliarLanguage => \"Familiar language\",\n            Misc::FemaleTermOrLanguage => \"Female term/language\",\n            Misc::Fiction => \"Fiction\",\n            Misc::GivenName => \"Given name\",\n            Misc::Group => \"Group\",\n            Misc::HistoricalTerm => \"Historical term\",\n            Misc::HonorificLanguage => \"Honorific language\",\n            Misc::HumbleLanguage => \"Humble language\",\n            Misc::IdiomaticExpression => \"Idiomatic expression\",\n            Misc::JocularHumorousTerm => \"Jocular humorous term\",\n            Misc::Legend => \"Legend\",\n            Misc::LiteraryOrFormalTerm => \"Literary/formal term\",\n            Misc::MangaSlang => \"Manga slang\",\n            Misc::MaleTermOrLanguage => \"Male term/language\",\n            
Misc::Mythology => \"Mythology\",\n            Misc::InternetSlang => \"Internet slang\",\n            Misc::Object => \"Object\",\n            Misc::ObsoleteTerm => \"Obsolete term\",\n            Misc::ObscureTerm => \"Obscure term\",\n            Misc::OnomatopoeicOrMimeticWord => \"Onomatopoetic or mimetic word\",\n            Misc::OrganizationName => \"Organization name\",\n            Misc::Other => \"Other\",\n            Misc::Personname => \"Person name\",\n            Misc::PlaceName => \"Place name\",\n            Misc::PoeticalTerm => \"Poetical term\",\n            Misc::PoliteLanguage => \"Polite language\",\n            Misc::ProductName => \"Product name\",\n            Misc::Proverb => \"Proverb\",\n            Misc::Quotation => \"Qutation\",\n            Misc::Rare => \"Rare\",\n            Misc::Religion => \"Religion\",\n            Misc::Sensitive => \"Sensitive\",\n            Misc::Service => \"Service\",\n            Misc::Slang => \"Slang\",\n            Misc::RailwayStation => \"Railway station\",\n            Misc::FamilyOrSurname => \"Family or surname\",\n            Misc::UsuallyWrittenInKana => \"Usually written in kana\",\n            Misc::UnclassifiedName => \"Unclassified name\",\n            Misc::Ship => \"Ship\",\n            Misc::VulgarExpressionOrWord => \"Vulgar expression/word\",\n            Misc::ArtWork => \"Artwork\",\n            Misc::RudeOrXRatedTerm => \"Rude/x-rated term\",\n            Misc::Yojijukugo => \"Yojijukugo\",\n        }\n    }\n}\n\nimpl Misc {\n    #[inline]\n    pub fn iter() -> impl Iterator<Item = Misc> {\n        <Misc as IntoEnumIterator>::iter()\n    }\n\n    #[inline]\n    pub fn as_str(&self) -> &str {\n        self.as_ref()\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/mod.rs",
    "content": "pub mod dialect;\npub mod dict;\npub mod field;\npub mod foreign_language;\npub mod gtype;\npub mod inflection;\npub mod information;\npub mod misc;\npub mod part_of_speech;\npub mod pitch;\npub mod priority;\npub mod reading;\npub mod sense;\n\npub use dict::Dict;\n\nuse super::language::{param::AsLangParam, Language};\nuse bitflags::BitFlag;\nuse itertools::Itertools;\nuse jp_utils::{\n    furi::{parse::FuriParser, segment::SegmentRef},\n    JapaneseExt,\n};\nuse misc::Misc;\nuse part_of_speech::{PartOfSpeech, PosSimple};\nuse pitch::{raw_data::PitchValues, Pitch};\nuse reading::{Reading, ReadingIter};\nuse sense::{Sense, SenseGlossIter};\nuse serde::{Deserialize, Serialize};\nuse std::{\n    hash::{Hash, Hasher},\n    num::{NonZeroU32, NonZeroU8},\n    path::Path,\n};\n\n/// A single word in Jotobas word search\n#[derive(Clone, Default, Serialize, Deserialize, Eq)]\npub struct Word {\n    pub sequence: u32,\n    pub common: bool,\n    pub reading: Reading,\n    pub senses: Vec<Sense>,\n    pub furigana: Option<String>,\n    pub jlpt_lvl: Option<NonZeroU8>,\n    pub collocations: Option<Vec<u32>>,\n    pub transive_version: Option<NonZeroU32>,\n    pub intransive_version: Option<NonZeroU32>,\n    pub sentences_available: u16,\n    pub accents: PitchValues,\n}\n\nimpl Word {\n    /// Returns true if a word is common\n    #[inline]\n    pub fn is_common(&self) -> bool {\n        self.common\n    }\n\n    /// Returns the jlpt level of a word. `None` if a word doesn't have a JLPT lvl assigned\n    #[inline]\n    pub fn get_jlpt_lvl(&self) -> Option<u8> {\n        self.jlpt_lvl.map(|i| i.get())\n    }\n\n    /// Returns the main reading of a word. This is the kanji reading if a kanji reading\n    /// exists. Otherwise its the kana reading\n    #[inline]\n    pub fn get_reading(&self) -> &Dict {\n        self.reading.get_reading()\n    }\n\n    /// Returns the main reading of a word as str. This is the kanji reading if a kanji reading\n    /// exists. 
Otherwise its the kana reading\n    #[inline]\n    pub fn get_reading_str(&self) -> &str {\n        &self.get_reading().reading\n    }\n\n    /// Returns an iterator over all sense and its glosses\n    #[inline]\n    pub fn sense_gloss_iter(&self) -> SenseGlossIter {\n        SenseGlossIter::new(&self)\n    }\n\n    /// Return all senses of a language\n    #[inline]\n    pub fn senses_by_lang(&self, language: impl AsLangParam) -> Vec<&Sense> {\n        let language = language.as_lang();\n        self.senses\n            .iter()\n            .filter(|i| language.eq_to_lang(&i.language))\n            .collect()\n    }\n\n    /// Get senses ordered by language (non-english first)\n    pub fn get_senses_orderd(&self, english_on_top: bool, _language: Language) -> Vec<Vec<Sense>> {\n        let (english, other): (Vec<Sense>, Vec<Sense>) = self\n            .senses\n            .clone()\n            .into_iter()\n            .partition(|i| i.language == Language::English);\n\n        if english_on_top {\n            vec![english, other]\n        } else {\n            vec![other, english]\n        }\n    }\n\n    /// Get senses ordered by language (non-english first)\n    pub fn get_senses_with_en(&self) -> Vec<Vec<Sense>> {\n        let (english, other): (Vec<Sense>, Vec<Sense>) = self\n            .senses\n            .clone()\n            .into_iter()\n            .partition(|i| i.language == Language::English);\n\n        vec![other, english]\n    }\n\n    /// Returns all senses of the word\n    #[inline]\n    pub fn senses(&self) -> &[Sense] {\n        &self.senses\n    }\n\n    #[inline]\n    pub fn sense_by_id(&self, id: u8) -> Option<&Sense> {\n        self.senses.get(id as usize)\n    }\n\n    pub fn get_sense_gloss(&self, id: u16) -> Option<(&Sense, &sense::Gloss)> {\n        let (sense_id, gloss_id) = sense::from_unique_id(id);\n        let sense = self.sense_by_id(sense_id)?;\n        let gloss = sense.gloss_by_id(gloss_id)?;\n        Some((sense, gloss))\n    
}\n\n    /// Returns an Iterator over the words glosses using a given language\n    pub fn gloss_iter_by_lang(&self, lang_param: impl AsLangParam) -> impl Iterator<Item = &str> {\n        let lang_param = lang_param.as_lang();\n        self.sense_gloss_iter()\n            .filter(move |i| lang_param.eq_to_lang(&i.0.language))\n            .map(|i| i.1.gloss.as_str())\n    }\n\n    /// Get amount of tags which will be displayed below the reading\n    pub fn get_word_tag_count(&self) -> u8 {\n        [self.is_common(), self.get_jlpt_lvl().is_some()]\n            .iter()\n            .filter(|b| **b)\n            .count() as u8\n    }\n\n    /// Returns `true` if the word has at least one sentence in the given language\n    pub fn has_sentence(&self, lang: impl AsLangParam) -> bool {\n        let lang_p = lang.as_lang();\n        let lang: i32 = lang_p.language().into();\n\n        BitFlag::<u16>::from(self.sentences_available).get(lang as u16)\n            || (lang_p.en_fallback()\n                && !lang_p.is_english()\n                && BitFlag::<u16>::from(self.sentences_available).get(Language::English as u16))\n    }\n\n    /// Returns true if word has a misc information matching `misc`. 
This requires english glosses\n    /// to be available since they're the only one holding misc information\n    #[inline]\n    pub fn has_misc(&self, misc: &Misc) -> bool {\n        self.senses\n            .iter()\n            .filter_map(|i| i.misc)\n            .any(|i| i == *misc)\n    }\n\n    /// Returns `true` if word has at least one of the provided part of speech\n    pub fn has_pos(&self, pos_filter: &[PosSimple]) -> bool {\n        for sense in self.senses.iter().map(|i| i.get_pos_simple()) {\n            if sense.iter().any(|i| pos_filter.contains(i)) {\n                return true;\n            }\n        }\n\n        false\n    }\n\n    /// Returns `true` if word has all of the provided part of speech\n    #[inline]\n    pub fn has_all_pos(&self, pos_filter: &[PosSimple]) -> bool {\n        self.has_all_pos_iter(pos_filter.iter())\n    }\n\n    /// Returns `true` if word has all of the provided part of speech\n    #[inline]\n    pub fn has_all_pos_iter<'a, I>(&self, mut pos_filter: I) -> bool\n    where\n        I: Iterator<Item = &'a PosSimple> + 'a,\n    {\n        pos_filter.all(|pos| self.senses.iter().any(|s| s.has_pos_simple(pos)))\n    }\n\n    /// Returns `true` if a word has at least one translation for the provided language, or english\n    /// if `allow_english` is `true`\n    #[inline]\n    pub fn has_language(&self, language: impl AsLangParam) -> bool {\n        let lang = language.as_lang();\n        self.senses.iter().any(|i| lang.eq_to_lang(&i.language))\n    }\n\n    /// Returns `true` if a word has collocations\n    #[inline]\n    pub fn has_collocations(&self) -> bool {\n        self.collocations.is_some()\n    }\n\n    /// Returns an iterator over all reading elements\n    #[inline]\n    pub fn reading_iter(&self, allow_kana: bool) -> ReadingIter<'_> {\n        self.reading.iter(allow_kana)\n    }\n\n    /// Returns true if word has `reading`\n    #[inline]\n    pub fn has_reading(&self, reading: &str) -> bool {\n        
self.reading_iter(true).any(|j| j.reading == reading)\n    }\n\n    /// Returns `true` if the word has a kanji reading\n    #[inline]\n    pub fn has_kanji(&self) -> bool {\n        self.get_reading_str().has_kanji()\n    }\n\n    /// Returns `true` if `word` has `reading` as main (main kanji or kana reading)\n    pub fn has_main_reading(&self, reading: &str) -> bool {\n        self.reading.kana.reading == reading\n            || self\n                .reading\n                .kanji\n                .as_ref()\n                .map(|i| i.reading == reading)\n                .unwrap_or(false)\n    }\n\n    /// Returns an iterator over all parts of speech of a word\n    #[inline]\n    pub fn get_pos(&self) -> impl Iterator<Item = &PartOfSpeech> {\n        self.senses\n            .iter()\n            .map(|i| i.part_of_speech.iter())\n            .flatten()\n    }\n\n    #[inline]\n    pub fn get_kana(&self) -> &str {\n        &self.reading.kana.reading\n    }\n\n    #[inline]\n    pub fn has_pitch(&self) -> bool {\n        !self.accents.is_empty()\n    }\n\n    /// Returns a renderable vec of accents with kana characters\n    pub fn get_pitches(&self) -> Vec<Pitch> {\n        self.accents\n            .iter()\n            .filter_map(|drop| Pitch::new(self.get_kana(), drop))\n            .collect()\n    }\n\n    /// Returns a renderable vec of accents with kana characters\n    #[inline]\n    pub fn get_first_pitch(&self) -> Option<Pitch> {\n        let drop = self.accents.get(0)?;\n        Pitch::new(self.get_kana(), drop)\n    }\n\n    /// Return `true` if the word is a katakana word\n    #[inline]\n    pub fn is_katakana_word(&self) -> bool {\n        self.reading.is_katakana()\n    }\n\n    /// Removes all languages except the one specified and potentionally english when enabled\n    #[inline]\n    pub fn adjust_language(&mut self, lang: impl AsLangParam) {\n        let lang = lang.as_lang();\n        self.senses.retain(|j| lang.eq_to_lang(&j.language));\n    
}\n\n    /// Returns furigana reading-pairs of an Item\n    #[inline]\n    pub fn get_furigana(&self) -> Option<Vec<SegmentRef>> {\n        let furi = self.furigana.as_ref()?;\n        FuriParser::new(furi)\n            .collect::<Result<Vec<SegmentRef>, _>>()\n            .ok()\n    }\n}\n\n// Jotoba intern only features\n#[cfg(feature = \"jotoba_intern\")]\nimpl Word {\n    /// Get the audio's filename of the word\n    #[inline]\n    pub fn audio_file_name(&self) -> Option<String> {\n        self.reading\n            .kanji\n            .as_ref()\n            .map(|kanji| format!(\"{}【{}】.mp3\", kanji.reading, self.reading.kana.reading))\n    }\n\n    /// Get the audio's filename of the word\n    #[inline]\n    pub fn audio_file_name_old(&self) -> Option<String> {\n        self.reading.kanji.as_ref().and_then(|kanji| {\n            /* let frame_path = format!(\"svg/kanji/{}_frames.svg\", self.literal);\n            let frame_path = Path::new(&frame_path);\n            assets_path.as_ref().join(frame_path) */\n\n            let file = format!(\"{}【{}】.mp3\", kanji.reading, self.reading.kana.reading);\n            std::path::Path::new(&format!(\"html/audio/mp3/{}\", file))\n                .exists()\n                .then(|| file)\n        })\n    }\n\n    /// Get the audio path of a word\n    #[inline]\n    pub fn audio_file<P: AsRef<Path>>(&self, _assets_path: P) -> Option<String> {\n        self.reading.kanji.as_ref().and_then(|kanji| {\n            let file = format!(\"mp3/{}【{}】.mp3\", kanji.reading, self.reading.kana.reading);\n            std::path::Path::new(&format!(\"html/audio/{}\", file))\n                .exists()\n                .then(|| file)\n        })\n    }\n\n    /// Get alternative readings in a beautified, print-ready format\n    #[inline]\n    pub fn alt_readings_beautified(&self) -> String {\n        self.reading\n            .alternative\n            .iter()\n            .map(|i| i.reading.clone())\n            .join(\", \")\n    }\n\n    
pub fn glosses_pretty(&self) -> String {\n        let senses = self.get_senses_with_en();\n\n        // Try to use glosses with users language\n        if !senses[0].is_empty() {\n            Self::pretty_print_senses(&senses[0])\n        } else {\n            // Fallback use english gloses\n            Self::pretty_print_senses(&senses[1])\n        }\n    }\n\n    fn pretty_print_senses(senses: &[Sense]) -> String {\n        senses\n            .iter()\n            .map(|i| i.glosses.clone())\n            .flatten()\n            .into_iter()\n            .map(|i| i.gloss)\n            .join(\", \")\n    }\n\n    /// Returns an [`Inflections`] value if [`self`] is a valid verb\n    #[inline]\n    pub fn get_inflections(&self) -> Option<inflection::Inflections> {\n        inflection::of_word(self)\n    }\n}\n\n/// Removes all senses which ain't in the provided language or english in case `show_english` is\n/// `true`\n#[cfg(feature = \"jotoba_intern\")]\npub fn filter_languages<'a, I: 'a + Iterator<Item = &'a mut Word>>(\n    iter: I,\n    lang: impl AsLangParam,\n) {\n    for word in iter {\n        word.adjust_language(lang);\n    }\n}\n\nimpl Hash for Word {\n    #[inline]\n    fn hash<H: Hasher>(&self, state: &mut H) {\n        self.sequence.hash(state);\n    }\n}\n\nimpl PartialEq for Word {\n    #[inline]\n    fn eq(&self, other: &Self) -> bool {\n        self.sequence == other.sequence\n    }\n}\n\nimpl std::fmt::Debug for Word {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        let senses = self\n            .senses_by_lang(Language::English)\n            .into_iter()\n            .map(|i| i.glosses.iter().map(|i| &i.gloss).join(\"|\"))\n            .join(\"\\n\");\n\n        f.debug_struct(\"Word\")\n            .field(\"Seq\", &self.sequence)\n            .field(\"Kana\", &self.reading.kana.reading)\n            .field(\"Reading\", &self.get_reading().reading)\n            .field(\"Common\", &self.is_common())\n            
.field(\"JLPT\", &self.jlpt_lvl)\n            .field(\"Translations\", &senses)\n            .finish()\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/part_of_speech.rs",
    "content": "#![allow(clippy::from_over_into)]\nuse std::convert::TryFrom;\n\n#[cfg(feature = \"jotoba_intern\")]\nuse localization::{language::Language, traits::Translatable, TranslationDict};\nuse serde::{Deserialize, Serialize};\nuse strum::{AsRefStr, EnumIter, IntoEnumIterator};\nuse strum_macros::EnumString;\n\n#[derive(\n    Debug,\n    PartialEq,\n    Clone,\n    Copy,\n    Hash,\n    EnumString,\n    EnumIter,\n    AsRefStr,\n    Serialize,\n    Deserialize,\n    Ord,\n    PartialOrd,\n    Eq,\n)]\n#[repr(u8)]\npub enum PosSimple {\n    #[strum(serialize = \"adverb\", serialize = \"adv\")]\n    Adverb,\n    #[strum(serialize = \"auxilary\", serialize = \"aux\")]\n    Auxilary,\n    #[strum(serialize = \"conjunction\", serialize = \"conj\")]\n    Conjunction,\n    #[strum(serialize = \"noun\", serialize = \"n\")]\n    Noun,\n    #[strum(serialize = \"prefix\", serialize = \"pre\")]\n    Prefix,\n    #[strum(serialize = \"suffix\", serialize = \"suf\")]\n    Suffix,\n    #[strum(serialize = \"particle\", serialize = \"part\")]\n    Particle,\n    #[strum(serialize = \"sfx\")]\n    Sfx,\n    #[strum(serialize = \"verb\", serialize = \"v\")]\n    Verb,\n    #[strum(serialize = \"adjective\", serialize = \"adj\")]\n    Adjective,\n    #[strum(serialize = \"counter\", serialize = \"count\")]\n    Counter,\n    #[strum(serialize = \"expression\", serialize = \"expr\")]\n    Expr,\n    #[strum(serialize = \"interjection\", serialize = \"inter\")]\n    Interjection,\n    #[strum(serialize = \"pronoun\", serialize = \"pron\")]\n    Pronoun,\n    #[strum(serialize = \"numeric\", serialize = \"nr\")]\n    Numeric,\n    #[strum(serialize = \"transitive\", serialize = \"tr\")]\n    Transitive,\n    #[strum(serialize = \"intransitive\", serialize = \"itr\")]\n    Intransitive,\n    #[strum(serialize = \"unclassified\", serialize = \"unc\")]\n    Unclassified,\n}\n\nimpl PosSimple {\n    #[inline]\n    pub fn iter() -> impl Iterator<Item = PosSimple> {\n        
<PosSimple as IntoEnumIterator>::iter()\n    }\n\n    #[inline]\n    pub fn as_str(&self) -> &str {\n        self.as_ref()\n    }\n}\n\nimpl TryFrom<i32> for PosSimple {\n    type Error = ();\n    fn try_from(i: i32) -> Result<Self, Self::Error> {\n        Ok(match i {\n            0 => Self::Adverb,\n            1 => Self::Auxilary,\n            2 => Self::Conjunction,\n            3 => Self::Noun,\n            4 => Self::Prefix,\n            5 => Self::Suffix,\n            6 => Self::Particle,\n            7 => Self::Sfx,\n            8 => Self::Verb,\n            9 => Self::Adjective,\n            10 => Self::Counter,\n            11 => Self::Expr,\n            12 => Self::Interjection,\n            13 => Self::Pronoun,\n            15 => Self::Numeric,\n            16 => Self::Unclassified,\n            17 => Self::Intransitive,\n            18 => Self::Transitive,\n            _ => return Err(()),\n        })\n    }\n}\n\nimpl Into<i32> for PosSimple {\n    fn into(self) -> i32 {\n        match self {\n            Self::Adverb => 0,\n            Self::Auxilary => 1,\n            Self::Conjunction => 2,\n            Self::Noun => 3,\n            Self::Prefix => 4,\n            Self::Suffix => 5,\n            Self::Particle => 6,\n            Self::Sfx => 7,\n            Self::Verb => 8,\n            Self::Adjective => 9,\n            Self::Counter => 10,\n            Self::Expr => 11,\n            Self::Interjection => 12,\n            Self::Pronoun => 13,\n            Self::Numeric => 15,\n            Self::Unclassified => 16,\n            Self::Intransitive => 17,\n            Self::Transitive => 18,\n        }\n    }\n}\n\nimpl PartOfSpeech {\n    /// Converts a `PartOfSpeech` tag to `PosSimple`\n    pub fn to_pos_simple(&self) -> Vec<PosSimple> {\n        let simple = match *self {\n            PartOfSpeech::Adjective(_) | PartOfSpeech::AuxilaryAdj => PosSimple::Adjective,\n            PartOfSpeech::Adverb | PartOfSpeech::AdverbTo => PosSimple::Adverb,\n    
        PartOfSpeech::Auxilary => PosSimple::Auxilary,\n            PartOfSpeech::Conjunction => PosSimple::Conjunction,\n            PartOfSpeech::Counter => PosSimple::Counter,\n            PartOfSpeech::Expr => PosSimple::Expr,\n            PartOfSpeech::Interjection => PosSimple::Interjection,\n            PartOfSpeech::Noun(n) => match n {\n                NounType::Suffix => PosSimple::Suffix,\n                _ => PosSimple::Noun,\n            },\n            PartOfSpeech::Numeric => PosSimple::Numeric,\n            PartOfSpeech::Pronoun => PosSimple::Pronoun,\n            PartOfSpeech::Prefix => PosSimple::Prefix,\n            PartOfSpeech::Suffix => PosSimple::Suffix,\n            PartOfSpeech::Particle => PosSimple::Particle,\n            PartOfSpeech::Unclassified => PosSimple::Unclassified,\n            PartOfSpeech::Sfx => PosSimple::Sfx,\n            PartOfSpeech::Verb(_) | PartOfSpeech::AuxilaryVerb => PosSimple::Verb,\n        };\n\n        if let PartOfSpeech::Verb(verb) = self {\n            match verb {\n                VerbType::Intransitive => vec![simple, PosSimple::Intransitive],\n                VerbType::Transitive => vec![simple, PosSimple::Transitive],\n                VerbType::Irregular(irr) => match irr {\n                    IrregularVerb::NounOrAuxSuru => vec![simple, PosSimple::Noun],\n                    _ => vec![simple],\n                },\n                _ => vec![simple],\n            }\n        } else {\n            vec![simple]\n        }\n    }\n}\n\n#[derive(Debug, PartialEq, Clone, Copy, Serialize, PartialOrd, Ord, Eq, Deserialize, Hash)]\n#[repr(u8)]\npub enum PartOfSpeech {\n    // Adjectives\n    Adjective(AdjectiveType),\n\n    // Adverb\n    Adverb,\n    AdverbTo,\n\n    // Auxilary\n    Auxilary,\n    AuxilaryAdj,\n    AuxilaryVerb,\n\n    // Other\n    Conjunction,\n    Counter,\n    Expr,\n    Interjection,\n\n    Noun(NounType),\n\n    Numeric,\n    Pronoun,\n    Prefix,\n    Suffix,\n    Particle,\n    
Unclassified,\n\n    Sfx,\n\n    // Verb\n    Verb(VerbType),\n}\n\nimpl PartOfSpeech {\n    /// Returns true if [`self`] is a godan PartOfSpeech variant\n    pub fn is_godan(&self) -> bool {\n        if let PartOfSpeech::Verb(v) = self {\n            matches!(v, VerbType::Godan(_))\n        } else {\n            false\n        }\n    }\n\n    /// Returns true if [`self`] is an ichdan PartOfSpeech variant\n    pub fn is_ichidan(&self) -> bool {\n        if let PartOfSpeech::Verb(v) = self {\n            match v {\n                VerbType::Ichidan => true,\n                _ => false,\n            }\n        } else {\n            false\n        }\n    }\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Ord, Eq, Serialize, Deserialize, Hash)]\n#[repr(u8)]\npub enum VerbType {\n    Nidan(NidanVerb),\n    Yodan(VerbEnding),\n    Godan(GodanVerbEnding),\n    Irregular(IrregularVerb),\n    Unspecified,\n    Intransitive,\n    Transitive,\n    Ichidan,\n    IchidanZuru,\n    IchidanKureru,\n    Kuru,\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Ord, Eq, Serialize, Deserialize, Hash)]\n#[repr(u8)]\npub enum AdjectiveType {\n    PreNounVerb,\n    /// I Adjective\n    Keiyoushi,\n    /// I Adjective conjugated like いい\n    KeiyoushiYoiIi,\n    Ku,\n    Na,\n    Nari,\n    No,\n    PreNoun,\n    Shiku,\n    Taru,\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, PartialOrd, Ord, Eq, Deserialize, Hash)]\n#[repr(u8)]\npub enum NounType {\n    Normal,\n    Adverbial,\n    Prefix,\n    Suffix,\n    Temporal,\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, PartialOrd, Ord, Eq, Hash)]\n#[repr(u8)]\npub enum IrregularVerb {\n    Nu,\n    Ru,\n    NounOrAuxSuru,\n    Suru,\n    SuruSpecial,\n    Su,\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, PartialOrd, Ord, Eq, Hash)]\npub struct NidanVerb {\n    class: VerbClass,\n    ending: VerbEnding,\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, 
Deserialize, PartialOrd, Ord, Eq, Hash)]\n#[repr(u8)]\npub enum VerbClass {\n    Upper,\n    Lower,\n    None,\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, PartialOrd, Ord, Eq, Hash)]\n#[repr(u8)]\npub enum VerbEnding {\n    Bu,\n    Dzu,\n    Gu,\n    Hu,\n    Ku,\n    Mu,\n    Nu,\n    Ru,\n    Su,\n    Tsu,\n    U,\n    Yu,\n    Zu,\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, PartialOrd, Ord, Eq, Hash)]\n#[repr(u8)]\npub enum GodanVerbEnding {\n    Bu,\n    Gu,\n    Ku,\n    Mu,\n    Nu,\n    Ru,\n    Su,\n    Tsu,\n    U,\n\n    Aru,\n    USpecial,\n    Uru,\n    RuIrreg,\n    IkuYuku,\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for PartOfSpeech {\n    fn get_id(&self) -> &'static str {\n        match self {\n            PartOfSpeech::Noun(noun_type) => noun_type.get_id(),\n            PartOfSpeech::Sfx => \"SoundFx\",\n            PartOfSpeech::Expr => \"Expression\",\n            PartOfSpeech::Counter => \"Counter\",\n            PartOfSpeech::Suffix => \"Suffix\",\n            PartOfSpeech::Prefix => \"Prefix\",\n            PartOfSpeech::Particle => \"Particle\",\n            PartOfSpeech::Interjection => \"Interjection\",\n            PartOfSpeech::Pronoun => \"Pronoun\",\n            PartOfSpeech::Auxilary => \"Auxilary\",\n            PartOfSpeech::Adjective(adj) => adj.get_id(),\n            PartOfSpeech::Numeric => \"Numeric\",\n            PartOfSpeech::AdverbTo => \"Adverb-To\",\n            PartOfSpeech::Adverb => \"Adverb\",\n            PartOfSpeech::Verb(verb) => verb.get_id(),\n            PartOfSpeech::AuxilaryAdj => \"Auxilary adjective\",\n            PartOfSpeech::AuxilaryVerb => \"Auxilary Verb\",\n            PartOfSpeech::Conjunction => \"Conjunction\",\n            PartOfSpeech::Unclassified => \"Unclassified\",\n        }\n    }\n\n    fn gettext_custom(&self, dict: &TranslationDict, language: Option<Language>) -> String {\n        match self {\n            
PartOfSpeech::Verb(verb) => verb.gettext_custom(dict, language),\n            _ => self.gettext(dict, language).to_owned(),\n        }\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for AdjectiveType {\n    fn get_id(&self) -> &'static str {\n        match self {\n            AdjectiveType::PreNounVerb => \"Noun or verb describing a noun\",\n            AdjectiveType::Keiyoushi => \"I adjective\",\n            AdjectiveType::KeiyoushiYoiIi => \"I adjective (conjugated like いい)\",\n            AdjectiveType::Ku => \"Ku adjective\",\n            AdjectiveType::Na => \"Na adjective\",\n            AdjectiveType::Nari => \"Formal form of na adjective\",\n            AdjectiveType::No => \"No adjective\",\n            AdjectiveType::PreNoun => \"Pre noun adjective\",\n            AdjectiveType::Shiku => \"Shiku adjective\",\n            AdjectiveType::Taru => \"Taru adjective\",\n        }\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for NounType {\n    fn get_id(&self) -> &'static str {\n        match self {\n            NounType::Normal => \"Noun\",\n            NounType::Adverbial => \"Noun adverbial\",\n            NounType::Prefix => \"Prefix (noun)\",\n            NounType::Suffix => \"Suffix (noun)\",\n            NounType::Temporal => \"Temporal noun\",\n        }\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for VerbType {\n    fn get_id(&self) -> &'static str {\n        match *self {\n            VerbType::Unspecified => \"Unspecified verb\",\n            VerbType::Intransitive => \"Intransitive verb\",\n            VerbType::Transitive => \"Transitive verb\",\n            VerbType::Ichidan => \"Ichidan verb\",\n            VerbType::IchidanZuru => \"Ichidan zuru verb\",\n            VerbType::IchidanKureru => \"Ichidan kureru verb\",\n            VerbType::Kuru => \"Kuru verb\",\n            VerbType::Irregular(irregular) => irregular.get_id(),\n            _ => \"Godan verb\",\n        }\n    
}\n\n    fn gettext_custom(&self, dict: &TranslationDict, language: Option<Language>) -> String {\n        match self {\n            VerbType::Irregular(i) => i.gettext_custom(dict, language),\n            _ => self.gettext(dict, language).to_owned(),\n        }\n    }\n}\n\n#[cfg(feature = \"jotoba_intern\")]\nimpl Translatable for IrregularVerb {\n    fn get_id(&self) -> &'static str {\n        match self {\n            IrregularVerb::Nu | IrregularVerb::Ru | IrregularVerb::Su => {\n                \"Irregular verb with {} ending\"\n            }\n            IrregularVerb::NounOrAuxSuru => \"Noun taking suru\",\n            IrregularVerb::Suru => \"Suru verb\",\n            IrregularVerb::SuruSpecial => \"Suru special\",\n        }\n    }\n\n    fn gettext_custom(&self, dict: &TranslationDict, language: Option<Language>) -> String {\n        match self {\n            IrregularVerb::Nu => self.gettext_fmt(dict, &[\"nu\"], language),\n            IrregularVerb::Ru => self.gettext_fmt(dict, &[\"ru\"], language),\n            IrregularVerb::Su => self.gettext_fmt(dict, &[\"su\"], language),\n            IrregularVerb::NounOrAuxSuru | IrregularVerb::Suru | IrregularVerb::SuruSpecial => {\n                self.gettext(dict, language).to_owned()\n            }\n        }\n    }\n}\n\n/// VerbType into String\nimpl Into<String> for VerbType {\n    fn into(self) -> String {\n        match self {\n            VerbType::Nidan(nidan) => {\n                let n: String = nidan.into();\n                format!(\"{}{}\", \"v2\", n)\n            }\n            VerbType::Yodan(yodan) => {\n                let y: String = yodan.into();\n                format!(\"{}{}\", \"v4\", y)\n            }\n            VerbType::Godan(godan) => {\n                let g: String = godan.into();\n                format!(\"{}{}\", \"v5\", g)\n            }\n            VerbType::Irregular(irreg) => irreg.into(),\n            VerbType::Ichidan => \"v1\".to_owned(),\n            
VerbType::IchidanKureru => \"v1-s\".to_owned(),\n            VerbType::Transitive => \"vt\".to_owned(),\n            VerbType::Intransitive => \"vi\".to_owned(),\n            VerbType::Kuru => \"vk\".to_owned(),\n            VerbType::IchidanZuru => \"vz\".to_owned(),\n            VerbType::Unspecified => \"v-unspec\".to_owned(),\n        }\n    }\n}\n\n/// Implement TryFrom for VerbType\nimpl TryFrom<&str> for VerbType {\n    type Error = ();\n\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        if value.len() < 2 || value[..1] != *\"v\" {\n            return Err(());\n        }\n\n        Ok(match &value[1..2] {\n            \"1\" => match value {\n                \"v1\" => VerbType::Ichidan,\n                \"v1-s\" => VerbType::IchidanKureru,\n                _ => return Err(()),\n            },\n            \"2\" => VerbType::Nidan(NidanVerb::try_from(value)?), // Nidan\n            \"4\" => VerbType::Yodan(VerbEnding::try_from(&value[2..3])?), // Yodan\n            \"5\" => VerbType::Godan(GodanVerbEnding::try_from(&value[2..])?), // Godan\n            _ => match value {\n                \"vi\" => VerbType::Intransitive,\n                \"vt\" => VerbType::Transitive,\n                \"v-unspec\" => VerbType::Unspecified,\n                \"vz\" => VerbType::IchidanZuru,\n                \"vk\" => VerbType::Kuru,\n                _ => VerbType::Irregular(IrregularVerb::try_from(value)?),\n            },\n        })\n    }\n}\n\nimpl TryFrom<&str> for IrregularVerb {\n    type Error = ();\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        Ok(match value {\n            \"vn\" => IrregularVerb::Nu,\n            \"vr\" => IrregularVerb::Ru,\n            \"vs\" => IrregularVerb::NounOrAuxSuru,\n            \"vs-i\" => IrregularVerb::Suru,\n            \"vs-s\" => IrregularVerb::SuruSpecial,\n            \"vs-c\" => IrregularVerb::Su,\n            _ => return Err(()),\n        })\n    }\n}\n\n/// IrregularVerb into 
String\nimpl Into<String> for IrregularVerb {\n    fn into(self) -> String {\n        match self {\n            IrregularVerb::Nu => \"vn\",\n            IrregularVerb::Ru => \"vr\",\n            IrregularVerb::NounOrAuxSuru => \"vs\",\n            IrregularVerb::Suru => \"vs-i\",\n            IrregularVerb::SuruSpecial => \"vs-s\",\n            IrregularVerb::Su => \"vs-c\",\n        }\n        .to_string()\n    }\n}\n\n/// GodanVerbEnding into String\nimpl Into<String> for GodanVerbEnding {\n    fn into(self) -> String {\n        match self {\n            GodanVerbEnding::Aru => \"aru\",\n            GodanVerbEnding::USpecial => \"u-s\",\n            GodanVerbEnding::Uru => \"uru\",\n            GodanVerbEnding::RuIrreg => \"r-i\",\n            GodanVerbEnding::IkuYuku => \"k-s\",\n            GodanVerbEnding::Bu => \"b\",\n            GodanVerbEnding::Ku => \"k\",\n            GodanVerbEnding::Gu => \"g\",\n            GodanVerbEnding::Nu => \"n\",\n            GodanVerbEnding::Mu => \"m\",\n            GodanVerbEnding::Ru => \"r\",\n            GodanVerbEnding::Su => \"s\",\n            GodanVerbEnding::Tsu => \"t\",\n            GodanVerbEnding::U => \"u\",\n        }\n        .to_string()\n    }\n}\n\n/// Implement TryFrom for VerbEnding\nimpl TryFrom<&str> for GodanVerbEnding {\n    type Error = ();\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        Ok(match value {\n            \"aru\" => GodanVerbEnding::Aru,\n            \"u-s\" => GodanVerbEnding::USpecial,\n            \"uru\" => GodanVerbEnding::Uru,\n            \"r-i\" => GodanVerbEnding::RuIrreg,\n            \"k-s\" => GodanVerbEnding::IkuYuku,\n            _ => match &value[0..1] {\n                \"b\" => GodanVerbEnding::Bu,\n                \"k\" => GodanVerbEnding::Ku,\n                \"g\" => GodanVerbEnding::Gu,\n                \"n\" => GodanVerbEnding::Nu,\n                \"m\" => GodanVerbEnding::Mu,\n                \"r\" => GodanVerbEnding::Ru,\n                
\"s\" => GodanVerbEnding::Su,\n                \"t\" => GodanVerbEnding::Tsu,\n                \"u\" => GodanVerbEnding::U,\n                _ => return Err(()),\n            },\n        })\n    }\n}\n\n/// VerbEnding into String\nimpl Into<String> for VerbEnding {\n    fn into(self) -> String {\n        match self {\n            VerbEnding::Bu => \"b\",\n            VerbEnding::Dzu => \"d\",\n            VerbEnding::Gu => \"g\",\n            VerbEnding::Hu => \"h\",\n            VerbEnding::Ku => \"k\",\n            VerbEnding::Mu => \"m\",\n            VerbEnding::Nu => \"n\",\n            VerbEnding::Ru => \"r\",\n            VerbEnding::Su => \"s\",\n            VerbEnding::Tsu => \"t\",\n            VerbEnding::U => \"w\",\n            VerbEnding::Yu => \"y\",\n            VerbEnding::Zu => \"z\",\n        }\n        .to_string()\n    }\n}\n\n/// Implement TryFrom for VerbEnding\nimpl TryFrom<&str> for VerbEnding {\n    type Error = ();\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        Ok(match value {\n            \"b\" => VerbEnding::Bu,\n            \"d\" => VerbEnding::Dzu,\n            \"g\" => VerbEnding::Gu,\n            \"h\" => VerbEnding::Hu,\n            \"k\" => VerbEnding::Ku,\n            \"m\" => VerbEnding::Mu,\n            \"n\" => VerbEnding::Nu,\n            \"r\" => VerbEnding::Ru,\n            \"s\" => VerbEnding::Su,\n            \"t\" => VerbEnding::Tsu,\n            \"w\" => VerbEnding::U,\n            \"y\" => VerbEnding::Yu,\n            \"z\" => VerbEnding::Zu,\n            _ => return Err(()),\n        })\n    }\n}\n\n/// NidanVerb into String\nimpl Into<String> for NidanVerb {\n    fn into(self) -> String {\n        let class = match self.class {\n            VerbClass::Upper => \"k\",\n            VerbClass::Lower | VerbClass::None => \"s\",\n        };\n        let ending: String = self.ending.into();\n        format!(\"{}-{}\", ending, class)\n    }\n}\n\n/// Implement TryFrom for NidanVerb\nimpl 
TryFrom<&str> for NidanVerb {\n    type Error = ();\n\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        if value.len() < 3 || value[..1] != *\"v\" {\n            return Err(());\n        }\n\n        if value == \"v2a-s\" {\n            return Ok(NidanVerb {\n                ending: VerbEnding::U,\n                class: VerbClass::None,\n            });\n        }\n\n        let class: VerbClass = match &value[4..5] {\n            \"k\" => VerbClass::Upper,\n            \"s\" => VerbClass::Lower,\n            _ => return Err(()),\n        };\n\n        let ending = VerbEnding::try_from(&value[2..3])?;\n\n        Ok(NidanVerb { class, ending })\n    }\n}\n\n/// NounType into String\nimpl Into<String> for NounType {\n    fn into(self) -> String {\n        match self {\n            NounType::Normal => \"n\",\n            NounType::Adverbial => \"n-adv\",\n            NounType::Prefix => \"n-pref\",\n            NounType::Suffix => \"n-suf\",\n            NounType::Temporal => \"n-t\",\n        }\n        .to_string()\n    }\n}\n\n/// Implement TryFrom for NounType\nimpl TryFrom<&str> for NounType {\n    type Error = ();\n\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        Ok(match &value[2..] 
{\n            \"adv\" => NounType::Adverbial,\n            \"pref\" => NounType::Prefix,\n            \"suf\" => NounType::Suffix,\n            \"t\" => NounType::Temporal,\n            _ => return Err(()),\n        })\n    }\n}\n\nimpl Into<String> for AdjectiveType {\n    fn into(self) -> String {\n        match self {\n            AdjectiveType::PreNounVerb => \"adj-f\",\n            AdjectiveType::Keiyoushi => \"adj-i\",\n            AdjectiveType::KeiyoushiYoiIi => \"adj-ix\",\n            AdjectiveType::Ku => \"adj-ku\",\n            AdjectiveType::Na => \"adj-na\",\n            AdjectiveType::Nari => \"adj-nari\",\n            AdjectiveType::No => \"adj-no\",\n            AdjectiveType::PreNoun => \"adj-pn\",\n            AdjectiveType::Shiku => \"adj-shiku\",\n            AdjectiveType::Taru => \"adj-t\",\n        }\n        .to_string()\n    }\n}\n\n/// Implement TryFrom for AdjectiveType\nimpl TryFrom<&str> for AdjectiveType {\n    type Error = ();\n\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        Ok(match value[4..].as_ref() {\n            \"f\" => AdjectiveType::PreNounVerb,\n            \"i\" => AdjectiveType::Keiyoushi,\n            \"ix\" => AdjectiveType::KeiyoushiYoiIi,\n            \"ku\" => AdjectiveType::Ku,\n            \"na\" => AdjectiveType::Na,\n            \"nari\" => AdjectiveType::Nari,\n            \"no\" => AdjectiveType::No,\n            \"pn\" => AdjectiveType::PreNoun,\n            \"shiku\" => AdjectiveType::Shiku,\n            \"t\" => AdjectiveType::Taru,\n            _ => return Err(()),\n        })\n    }\n}\n\nimpl Into<String> for PartOfSpeech {\n    fn into(self) -> String {\n        if let PartOfSpeech::Noun(noun) = self {\n            return noun.into();\n        }\n\n        match self {\n            PartOfSpeech::Adjective(adj) => adj.into(),\n            PartOfSpeech::Noun(noun) => noun.into(),\n            PartOfSpeech::Verb(verb) => verb.into(),\n            _ => match self {\n                
PartOfSpeech::Pronoun => \"pn\",\n                PartOfSpeech::Adverb => \"adv\",\n                PartOfSpeech::Auxilary => \"aux\",\n                PartOfSpeech::Counter => \"ctr\",\n                PartOfSpeech::Conjunction => \"conj\",\n                PartOfSpeech::Expr => \"exp\",\n                PartOfSpeech::Interjection => \"int\",\n                PartOfSpeech::Numeric => \"num\",\n                PartOfSpeech::Particle => \"prt\",\n                PartOfSpeech::Suffix => \"suf\",\n                PartOfSpeech::Unclassified => \"unc\",\n                PartOfSpeech::AdverbTo => \"adv-to\",\n                PartOfSpeech::AuxilaryAdj => \"aux-adj\",\n                PartOfSpeech::AuxilaryVerb => \"aux-v\",\n                PartOfSpeech::Prefix => \"pref\",\n                PartOfSpeech::Sfx => \"sfx\",\n                _ => unreachable!(), // already checked above\n            }\n            .to_string(),\n        }\n    }\n}\n\n/// Implement TryFrom for PartOfSpeech\nimpl TryFrom<&str> for PartOfSpeech {\n    type Error = ();\n\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        Ok(match value {\n            \"n\" => PartOfSpeech::Noun(NounType::Normal),\n            \"pn\" => PartOfSpeech::Pronoun,\n            \"sfx\" => PartOfSpeech::Sfx,\n            \"adv\" => PartOfSpeech::Adverb,\n            \"aux\" => PartOfSpeech::Auxilary,\n            \"ctr\" => PartOfSpeech::Counter,\n            \"exp\" => PartOfSpeech::Expr,\n            \"int\" => PartOfSpeech::Interjection,\n            \"num\" => PartOfSpeech::Numeric,\n            \"prt\" => PartOfSpeech::Particle,\n            \"conj\" => PartOfSpeech::Conjunction,\n            \"suf\" => PartOfSpeech::Suffix,\n            \"unc\" => PartOfSpeech::Unclassified,\n            \"adv-to\" => PartOfSpeech::AdverbTo,\n            \"aux-adj\" => PartOfSpeech::AuxilaryAdj,\n            \"aux-v\" => PartOfSpeech::AuxilaryVerb,\n            \"pref\" => PartOfSpeech::Prefix,\n            _ => 
{\n                if value.starts_with(\"n-\") {\n                    return Ok(PartOfSpeech::Noun(NounType::try_from(value)?));\n                }\n\n                if value.starts_with(\"adj\") {\n                    return Ok(PartOfSpeech::Adjective(AdjectiveType::try_from(value)?));\n                }\n\n                if value.starts_with('v') {\n                    return Ok(PartOfSpeech::Verb(VerbType::try_from(value)?));\n                }\n\n                return Err(());\n            }\n        })\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/pitch/border.rs",
    "content": "/// An HTML border for redering pitches\n#[repr(u8)]\npub enum Border {\n    Left,\n    Right,\n    Top,\n    Bottom,\n}\n\nimpl Border {\n    #[inline]\n    pub fn get_class(&self) -> char {\n        match self {\n            Border::Left => 'l',\n            Border::Right => 'r',\n            Border::Top => 't',\n            Border::Bottom => 'b',\n        }\n    }\n\n    #[inline]\n    pub fn horizontal(high: bool) -> Border {\n        if high {\n            Border::Top\n        } else {\n            Border::Bottom\n        }\n    }\n}\n\n/// Helper to build Border class strings\npub struct BorderBuilder {\n    inner: String,\n}\n\nimpl BorderBuilder {\n    #[inline]\n    pub fn new(initial: Border) -> Self {\n        let mut inner = String::with_capacity(3);\n        inner.push(initial.get_class());\n        Self { inner }\n    }\n\n    #[inline]\n    pub fn add(&mut self, border: Border) {\n        self.inner.push(' ');\n        self.inner.push(border.get_class());\n    }\n\n    #[inline]\n    pub fn build(self) -> String {\n        self.inner\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/pitch/mod.rs",
    "content": "pub mod raw_data;\n\npub mod border;\n\nuse jp_utils::JapaneseExt;\nuse serde::{Deserialize, Serialize};\n\n/// Owned pitch entry of a word\n#[derive(Clone, Serialize, Deserialize, Debug)]\npub struct Pitch {\n    pub parts: Vec<PitchPart>,\n}\n\nimpl Pitch {\n    pub fn new(kana: &str, drop: u8) -> Option<Self> {\n        let mut kana_items = split_kana(kana).collect::<Vec<_>>();\n        kana_items.push(\"\");\n        let syllable_count = kana_items.len();\n\n        if syllable_count == 0 || drop > 6 {\n            return None;\n        }\n        let mut kana_items = kana_items.into_iter();\n\n        let first_kana = kana_items.next()?;\n\n        if drop == 0 || drop == 1 {\n            if syllable_count == 1 {\n                let inner = vec![PitchPart::new(first_kana, drop == 1)];\n                return Some(Self::new_raw(inner));\n            } else {\n                let part1 = PitchPart::new(first_kana, drop == 1);\n                let part2 = PitchPart::new(&kana[first_kana.bytes().len()..], drop == 0);\n                return Some(Self::new_raw(vec![part1, part2]));\n            }\n        }\n\n        let up: usize = kana_items\n            .by_ref()\n            .take((drop - 1) as usize)\n            .map(|i| i.bytes().len())\n            .sum();\n\n        let parts = vec![\n            PitchPart::new(first_kana, false),\n            PitchPart::new(\n                &kana[first_kana.bytes().len()..first_kana.bytes().len() + up],\n                true,\n            ),\n            PitchPart::new(&kana[first_kana.bytes().len() + up..], false),\n        ];\n\n        return Some(Pitch::new_raw(parts));\n    }\n\n    #[inline]\n    fn new_raw(parts: Vec<PitchPart>) -> Self {\n        Self { parts }\n    }\n\n    /// Get a reference to the pitch's parts.\n    #[inline]\n    pub fn parts(&self) -> &[PitchPart] {\n        self.parts.as_ref()\n    }\n\n    /// Render helper for the template\n    #[cfg(feature = \"jotoba_intern\")]\n    
pub fn render(&self) -> impl Iterator<Item = (String, &str)> {\n        use self::border::{Border, BorderBuilder};\n        let mut iter = self.parts.iter().enumerate();\n\n        std::iter::from_fn(move || {\n            let (pos, pitch_part) = iter.next()?;\n\n            if pitch_part.part.is_empty() {\n                // Don't render under/overline for empty character -- handles the case where the\n                // pitch changes from the end of the word to the particle\n                return Some((String::new(), \"\"));\n            }\n\n            let h_bord = Border::horizontal(pitch_part.high);\n            let mut b_builder = BorderBuilder::new(h_bord);\n\n            if pos != self.parts.len() - 1 {\n                b_builder.add(Border::Right);\n            }\n\n            let classes = b_builder.build();\n            let part_str = pitch_part.part.as_str();\n            Some((classes, part_str))\n        })\n    }\n}\n\n/// A single, owned part of a whole pitch entry for a word\n#[derive(Clone, Serialize, Deserialize, Debug)]\npub struct PitchPart {\n    pub part: String,\n    pub high: bool,\n}\n\nimpl PitchPart {\n    #[inline]\n    pub fn new<S: ToString>(part: S, high: bool) -> Self {\n        Self {\n            part: part.to_string(),\n            high,\n        }\n    }\n}\n\n/// Returns an iterator over all kana characters. 
The reason for Item to be &str is that 'きゅう'\n/// gets split up into [\"きゅ\", \"う\"] which can't be represented with only one char\npub fn split_kana(inp: &str) -> impl Iterator<Item = &str> {\n    let mut char_indices = inp.char_indices().peekable();\n\n    std::iter::from_fn(move || {\n        let (start_idx, _) = char_indices.next()?;\n        while let Some(&(next_idx, chr)) = char_indices.peek() {\n            if !chr.is_small_kana() {\n                return Some(&inp[start_idx..next_idx]);\n            }\n            char_indices.next();\n        }\n\n        Some(&inp[start_idx..])\n    })\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    #[test]\n    fn test_split_katakana_small() {\n        let inp = \"ファイナル\";\n        let out = split_kana(inp).collect::<Vec<_>>();\n        assert_eq!(out, vec![\"ファ\", \"イ\", \"ナ\", \"ル\"]);\n    }\n\n    #[test]\n    fn test_split_kana_small() {\n        let inp = \"きょうかしょ\";\n        let out = split_kana(inp).collect::<Vec<_>>();\n        assert_eq!(out, vec![\"きょ\", \"う\", \"か\", \"しょ\"]);\n    }\n\n    #[test]\n    fn test_split_kana() {\n        let inp = \"これがすき\";\n        let out = split_kana(inp).collect::<Vec<_>>();\n        assert_eq!(out, vec![\"こ\", \"れ\", \"が\", \"す\", \"き\"]);\n    }\n\n    #[test]\n    fn test_split_kana2() {\n        let inp = \"\";\n        let out = split_kana(inp).collect::<Vec<_>>();\n        let empty: Vec<&str> = Vec::new();\n        assert_eq!(out, empty);\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/pitch/raw_data.rs",
    "content": "use serde::{Deserialize, Serialize};\nuse std::fmt::Debug;\n\n/// Store for pitch values. There are max 4 pitch values with each 3 bits. This\n/// is why we store it efficiently in a u16\n#[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]\npub struct PitchValues {\n    raw: u16,\n}\n\nimpl PitchValues {\n    pub fn new(values: &[u8]) -> Self {\n        assert!(values.len() <= 4);\n\n        let mut raw: u16 = 0;\n\n        for (pos, val) in values.iter().enumerate() {\n            assert!(*val <= 6);\n            let shift = pos as u16 * 3;\n            raw |= (*val as u16) << shift;\n        }\n\n        raw |= (values.len() as u16) << 12;\n\n        Self { raw }\n    }\n\n    #[inline]\n    pub fn is_empty(&self) -> bool {\n        self.raw == 0\n    }\n\n    #[inline]\n    pub fn count(&self) -> u8 {\n        (self.raw >> 12) as u8\n    }\n\n    #[inline]\n    pub fn get(&self, pos: u8) -> Option<u8> {\n        (pos < self.count()).then(|| (self.raw >> (pos as u16 * 3)) as u8 & 0b00000111)\n    }\n\n    #[inline]\n    pub fn iter(&self) -> impl Iterator<Item = u8> + '_ {\n        (0..self.count()).map(|i| self.get(i).unwrap())\n    }\n}\n\nimpl Debug for PitchValues {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"[\")?;\n        for (pos, p) in self.iter().enumerate() {\n            if pos > 0 {\n                write!(f, \"|\")?;\n            }\n            write!(f, \"{p}\")?;\n        }\n        write!(f, \"]\")\n    }\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    #[test]\n    fn test_pitch_value() {\n        assert_eq!(PitchValues::new(&[0]).count(), 1);\n        assert_eq!(PitchValues::new(&[0]).get(0), Some(0));\n        assert_eq!(PitchValues::new(&[0]).get(1), None);\n\n        assert_eq!(PitchValues::new(&[6, 6]).count(), 2);\n        assert_eq!(PitchValues::new(&[6, 6]).get(0), Some(6));\n        assert_eq!(PitchValues::new(&[6, 6]).get(1), 
Some(6));\n\n        assert_eq!(PitchValues::new(&[1, 6, 0]).count(), 3);\n        assert_eq!(PitchValues::new(&[1, 6, 0]).get(2), Some(0));\n        assert_eq!(PitchValues::new(&[]).count(), 0);\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/priority.rs",
    "content": "use serde::{Deserialize, Serialize};\nuse std::convert::TryFrom;\n\n/// Priority indicator of kanji/reading element\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, Hash, Eq)]\n#[repr(u8)]\npub enum Priority {\n    News(u8),\n    Ichi(u8),\n    Spec(u8),\n    Gai(u8),\n    Nf(u8),\n}\n\nimpl Into<String> for Priority {\n    fn into(self) -> String {\n        match self {\n            Priority::News(v) => format!(\"news{}\", v),\n            Priority::Ichi(v) => format!(\"ichi{}\", v),\n            Priority::Spec(v) => format!(\"spec{}\", v),\n            Priority::Gai(v) => format!(\"gai{}\", v),\n            Priority::Nf(v) => format!(\"nf{}\", v),\n        }\n    }\n}\n\nimpl TryFrom<&str> for Priority {\n    type Error = ();\n    fn try_from(value: &str) -> Result<Self, Self::Error> {\n        if let Some(end) = value.strip_prefix(\"news\") {\n            return Ok(Priority::News(end.parse().map_err(|_| ())?));\n        }\n\n        if let Some(end) = value.strip_prefix(\"ichi\") {\n            return Ok(Priority::Ichi(end.parse().map_err(|_| ())?));\n        }\n\n        if let Some(end) = value.strip_prefix(\"spec\") {\n            return Ok(Priority::Spec(end.parse().map_err(|_| ())?));\n        }\n\n        if let Some(end) = value.strip_prefix(\"gai\") {\n            return Ok(Priority::Gai(end.parse().map_err(|_| ())?));\n        }\n\n        if let Some(end) = value.strip_prefix(\"nf\") {\n            return Ok(Priority::Nf(end.parse().map_err(|_| ())?));\n        }\n\n        Err(())\n    }\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n    use std::convert::TryFrom;\n\n    #[test]\n    fn test_priority_ichi() {\n        let s = Priority::try_from(\"ichi1\");\n        assert!(s.is_ok());\n        let s = s.unwrap();\n        assert_eq!(s, Priority::Ichi(1));\n        let p: String = s.into();\n        assert_eq!(p, \"ichi1\");\n        let s = Priority::try_from(\"ichi\");\n        assert!(s.is_err());\n    }\n\n    
#[test]\n    fn test_priority_nf() {\n        let s = Priority::try_from(\"nf10\");\n        assert!(s.is_ok());\n        let s = s.unwrap();\n        assert_eq!(s, Priority::Nf(10));\n        let p: String = s.into();\n        assert_eq!(p, \"nf10\");\n        let s = Priority::try_from(\"nf4\");\n        assert!(s.is_ok());\n        let s = s.unwrap();\n        assert_eq!(s, Priority::Nf(4));\n        let p: String = s.into();\n        assert_eq!(p, \"nf4\");\n\n        let s = Priority::try_from(\"nf\");\n        assert!(s.is_err());\n    }\n\n    #[test]\n    fn test_priority_news() {\n        let s = Priority::try_from(\"news10\");\n        assert!(s.is_ok());\n        let s = s.unwrap();\n        assert_eq!(s, Priority::News(10));\n        let p: String = s.into();\n        assert_eq!(p, \"news10\");\n\n        let s = Priority::try_from(\"news\");\n        assert!(s.is_err());\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/reading/iter.rs",
    "content": "use super::Reading;\nuse crate::jotoba::words::Dict;\n\n/// Iterator over all readings of a word\npub struct ReadingIter<'a> {\n    reading: &'a Reading,\n    allow_kana: bool,\n    did_kanji: bool,\n    did_kana: bool,\n    alternative_pos: u8,\n}\n\nimpl<'a> ReadingIter<'a> {\n    #[inline]\n    pub(crate) fn new(reading: &'a Reading, allow_kana: bool) -> Self {\n        Self {\n            reading,\n            allow_kana,\n            did_kana: false,\n            did_kanji: false,\n            alternative_pos: 0,\n        }\n    }\n}\n\nimpl<'a> Iterator for ReadingIter<'a> {\n    type Item = &'a Dict;\n\n    fn next(&mut self) -> Option<Self::Item> {\n        if !self.did_kana && self.allow_kana {\n            self.did_kana = true;\n            return Some(&self.reading.kana);\n        }\n        if !self.did_kanji && self.reading.kanji.is_some() {\n            self.did_kanji = true;\n            return Some(self.reading.kanji.as_ref().unwrap());\n        }\n        let i = self\n            .reading\n            .alternative\n            .get(self.alternative_pos as usize)?;\n        self.alternative_pos += 1;\n        Some(i)\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/reading/mod.rs",
    "content": "pub mod iter;\n\npub use iter::ReadingIter;\n\nuse super::Dict;\nuse jp_utils::JapaneseExt;\nuse serde::{Deserialize, Serialize};\n\n/// Various readings of a word\n#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize, Hash, Eq)]\npub struct Reading {\n    pub kana: Dict,\n    pub kanji: Option<Dict>,\n    pub alternative: Vec<Dict>,\n}\n\nimpl Reading {\n    /// Returns the preferred word-reading of a `Reading`\n    #[inline]\n    pub fn get_reading(&self) -> &Dict {\n        self.kanji.as_ref().unwrap_or(&self.kana)\n    }\n\n    /// Returns an iterator over all reading elements\n    #[inline]\n    pub fn iter(&self, allow_kana: bool) -> ReadingIter<'_> {\n        ReadingIter::new(self, allow_kana)\n    }\n\n    /// Return `true` if reading represents a katakana only word\n    #[inline]\n    pub fn is_katakana(&self) -> bool {\n        self.kana.reading.is_katakana() && self.kanji.is_none()\n    }\n}\n"
  },
  {
    "path": "lib/types/src/jotoba/words/sense.rs",
    "content": "use crate::jotoba::language::Language;\n\nuse super::{\n    dialect::Dialect,\n    field::Field,\n    foreign_language::ForeignLanguage,\n    gtype::GType,\n    misc::Misc,\n    part_of_speech::{PartOfSpeech, PosSimple},\n    Word,\n};\nuse serde::{Deserialize, Serialize};\n\n#[cfg(feature = \"jotoba_intern\")]\nuse localization::{language::Language as LocLanguage, traits::Translatable, TranslationDict};\n\n/// A single sense for a word. Represents one language,\n/// one misc item and 1..n glosses\n#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize, Hash)]\npub struct Sense {\n    pub id: u8,\n    pub misc: Option<Misc>,\n    pub field: Option<Field>,\n    pub dialect: Option<Dialect>,\n    pub glosses: Vec<Gloss>,\n    pub xref: Option<String>,\n    pub antonym: Option<String>,\n    pub information: Option<String>,\n    pub part_of_speech: Vec<PartOfSpeech>,\n    pub language: Language,\n    pub example_sentence: Option<u32>,\n    pub gairaigo: Option<Gairaigo>,\n}\n\n#[derive(Debug, Default, Clone, PartialEq, Deserialize, Serialize, Hash)]\npub struct Gairaigo {\n    pub language: ForeignLanguage,\n    pub fully_derived: bool,\n    pub original: String,\n}\n\nimpl Eq for Sense {}\n\n/// A gloss value represents one word in the\n/// translated language.\n#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize, Hash)]\npub struct Gloss {\n    pub id: u8,\n    pub gloss: String,\n    pub g_type: Option<GType>,\n}\n\n/// Converts sense and gloss id to a single u16\n#[inline]\npub fn to_unique_id(sense_id: u8, gloss_id: u8) -> u16 {\n    (sense_id as u16) << 8 | gloss_id as u16\n}\n\n/// Converts u16 to seq and gloss id\n#[inline]\npub fn from_unique_id(id: u16) -> (u8, u8) {\n    let gloss_id = id as u8;\n    let sense_id = (id >> 8) as u8;\n    (sense_id, gloss_id)\n}\n\nimpl Sense {\n    /// Get all pos_simple of a sense\n    pub fn get_pos_simple(&self) -> Vec<PosSimple> {\n        let mut pos_simple = self\n            
.part_of_speech\n            .iter()\n            .map(|i| i.to_pos_simple())\n            .flatten()\n            .collect::<Vec<_>>();\n\n        pos_simple.sort_unstable();\n        pos_simple.dedup();\n        pos_simple\n    }\n\n    #[inline]\n    pub fn has_pos_simple(&self, s: &PosSimple) -> bool {\n        //self.get_pos_simple().contains(s)\n        self.part_of_speech\n            .iter()\n            .any(|p| p.to_pos_simple().contains(s))\n    }\n\n    #[inline]\n    pub fn gloss_by_id(&self, id: u8) -> Option<&Gloss> {\n        self.glosses.iter().find(|i| i.id == id)\n    }\n}\n\n// Jotoba intern only features\n#[cfg(feature = \"jotoba_intern\")]\nimpl Sense {\n    /// Get a senses tags prettified\n    #[inline]\n    pub fn get_glosses(&self) -> String {\n        use itertools::Itertools;\n        self.glosses.iter().map(|i| i.gloss.clone()).join(\"; \")\n    }\n\n    /// Returns an `xref` of the sense if available\n    #[inline]\n    pub fn get_xref(&self) -> Option<&str> {\n        self.xref.as_ref().and_then(|xref| xref.split('・').next())\n    }\n\n    /// Returns an `antonym` of the sense if available\n    #[inline]\n    pub fn get_antonym(&self) -> Option<&str> {\n        self.antonym\n            .as_ref()\n            .and_then(|antonym| antonym.split('・').next())\n    }\n\n    // Get a senses tags prettified\n    pub fn get_parts_of_speech(&self, dict: &TranslationDict, language: LocLanguage) -> String {\n        use itertools::Itertools;\n        self.part_of_speech\n            .iter()\n            .map(|i| i.gettext_custom(dict, Some(language)))\n            .join(\", \")\n    }\n\n    pub fn get_infos(\n        &self,\n        dict: &TranslationDict,\n        language: LocLanguage,\n    ) -> Option<(\n        Option<String>,\n        Option<&str>,\n        Option<&str>,\n        Option<Dialect>,\n        Option<String>,\n    )> {\n        let info_str = self.get_information_string(dict, language);\n        let xref = self.get_xref();\n    
    let antonym = self.get_antonym();\n        let dialect = self.dialect;\n\n        if xref.is_none() && info_str.is_none() && antonym.is_none() && self.gairaigo.is_none() {\n            None\n        } else {\n            let gairaigo_txt = self.get_gairaigo(dict, language);\n            Some((info_str, xref, antonym, dialect, gairaigo_txt))\n        }\n    }\n\n    fn get_gairaigo(&self, dict: &TranslationDict, language: LocLanguage) -> Option<String> {\n        self.gairaigo.as_ref().map(|gairaigo| {\n            let lang = gairaigo\n                .language\n                .pgettext(dict, \"foreign_lang\", Some(language));\n            dict.gettext_fmt(\"From {}: {}\", &[lang, &gairaigo.original], Some(language))\n        })\n    }\n\n    /// Return human readable information about a gloss\n    pub fn get_information_string(\n        &self,\n        dict: &TranslationDict,\n        language: LocLanguage,\n    ) -> Option<String> {\n        use itertools::Itertools;\n        let arr: [Option<String>; 3] = [\n            self.misc\n                .map(|i| i.gettext(dict, Some(language)).to_owned()),\n            self.field.map(|i| i.gettext_custom(dict, Some(language))),\n            self.information.clone(),\n        ];\n\n        let res = arr\n            .iter()\n            .filter_map(|i| i.is_some().then(|| i.as_ref().unwrap()))\n            .collect::<Vec<_>>();\n\n        if res.is_empty() {\n            return None;\n        }\n\n        if self.xref.is_some() || self.antonym.is_some() {\n            Some(format!(\"{}.\", res.iter().join(\", \")))\n        } else {\n            Some(res.iter().join(\", \"))\n        }\n    }\n}\n\n/// Iterator over all Senses and its glosses\npub struct SenseGlossIter<'a> {\n    word: &'a Word,\n    sense_pos: usize,\n    gloss_pos: usize,\n}\n\nimpl<'a> SenseGlossIter<'a> {\n    #[inline]\n    pub(super) fn new(word: &'a Word) -> Self {\n        SenseGlossIter {\n            word,\n            sense_pos: 0,\n      
      gloss_pos: 0,\n        }\n    }\n}\n\nimpl<'a> Iterator for SenseGlossIter<'a> {\n    type Item = (&'a Sense, &'a Gloss, u16);\n\n    fn next(&mut self) -> Option<Self::Item> {\n        let senses = &self.word.senses;\n        if senses.len() <= self.sense_pos {\n            return None;\n        }\n\n        let sense = &senses[self.sense_pos];\n        assert!(!sense.glosses.is_empty());\n        let gloss = &sense.glosses[self.gloss_pos];\n\n        self.gloss_pos += 1;\n        if self.gloss_pos >= sense.glosses.len() {\n            self.gloss_pos = 0;\n            self.sense_pos += 1;\n        }\n\n        let id = to_unique_id(sense.id, gloss.id);\n\n        Some((sense, gloss, id))\n    }\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    fn make_gloss(word: &str) -> Gloss {\n        Gloss {\n            gloss: word.to_string(),\n            ..Default::default()\n        }\n    }\n\n    fn make_word(senses: &[&[&str]]) -> Word {\n        let built_senses = senses\n            .iter()\n            .map(|sense| Sense {\n                glosses: sense.iter().map(|i| make_gloss(i)).collect(),\n                ..Default::default()\n            })\n            .collect::<Vec<_>>();\n\n        Word {\n            senses: built_senses,\n            ..Default::default()\n        }\n    }\n\n    #[test]\n    fn test_sense_gloss_iter() {\n        let word_empty = make_word(&[]);\n        assert_eq!(word_empty.sense_gloss_iter().next(), None);\n\n        let test_word = |data: &[&[&str]]| {\n            let word1 = make_word(data);\n            let mut iter1 = word1.sense_gloss_iter();\n\n            for i in data.into_iter().map(|i| i.iter()).flatten() {\n                assert_eq!(iter1.next().unwrap().1.gloss.as_str(), *i);\n            }\n            assert_eq!(iter1.next(), None);\n        };\n\n        let words = vec![\n            vec![&[\"gloss0_0\"][..]],\n            vec![&[\"gloss0_0\"][..], &[\"gloss1_0\"][..]],\n            vec![&[\"gloss0_0\", 
\"gloss0_1\"][..], &[\"gloss1_0\", \"gloss1_1\"][..]],\n        ];\n\n        for word in words {\n            test_word(&word);\n        }\n    }\n\n    #[test]\n    fn test_unique_id() {\n        let pairs = &[(1, 70), (10, 6), (0, 0), (255, 255), (1, 2)];\n\n        for (seq, gloss) in pairs {\n            let enc = to_unique_id(*seq, *gloss);\n            let (seq_res, gloss_res) = from_unique_id(enc);\n            assert_eq!(*seq, seq_res);\n            assert_eq!(*gloss, gloss_res);\n        }\n    }\n}\n"
  },
  {
    "path": "lib/types/src/lib.rs",
    "content": "/// Contains raw data structures used for parsing and generating the 'real' resources\n#[cfg(feature = \"raw_types\")]\npub mod raw;\n\n/// Contains all information holding structures for jotoba resources\npub mod jotoba;\n\n/// Contains all structures and informations required for the API\n#[cfg(feature = \"api\")]\npub mod api;\n"
  },
  {
    "path": "lib/types/src/raw/jmdict/mod.rs",
    "content": "use crate::jotoba::{\n    language::Language,\n    words::{\n        dialect::Dialect, field::Field, gtype::GType, information::Information, misc::Misc,\n        part_of_speech::PartOfSpeech, priority::Priority, sense::Gairaigo,\n    },\n};\n\nuse serde::{Deserialize, Serialize};\n\n/// An dict entry. Represents one word, phrase or expression\n#[derive(Debug, Default, Clone)]\npub struct Entry {\n    pub sequence: u32,\n    /// Different readings of a word\n    pub elements: Vec<EntryElement>,\n    /// Translations into various languages\n    pub senses: Vec<EntrySense>,\n}\n\n/// A single element for an entry. Defines reading, kanji and additional\n/// information for the japanese word\n#[derive(Debug, Default, Clone)]\npub struct EntryElement {\n    /// Is kanji reading\n    pub kanji: bool,\n    /// The reading\n    pub value: String,\n    pub priorities: Vec<Priority>,\n    pub reading_info: Vec<Information>,\n    pub no_true_reading: bool,\n}\n\n/// A single 'sense' item for an entry\n#[derive(Debug, Default, Clone)]\npub struct EntrySense {\n    pub id: u8,\n    pub glosses: Vec<GlossValue>,\n    pub misc: Option<Misc>,\n    pub part_of_speech: Vec<PartOfSpeech>,\n    pub antonym: Option<String>,\n    pub field: Option<Field>,\n    pub xref: Option<String>,\n    pub dialect: Option<Dialect>,\n    pub information: Option<String>,\n    pub gairaigo: Option<Gairaigo>,\n    pub example_sentence: Option<u32>,\n}\n\nimpl EntrySense {\n    pub fn clear(&mut self) {\n        self.glosses.clear();\n\n        if let Some(ref mut ant) = self.antonym {\n            ant.clear();\n            self.antonym = None;\n        }\n\n        if let Some(ref mut information) = self.information {\n            information.clear();\n            self.information = None;\n        }\n\n        if let Some(ref mut xref) = self.xref {\n            xref.clear();\n            self.xref = None;\n        }\n\n        self.field = None;\n        self.dialect = None;\n        
self.misc = None;\n        self.part_of_speech.clear();\n        self.example_sentence = None;\n        self.gairaigo = None;\n    }\n}\n\n#[derive(Debug, Default, Clone, PartialEq, Deserialize, Serialize, Hash)]\npub struct Translation {\n    pub language: Language,\n    pub value: String,\n}\n\n/// A single gloss entry.\n#[derive(Debug, Clone, PartialEq)]\npub struct GlossValue {\n    pub language: Language,\n    pub g_type: Option<GType>,\n    pub value: String,\n}\n"
  },
  {
    "path": "lib/types/src/raw/jmnedict/mod.rs",
    "content": "use crate::jotoba::names::name_type::NameType;\n\n/// An dict entry. Represents one word, phrase or expression\n#[derive(Default)]\npub struct NameEntry {\n    pub sequence: i32,\n    pub kana_element: String,\n    pub kanji_element: Option<String>,\n    pub transcription: String,\n    pub name_type: Option<Vec<NameType>>,\n    pub xref: Option<String>,\n}\n"
  },
  {
    "path": "lib/types/src/raw/kanjidict/mod.rs",
    "content": "/// An kanji character. Represents one Kanji\n#[derive(Default, Clone, Debug)]\npub struct Character {\n    pub literal: char,\n    pub on_readings: Vec<String>,\n    pub kun_readings: Vec<String>,\n    pub chinese_readings: Vec<String>,\n    pub korean_romanized: Vec<String>,\n    pub korean_hangul: Vec<String>,\n    pub vietnamese: Vec<String>,\n    pub meaning: Vec<String>,\n    pub grade: Option<u8>,\n    pub stroke_count: u8,\n    pub variant: Vec<String>,\n    pub frequency: Option<u16>,\n    pub jlpt: Option<u8>,\n    pub natori: Vec<String>,\n    pub radical: Option<i32>,\n}\n"
  },
  {
    "path": "lib/types/src/raw/mod.rs",
    "content": "pub mod jmdict;\npub mod jmnedict;\npub mod kanjidict;\n"
  },
  {
    "path": "lib/utils/Cargo.toml",
    "content": "[package]\nname = \"utils\"\nversion = \"0.1.0\"\nauthors = [\"jojii <jojii@gmx.net>\"]\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nitertools = \"0.11.0\"\nrand = \"0.8.5\"\nsabi = { git = \"https://github.com/yuk1ty/sabi\" }\n"
  },
  {
    "path": "lib/utils/src/binary_search.rs",
    "content": "use std::cmp::Ordering;\n\npub struct ResultIter<'a, C, B, T>\nwhere\n    C: FnMut(&T) -> Ordering + Copy,\n    B: BinarySearchable<Item = T>,\n{\n    cmp_fn: C,\n    first: Option<usize>,\n    item_pos: usize,\n    find: &'a B,\n}\n\nimpl<'a, C, B, T> Iterator for ResultIter<'a, C, B, T>\nwhere\n    C: FnMut(&T) -> Ordering + Copy,\n    B: BinarySearchable<Item = T>,\n{\n    type Item = T;\n\n    #[inline]\n    fn next(&mut self) -> Option<Self::Item> {\n        let curr_item_pos = self.first? + self.item_pos;\n\n        if curr_item_pos >= self.find.len() {\n            return None;\n        }\n\n        let item = self.find.get(curr_item_pos);\n        if (self.cmp_fn)(&item) == Ordering::Equal {\n            self.item_pos += 1;\n            return Some(item);\n        }\n\n        None\n    }\n}\n\nimpl<'a, C, B, T> ResultIter<'a, C, B, T>\nwhere\n    C: FnMut(&T) -> Ordering + Copy,\n    B: BinarySearchable<Item = T>,\n{\n    #[inline]\n    pub(crate) fn new(cmp: C, search: &'a B, first: Option<usize>) -> Self {\n        Self {\n            cmp_fn: cmp,\n            first,\n            item_pos: 0,\n            find: search,\n        }\n    }\n}\n\n/// A trait providing binary search for all `get` and `len` implementing types. Additionally\n/// `search` can be used to retrieve all matching items in sorted order.\npub trait BinarySearchable: Sized {\n    type Item: Sized;\n\n    fn get(&self, pos: usize) -> Self::Item;\n    fn len(&self) -> usize;\n\n    fn is_empty(&self) -> bool {\n        self.len() == 0\n    }\n\n    /// Returns an iterator over each matching result\n    fn search<C>(&self, cmp: C) -> ResultIter<'_, C, Self, Self::Item>\n    where\n        C: FnMut(&Self::Item) -> Ordering + Copy,\n    {\n        let first_item = self.find_first(cmp);\n        ResultIter::new(cmp, self, first_item)\n    }\n\n    fn binary_search_by<'a, F>(&'a self, mut f: F) -> Option<usize>\n    where\n        F: FnMut(&Self::Item) -> Ordering,\n    {\n        let mut size = self.len();\n        let mut left = 0;\n        let mut right = size;\n\n        while left < right {\n            let mid = left + size / 2;\n\n            let cmp = f(&self.get(mid));\n\n            if cmp == Ordering::Less {\n                left = mid + 1;\n            } else if cmp == Ordering::Greater {\n                right = mid;\n            } else {\n                return Some(mid);\n            }\n\n            size = right - left;\n        }\n        None\n    }\n\n    /// Finds the first (lowest index) matching item\n    fn find_first<C>(&self, mut cmp: C) -> Option<usize>\n    where\n        C: FnMut(&Self::Item) -> Ordering,\n    {\n        // Find using binary search. If multiple results found (which is very likely the case in\n        // our implementation), a random item of the matching ones will be found\n        let random_index = self.binary_search_by(|a| cmp(a))?;\n\n        let mut curr_pos = random_index.saturating_sub(100);\n\n        loop {\n            if cmp(&self.get(curr_pos)) != Ordering::Equal {\n                // Jumped below the matching range: scan forward to the first match\n                loop {\n                    curr_pos += 1;\n                    if cmp(&self.get(curr_pos)) == Ordering::Equal {\n                        break;\n                    }\n                }\n                break Some(curr_pos);\n            }\n\n            if curr_pos == 0 {\n                // Item 0 itself matches, so it is the first matching item.\n                // (Previously this returned None, dropping all results whenever\n                // the matching range started at index 0.)\n                break Some(0);\n            }\n            curr_pos = curr_pos.saturating_sub(200);\n        }\n    }\n}\n"
  },
  {
    "path": "lib/utils/src/korean.rs",
    "content": "/// Returns true if `c` is a hangul character\n#[inline]\npub fn is_hangul(c: char) -> bool {\n    // Hangul syllables\n    ('\\u{AC00}'..='\\u{D7AF}').contains(&c)\n        // Hangul jamo\n        || ('\\u{1100}'..='\\u{11FF}').contains(&c)\n        // Hangul compatibility jamo\n        || ('\\u{3130}'..='\\u{318F}').contains(&c)\n        // Parenthesized/circled hangul from \"Enclosed CJK Letters and Months\".\n        // Note: the previous single range U+3130..=U+321E wrongly included\n        // Kanbun, Bopomofo Extended, CJK Strokes and Katakana Phonetic\n        // Extensions (U+3190..=U+31FF), which are not hangul.\n        || ('\\u{3200}'..='\\u{321E}').contains(&c)\n}\n\nsabi::sabi! {\n    /// Returns true if `c` is a hangul character\n    #[inline]\n    公開 関数 is_hangul_str(ハングルの文字列: &str) -> bool{\n        !ハングルの文字列.chars().any(|i| !is_hangul(i))\n    }\n}\n"
  },
  {
    "path": "lib/utils/src/lib.rs",
    "content": "pub mod binary_search;\npub mod korean;\n\nuse itertools::Itertools;\nuse rand::{distributions::Alphanumeric, thread_rng, Rng};\nuse std::cmp::Ordering;\n\n/// Return true if both slices have the same elments without being stored to be in the same order\npub fn same_elements<T>(v1: &[T], v2: &[T]) -> bool\nwhere\n    T: PartialEq,\n{\n    if v1.len() != v2.len() {\n        return false;\n    }\n\n    for i in v1 {\n        if !v2.contains(i) {\n            return false;\n        }\n    }\n\n    true\n}\n\n/// Return true if `v1` ⊆ `v2`\npub fn part_of<T>(v1: &[T], v2: &[T]) -> bool\nwhere\n    T: PartialEq,\n{\n    if v1.len() > v2.len() || v1.is_empty() {\n        return false;\n    }\n\n    for i in v1 {\n        if !v2.contains(i) {\n            return false;\n        }\n    }\n\n    true\n}\n\n/// Get the relative order of two elements within a vector requires that a, b being element of vec\npub fn get_item_order<T>(vec: &[T], a: &T, b: &T) -> Option<Ordering>\nwhere\n    T: PartialEq,\n{\n    if a == b {\n        return Some(Ordering::Equal);\n    }\n\n    for i in vec {\n        if *i == *a {\n            return Some(Ordering::Less);\n        }\n        if *i == *b {\n            return Some(Ordering::Greater);\n        }\n    }\n\n    None\n}\n\n/// Returns the real amount of characters in a string\n#[inline]\npub fn real_string_len<S: AsRef<str>>(s: S) -> usize {\n    // We should probably use grapheme clusters here\n    s.as_ref().chars().count()\n}\n\n/// Returns an antisymmetric ordering of [`a`] and [`b`] where `a == true` < `b == true`\n/// Example:\n///\n/// let a = true;\n/// let b = false;\n/// assert_eq!(bool_ord(a, b), Ordering::Less);\n#[inline]\npub fn bool_ord(a: bool, b: bool) -> Ordering {\n    if a && !b {\n        Ordering::Less\n    } else if !a && b {\n        Ordering::Greater\n    } else {\n        Ordering::Equal\n    }\n}\n\n/// Returns `None` if the vec is empty or Some(Vec<T>) if not\n#[inline]\npub fn 
to_option<T>(vec: Vec<T>) -> Option<Vec<T>> {\n    (!vec.is_empty()).then(|| vec)\n}\n\n/// Returns an ordering based on the option variants.\n/// Ordering: Some < None\n/// In case both are equal, None gets returned\npub fn option_order<T>(a: &Option<T>, b: &Option<T>) -> Option<Ordering> {\n    if a.is_some() && !b.is_some() {\n        Some(Ordering::Less)\n    } else if !a.is_some() && b.is_some() {\n        Some(Ordering::Greater)\n    } else {\n        None\n    }\n}\n\n/// Remove duplicates from a vector and return a newly allocated one using a func to compare both\n/// items. This doesn't need the source\n/// vector to be sorted unlike `.dedup()`. Therefore it's heavier in workload\npub fn remove_dups_by<T, F>(inp: Vec<T>, eq: F) -> Vec<T>\nwhere\n    T: PartialEq,\n    F: Fn(&T, &T) -> bool,\n{\n    let mut new: Vec<T> = Vec::new();\n\n    for item in inp {\n        if !contains(&new, &item, &eq) {\n            new.push(item)\n        }\n    }\n\n    new\n}\n\npub fn contains<T, F>(inp: &[T], item: &T, eq: F) -> bool\nwhere\n    F: Fn(&T, &T) -> bool,\n{\n    for i in inp {\n        if eq(i, item) {\n            return true;\n        }\n    }\n    false\n}\n\n/// Remove duplicates from a vector and return a newly allocated one. This doesn't need the source\n/// vector to be sorted unlike `.dedup()`. 
Therefore it's heavier in workload\npub fn remove_dups<T>(inp: Vec<T>) -> Vec<T>\nwhere\n    T: PartialEq,\n{\n    let mut new = vec![];\n\n    for item in inp {\n        if !new.contains(&item) {\n            new.push(item)\n        }\n    }\n\n    new\n}\n\n/// Returns an iterator over bools for each [`substr`] within [`text`] with the value `true` if the\n/// given substr occurence is within [`open`] and [`close`] or not\n///\n/// Example:\n///\n/// is_surrounded_by(r#\"this \"is\" an example\"#, \"is\", '\"','\"')\n///\n/// => will return an iterator over [ false, true ]\n///\npub fn is_surrounded_by<'a>(\n    text: &'a str,\n    substr: &'a str,\n    open: char,\n    close: char,\n) -> impl Iterator<Item = bool> + 'a {\n    // Counter for amount of nested brackets\n    let mut counter = 0;\n\n    let mut text_iter = text.char_indices().multipeek();\n    std::iter::from_fn(move || {\n        // Retard case no valid bracketing is possible\n        if substr.len() + 2 <= text.len() || substr.contains(open) || substr.contains(close) {\n            return None;\n        }\n\n        'b: while let Some((_, c)) = text_iter.next() {\n            if c == open {\n                counter += 1;\n                continue;\n            }\n\n            if c == close {\n                counter -= 1;\n                continue;\n            }\n\n            // Match each character of [`substr`] against the next appearing characters in [`text`] by\n            // peeking [`text_iter`] Aka string matching\n            for (pos, sub_char) in substr.chars().enumerate() {\n                let text_char = if pos == 0 {\n                    // Check first substr char against current char\n                    c\n                } else {\n                    // For later appearing characters, peek into the future\n                    match text_iter.peek().map(|i| i.1) {\n                        Some(c) => c,\n                        None => return None,\n                    }\n        
        };\n\n                // On the first not matching character, continue loop and reset peek\n                if sub_char.to_ascii_lowercase() != text_char.to_ascii_lowercase() {\n                    text_iter.reset_peek();\n                    continue 'b;\n                }\n            }\n\n            // Skip peeked items if maching substr was found\n            text_iter.reset_peek();\n            for _ in 0..substr.chars().count() - 1 {\n                text_iter.next();\n            }\n\n            // Only reaches this part if a matching substring was found\n            return Some(counter > 0);\n        }\n\n        None\n    })\n}\n\n#[inline]\npub fn trim_string_end(mut s: String) -> String {\n    while s.ends_with(' ') {\n        s.pop();\n    }\n    s\n}\n\n/// Returns true if [`s`] represents [`c`]\npub fn char_eq_str(c: char, s: &str) -> bool {\n    let mut chars = s.chars();\n    let is = chars.next().map(|i| i == c).unwrap_or_default();\n    is && chars.next().is_none()\n}\n\nsabi::sabi! {\n    /// Makes the first character to uppercase and returns a newly owned string\n    公開 関数 first_letter_upper(s: &str) -> 文字列{\n        束縛 可変 c = s.chars();\n        マッチ c.next(){\n            ない => 文字列::新(),\n            ある(f) => f.to_uppercase().chain(c).collect(),\n        }\n    }\n}\n\n/// Returns a random alpha numeric string with the length of [`len`]\n#[inline]\npub fn rand_alpha_numeric(len: usize) -> String {\n    thread_rng()\n        .sample_iter(&Alphanumeric)\n        .take(len)\n        .map(char::from)\n        .collect()\n}\n\n/// Formats romaji text by removing all 'n' occurences of n+ for 1 < |n| <= 4\n#[inline]\npub fn format_romaji_nn(inp: &str) -> String {\n    inp.replace(\"nn\", \"ん\")\n        .replace(\"n'\", \"ん\")\n        .replace(\"nnn\", \"nn\")\n        .replace(\"nnnn\", \"nnn\")\n        .replace(\"nnnnn\", \"nnnn\")\n}\n"
  },
  {
    "path": "locales/de.po",
    "content": "# SINGULAR\n# msgctxt \"\"\n# msgid \"\"\n# msgstr \"\"\n\n# PLURAL\n# msgctxt \"\"\n# msgid \"\"\n# msgid_plural \"\"\n# msgstr[0] \"\"\n# OPTIONAL: msgstr[1] \"\" \n\nmsgid \"\"\nmsgstr \"\"\n\"Project-Id-Version: PACKAGE VERSION\\n\"\n\"PO-Revision-Date: 2021-11-29 21:46+0100\\n\"\n\"Last-Translator:  <>\\n\"\n\"Language-Team: English\\n\"\n\"Language: en\\n\"\n\"MIME-Version: 1.0\\n\"\n\"Content-Type: text/plain; charset=UTF-8\\n\"\n\"Content-Transfer-Encoding: 8bit\\n\"\n\"Plural-Forms: nplurals=2; plural=(n != 1);\\n\"\n\nmsgid \"Jotoba\"\nmsgstr \"\"\n\n#### Base template\nmsgid \"Search...\"\nmsgstr \"Suche...\"\n\nmsgid \"Search\"\nmsgstr \"Suchen\"\n\nmsgid \"Settings\"\nmsgstr \"Einstellungen\"\n\nmsgid \"Radicals\"\nmsgstr \"Radikale\"\n\nmsgid \"Voice\"\nmsgstr \"Stimme\"\n\n# Rad Help Msg\nmsgid \"This tool allows you to find Kanji by their core components (Radicals)\"\nmsgstr \"Dieses Tool erlaubt es dir Kanji anhand ihrer Kernelemente (Radikale) zu finden\"\n\nmsgid \"You can select Radicals below and add found Kanji to the search bar\"\nmsgstr \"Du kannst unten gewünschte Radikale auswählen und die gefunden Kanji der Suchleiste hinzufügen\"\n\nmsgid \"Enter\"\nmsgstr \"\"\n\nmsgid \"to start searching\"\nmsgstr \"zum Starten der Suche\"\n\n# Rad Btns\n\nmsgid \"Reset\"\nmsgstr \"zurücksetzen\"\n\nmsgid \"reset\"\nmsgstr \"zurücksetzen\"\n\nmsgid \"apply\"\nmsgstr \"Annehmen\"\n\nmsgid \"create\"\nmsgstr \"erstellen\"\n\nmsgid \"Close\"\nmsgstr \"Schließen\"\n\nmsgid \"here\"\nmsgstr \"hier\"\n\nmsgid \"Accept\"\nmsgstr \"Annehmen\"\n\nmsgid \"Decline\"\nmsgstr \"Ablehnen\"\n\n# Search type / Dropdown\nmsgid \"Words\"\nmsgstr \"Wörter\"\n\nmsgid \"Kanji\"\nmsgstr \"Kanji\"\n\nmsgid \"Sentences\"\nmsgstr \"Sätze\"\n\nmsgid \"Names\"\nmsgstr \"Namen\"\n\n# Speech overlay\n\nmsgid \"Current language\"\nmsgstr \"Aktuelle Sprache\"\n\nmsgid \"Currently listening\"\nmsgstr \"Am Zuhören\"\n\nmsgid \"No\"\nmsgstr \"Nein\"\n\nmsgid \"To 
change your language, select one of the following\"\nmsgstr \"Um deine Sprache zu wechseln, wähle eins der folgenden\"\n\n# Image search overlay\n\nmsgid \"Enter a URL or upload your image directly and Jotoba will try to search for japanese words contained in the picture.\"\nmsgstr \"Lade ein Bild hoch oder füge direkt die URL ein und Jotoba wird versuchen nach Japanischen Begriffen zu suchen, welche in dem Bild zu sehen sind.\"\n\nmsgid \"Enter Image URL...\"\nmsgstr \"Bild-URL einfügen...\"\n\n# Rad Picker overlay\n\nmsgid \"Select Radicals\"\nmsgstr \"Wähle Radikale\"\n\nmsgid \"Select Kanji\"\nmsgstr \"Wähle Kanji\"\n\nmsgid \"Search Radicals...\"\nmsgstr \"Suche Radikale...\"\n\n# Notification overlay\nmsgid \"Notifications\"\nmsgstr \"Benachrichtigungen\"\n\nmsgid \"No new notifications\"\nmsgstr \"Keine Benachrichtigungen\"\n\nmsgid \"Show all\"\nmsgstr \"Zeige alle\"\n\n# Languages\n\nmsgid \"English\"\nmsgstr \"Englisch\"\n\nmsgid \"German\"\nmsgstr \"Deutsch\"\n\nmsgid \"Russian\"\nmsgstr \"Russisch\"\n\nmsgid \"Spanish\"\nmsgstr \"Spanisch\"\n\nmsgid \"Swedish\"\nmsgstr \"Schwedisch\"\n\nmsgid \"French\"\nmsgstr \"Französisch\"\n\nmsgid \"Dutch\"\nmsgstr \"Niederländisch\"\n\nmsgid \"Hungarian\"\nmsgstr \"Ungarisch\"\n\nmsgid \"Slovenian\"\nmsgstr \"Slowenisch\"\n\nmsgid \"Japanese\"\nmsgstr \"Japanisch\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"German\"\nmsgstr \"Deutschen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"English\"\nmsgstr \"Englischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Georgian\"\nmsgstr \"Georgischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Chinese\"\nmsgstr \"Chinesischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Manchu\"\nmsgstr \"Mandschurischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Kurdish\"\nmsgstr \"Kurdischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"ChinookJargon\"\nmsgstr \"Chinook-Jargon\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Italian\"\nmsgstr \"Italienischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Malayalam\"\nmsgstr 
\"Malayalamischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tibetian\"\nmsgstr \"Tibetischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Mongolian\"\nmsgstr \"Mongolischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Romanian\"\nmsgstr \"Rumänischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Bantu\"\nmsgstr \"Bantusprachischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Norwegian\"\nmsgstr \"Norwegischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Icelandic\"\nmsgstr \"Isländischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Breton\"\nmsgstr \"Bretonischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Maori\"\nmsgstr \"Maorischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Latin\"\nmsgstr \"Lateinischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Amharic\"\nmsgstr \"Amharischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Khmer\"\nmsgstr \"Khmerischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Swahili\"\nmsgstr \"Swahilischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Hebrew\"\nmsgstr \"Hebräischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Galician\"\nmsgstr \"Galegischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Korean\"\nmsgstr \"Koreanischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tamil\"\nmsgstr \"Tamilschen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Viatnamese\"\nmsgstr \"Vietnamesischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Polish\"\nmsgstr \"Polnischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Sanskrit\"\nmsgstr \"Sanskrit\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Persian\"\nmsgstr \"Persischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Filipino\"\nmsgstr \"Filipinischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Moldavian\"\nmsgstr \"Moldavischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Croatian\"\nmsgstr \"Kroatischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Thai\"\nmsgstr \"Thailändischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Burmese\"\nmsgstr \"Birmanischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Slovak\"\nmsgstr \"Slowakischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Czech\"\nmsgstr \"Tschechischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid 
\"Hindi\"\nmsgstr \"Hindischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Mapudungun\"\nmsgstr \"Araukaischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Turkish\"\nmsgstr \"Türkischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Hawaiian\"\nmsgstr \"Hawaiischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Afrikaans\"\nmsgstr \"Afrikanischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Esperanto\"\nmsgstr \"Esperantonischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Yiddish\"\nmsgstr \"Jiddischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Somali\"\nmsgstr \"Somalischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tahitian\"\nmsgstr \"Tahitischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Urdu\"\nmsgstr \"Urduischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Indonesian\"\nmsgstr \"Indonesischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Estonian\"\nmsgstr \"Estnischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Bullgarian\"\nmsgstr \"Bulgarischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Arabic\"\nmsgstr \"Arabischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Danish\"\nmsgstr \"Dänischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Portuguese\"\nmsgstr \"Portugiesischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Greek\"\nmsgstr \"Griechischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Finnish\"\nmsgstr \"Finnischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Ainu\"\nmsgstr \"Ainuischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Algonquian\"\nmsgstr \"Algonkinischen\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"French\"\nmsgstr \"Französischen\"\n\n# Settings\nmsgid \"Language\"\nmsgstr \"Sprache\"\n\nmsgid \"Display\"\nmsgstr \"Design\"\n\nmsgid \"General\"\nmsgstr \"Allgemein\"\n\nmsgid \"Default search language\"\nmsgstr \"Sprache für Suchanfragen\"\n\nmsgid \"Page language\"\nmsgstr \"Sprache der Webseite\"\n\nmsgid \"Always show english results\"\nmsgstr \"Zeige englische Ergebnisse immer\"\n\nmsgid \"Show english results on top\"\nmsgstr \"Zeige englische Ergebnisse als erstes\"\n\nmsgid \"Focus search bar on load\"\nmsgstr \"Fokussiere die Suchleiste 
nach dem Suchen\"\n\nmsgid \"Select input on load\"\nmsgstr \"Markiere den Input nach dem Suchen\"\n\nmsgid \"Results per page\"\nmsgstr \"Ergebnisse pro Seite\"\n\nmsgid \"Number...\"\nmsgstr \"Nummer...\"\n\nmsgid \"Input has to be in range of 1 and 100!\"\nmsgstr \"Eingabe muss zwischen 1 und 100 liegen!\"\n\nmsgid \"max amount of names/words/sentences shown per page\"\nmsgstr \"Anzahl der Namen/Wörter/Sätze auf einer Seite\"\n\nmsgid \"Show Furigana\"\nmsgstr \"Zeige Furigana\"\n\nmsgid \"Show example sentences\"\nmsgstr \"Zeige Beispielsätze\"\n\nmsgid \"Items per page\"\nmsgstr \"Anzahl pro Seite\"\n\nmsgid \"max amount of kanji shown per page\"\nmsgstr \"Anzahl der Kanji auf einer Seite\"\n\nmsgid \"Use dark mode\"\nmsgstr \"Dark Mode\"\n\nmsgid \"Show kanji-animation on load\"\nmsgstr \"Zeige Kanji-Animation vollständig\"\n\nmsgid \"Show kanji-animation numbers\"\nmsgstr \"Nummeriere Zeichenreihenfolge\"\n\nmsgid \"Default kanji animation speed\"\nmsgstr \"Kanji Animationsgeschwindigkeit\"\n\nmsgid \"Enable Quick-Copy\"\nmsgstr \"\\\"Quick-Copy\\\" aktivieren\"\n\nmsgid \"Share usage statistics\"\nmsgstr \"Nutzungsstatistiken teilen\"\n\nmsgid \"STATISTICS_EXPLANATION\"\nmsgstr \"Um Jotoba zu verbessern, sammeln wir vollständig anonymisierte Daten zur Webseitennutzung. 
Du kannst jedoch jederzeit aus der Sammlung aussteigen.\"\n\n# Cookie text\nmsgid \"To use this feature you have to accept to the use of cookies.\"\nmsgstr \"Um dieses Feature nutzen zu können, musst du den Cookies zustimmen.\"\n\nmsgid \"Your data will only be used for your personal website settings.\"\nmsgstr \"Deine Daten werden ausschließlich für deine persönlichen Website Einstellungen verwendet.\"\n\n# Prefix of cookie revoke text\nmsgid \"Click\"\nmsgstr \"Klicke\"\n\n# Suffix of Cookie revoke text\nmsgid \"to revoke your Cookies agreement\"\nmsgstr \"um deine Cookie Zustimmung zurückzunehmen\"\n\n# Suffix of Cookie agree text\nmsgid \"to enable Cookies\"\nmsgstr \"um Cookies zu aktivieren\"\n\n# Footer\nmsgid \"Jotoba wouldn't be able to exist without the help of many open-source data sources.\"\nmsgstr \"Jotoba würde ohne die Hilfe von vielen Open-Source Quellen nicht existieren\"\n\nmsgid \"About Page\"\nmsgstr \"\\\"Über Uns\\\" - Seite\"\n\n# Cookie Footer\nmsgid \"We use cookies to improve your experience and deliver personalized content.\"\nmsgstr \"Wir benutzen Cookies um Jotoba zu verbessern und Deinen Inhalt zu personalisieren.\"\n\nmsgid \"By using Jotoba you agree to our\"\nmsgstr \"Indem du Jotoba benutzt, stimmst du unserer\"\n\nmsgid \"privacy policy\"\nmsgstr \"Datenschutzerklärung\"\n\nmsgid \".\"\nmsgstr \"zu.\"\n\nmsgid \"Only use necessary\"\nmsgstr \"Nur notwendige verwenden\"\n\nmsgid \"Allow Cookies\"\nmsgstr \"alle Cookies erlauben\"\n\n### About Page\n\nmsgid \"is a multilingual Japanese dictionary\"\nmsgstr \"ist ein mehrsprachiges Japanisch-Wörterbuch\"\n\nmsgid \"It is easy to find translations for words or kanji, see example sentences and the way names can be written.\"\nmsgstr \"Es ist einfach Übersetzungen für Wörter oder Kanji, aber auch Beispielsätze und Namen zu finden.\"\n\nmsgid \"Here are some examples on how to use this page\"\nmsgstr \"Hier sind einige Beispiele, wie man die Webseite benutzen kann\"\n\nmsgid 
\"Quickly change the search type by pressing\"\nmsgstr \"Ändere deinen Suchtypen mit diesen Tasten:\"\n\nmsgid \"You can specify your search by typing\"\nmsgstr \"Suche nach spezifischen Ergebnissen mit:\"\n\nmsgid \"You can find verbs that are conjugated\"\nmsgstr \"Du kannst Konjugierte Verben suchen\"\n\nmsgid \"You can search multiple kanji at once\"\nmsgstr \"Suche mehrere Kanji auf einmal\"\n\nmsgid \"is open source\"\nmsgstr \"ist Open Source\"\n\nmsgid \"Check out our\"\nmsgstr \"Besuche unsere\"\n\nmsgid \"Check out the\"\nmsgstr \"Besuche unsere\"\n\nmsgid \"aswell if you are interested in upcoming features and what we are currently working on\"\nmsgstr \", wenn du an zukünftigen Features interessiert bist oder dich interessiert an welchen Features wir aktuell arbeiten\"\n\nmsgid \"for a list of all contributors in this project.\"\nmsgstr \"für eine Liste aller Mitwirkenden an diesem Projekt.\"\n\nmsgctxt \"index\"\nmsgid \"or\"\nmsgstr \"oder\"\n\nmsgid \"Press\"\nmsgstr \"Drücke\"\n\nmsgid \"to instantly focus the search bar\"\nmsgstr \"um die Suchleiste zu fokussieren\"\n\n### Info / Help Page\n\nmsgid \"Shortcuts\"\nmsgstr \"Tastenkombinationen\"\n\nmsgid \"To improve the quality of life on Jotoba, we offer some shortcuts to quickly navigate the page:\"\nmsgstr \"Um die Nutzung von Jotoba so einfach wie möglich zu machen, bieten wir einige Shortcuts an mit denen man schnell auf der Seite navigieren kann:\"\n\nmsgid \"Everywhere\"\nmsgstr \"Überall verfügbar\"\n\nmsgid \"Quickly change between words | sentences | names | kanji tabs\"\nmsgstr \"Wechsle schnell zwischen Wörtern | Sätzen | Namen | Kanji Suchen\"\n\nmsgid \"Focus the search bar\"\nmsgstr \"Fokussiere die Suchleiste\"\n\nmsgid \"Focussed search bar\"\nmsgstr \"Wenn die Suchleiste im Fokus liegt\"\n\nmsgid \"Iterate suggestions up | down\"\nmsgstr \"Iteriere Vorschläge hoch | herunter\"\n\nmsgid \"Iterate suggestions down\"\nmsgstr \"Iteriere Vorschläge herunter\"\n\nmsgid \"[Words] 
search\"\nmsgstr \"[Wörter] Suche\"\n\nmsgid \"Play the first possible audio\"\nmsgstr \"Spiele die Audio vom ersten Ergebnis ab\"\n\nmsgid \"[Kanji] search\"\nmsgstr \"[Kanji] Suche\"\n\nmsgid \"Show / Collapse compounds\"\nmsgstr \"Zeige / Verstecke Wortverbindungen\"\n\nmsgid \"To specify what kind of results your search should offer, you can use shortcuts.\"\nmsgstr \"Um deine Suche genauer zu spezifizieren, können Hashtags verwendet werden.\"\n\nmsgid \"Hashtags should be written at end end of your input like this:\"\nmsgstr \"Sie sollten folgendermaßen an das Ende eines Inputs geschrieben werden:\"\n\nmsgid \"Available Hashtags for [Words] search\"\nmsgstr \"Verfügbare Hashtags für die [Wörter] Suche\"\n\nmsgid \"Search for nouns\"\nmsgstr \"Suche nach Nomen\"\n\nmsgid \"Search for verbs\"\nmsgstr \"Suche nach Verben\"\n\nmsgid \"Search for transitive verbs\"\nmsgstr \"Suche nach transitiven Verben\"\n\nmsgid \"Search for intransitive verbs\"\nmsgstr \"Suche nach intransitiven Verben\"\n\nmsgid \"Search for adverb\"\nmsgstr \"Suche nach Adverben\"\n\nmsgid \"Search for auxilary verbs\"\nmsgstr \"Suche nach Hilfsverben\"\n\nmsgid \"Search for adjectives\"\nmsgstr \"Suche nach Adjektiven\"\n\nmsgid \"Search for pronouns\"\nmsgstr \"Suche nach Pronomen\"\n\nmsgid \"Search for conjugations\"\nmsgstr \"Suche nach Konjugationen\"\n\nmsgid \"Search for prefixes\"\nmsgstr \"Suche nach Prefixen\"\n\nmsgid \"Search for suffixes\"\nmsgstr \"Suche nach Suffixen\"\n\nmsgid \"Search for japanese particles\"\nmsgstr \"Suche nach Japanischen Partikeln [z.B. を]\"\n\nmsgid \"Lists iru/eru ending verbs which are conjugated as godan verbs\"\nmsgstr \"Zeige iru/eru endende Verben, die als Godan verb konjugiert werden\"\n\nmsgid \"Search for sfx words [comic sounds]\"\nmsgstr \"Suche nach sfx-Wörtern [z.b. 
Sounds in Comics]\"\n\nmsgid \"Search for words used for counting\"\nmsgstr \"Suche nach Wörtern zum Zählen\"\n\nmsgid \"Search for expressions\"\nmsgstr \"Suche nach Ausdrücken\"\n\nmsgid \"Search for words used as interjections\"\nmsgstr \"Suche nach Wörtern, welche als Interjektionen genutzt werden\"\n\nmsgid \"Search for numeric words\"\nmsgstr \"Suche nach numerischen Wörtern\"\n\nmsgid \"Search for abbreviations\"\nmsgstr \"Suche nach Abkürzungen\"\n\nmsgid \"Search for words that don't fit in any category\"\nmsgstr \"Suche nach Wörtern, welche in keine andere Kategorie passen\"\n\nmsgid \"Search for words included in the specific JLPT level\"\nmsgstr \"Suche nach Wörtern aus dem jeweiligen JLPT Level\"\n\nmsgid \"Search in the [words] category\"\nmsgstr \"Suche in der [Wörter] Kategorie\"\n\nmsgid \"Search in the [sentences] category\"\nmsgstr \"Suche in der [Sätze] Kategorie\"\n\nmsgid \"Search in the [name] category\"\nmsgstr \"Suche in der [Namen] Kategorie\"\n\nmsgid \"Search in the [kanji] category\"\nmsgstr \"Suche in der [Kanji] Kategorie\"\n\nmsgid \"Available Hashtags for [Sentence] search\"\nmsgstr \"Verfügbare Hashtags für die [Sätze] Suche\"\n\nmsgid \"Search for sentences included in the specific JLPT level\"\nmsgstr \"Suche nach Sätzen aus dem jeweiligen JLPT Level\"\n\nmsgid \"Hide translations by default to translate them yourself and check if its correct\"\nmsgstr \"Zeige Übersetzungen eingeklappt an, damit du die Sätze selber übersetzen und gegenprüfen kannst\"\n\nmsgid \"Available Hashtags for [Kanji] search\"\nmsgstr \"Verfügbare Hashtags für die [Kanji] Suche\"\n\nmsgid \"Search for kanji included in the specific Genki chapter\"\nmsgstr \"Suche nach Kanji aus dem jeweiligen Genki Kapitel\"\n\nmsgid \"Radical search\"\nmsgstr \"Radical suche\"\n\nmsgid \"The radical picker allows searching for radicals to make the process of picking radicals even faster. 
The supported inputs are as following:\"\nmsgstr \"Der Radikalsucher ermöglicht eine Suche innerhalb der Radikalen um eine noch schnellere Kanji Inputmethode zur Verfügung zu stellen. Die unterstützten Input-typen sind wie folgt:\"\n\nmsgid \"Results in all radicals used to build given kanji characters\"\nmsgstr \"Gibt alle Radikale an, die benutzt werden um die gesuchten Kanji zusammenzustellen\"\n\nmsgid \"Searches in words for the given query and returns in result-matching radicals\"\nmsgstr \"Führt eine Wortsuche durch und gibt alle Radikale an, die in den Ergebnissen benutzt werden\"\n\nmsgid \"Tries to find the given query in radicals names, otherwise does a word search and returns the result's kanji\"\nmsgstr \"Versucht Radikale bei ihren Namen zu finden. Gelingt dies nicht, wird eine Wortsuche durchgeführt\"\n\n## Name search\nmsgid \"Full name\"\nmsgstr \"Vollständiger Name\"\n\nmsgid \"Sex\"\nmsgstr \"Geschlecht\"\n\nmsgid \"Name origin\"\nmsgstr \"Namensherkunft\"\n\n## Kanji results\n\nmsgid \"Part\"\nmsgid_plural \"Parts\"\nmsgstr[0] \"Bestandteil\"\nmsgstr[1] \"Bestandteile\"\n\n# strokes suffix\nmsgid \"{} stroke\"\nmsgid_plural \"{} strokes\"\nmsgstr[0] \"{} Strich\"\nmsgstr[1] \"{} Striche\"\n\nmsgid \"Decomposition\"\nmsgstr \"Zusammensetzung\"\n\nmsgid \"Radical\"\nmsgstr \"Radikal\"\n\nmsgid \"Kun\"\nmsgstr \"\"\n\nmsgid \"On\"\nmsgstr \"\"\n\nmsgid \"On reading compounds\"\nmsgstr \"On - Zusammensetzungen\"\n\nmsgid \"Kun reading compounds\"\nmsgstr \"Kun - Zusammensetzungen\"\n\nmsgid \"JLPT level\"\nmsgstr \"JLPT Level\"\n\nmsgid \"of 2500 most used kanji in newspapers\"\nmsgstr \"der 2500 meist genutzten Kanji in Zeitungen\"\n\nmsgid \"Similar Kanji\"\nmsgstr \"Ähnliche Kanji\"\n\nmsgid \"Chinese reading\"\nmsgstr \"Chinesische Lesungen\"\n\nmsgid \"Korean reading\"\nmsgstr \"Koreanische Lesungen\"\n\nmsgid \"Vietnamese reading\"\nmsgstr \"Vietnamesische Lesungen\"\n\nmsgid \"Japanese names\"\nmsgstr \"Japanische Namen\"\n\n## Word 
results\nmsgid \"Words and kanji\"\nmsgstr \"Wörter und Kanjis\"\n\nmsgid \"{} could be an inflection of {}, with this form:\"\nmsgid_plural \"{} könnte eine Flexion sein von {}, mit diesen Formen:\"\nmsgstr[0] \"{} könnte eine Flexion sein von {}, mit dieser Form:\"\nmsgstr[1] \"{} könnte eine Flexion sein von {}, mit diesen Formen:\"\n\nmsgid \"Temporarily switched language to {}\"\nmsgstr \"Vorübergehend zu {} gewechselt\"\n\nmsgctxt \"inflection\"\nmsgid \"Negative\"\nmsgstr \"Negativ\"\n\nmsgctxt \"inflection\"\nmsgid \"Polite\"\nmsgstr \"Höflichkeit\"\n\nmsgctxt \"inflection\"\nmsgid \"Present\"\nmsgstr \"Gegenwart\"\n\nmsgctxt \"inflection\"\nmsgid \"Past\"\nmsgstr \"Vergangenheit\"\n\nmsgctxt \"inflection\"\nmsgid \"TeForm\"\nmsgstr \"Te-Form\"\n\nmsgctxt \"inflection\"\nmsgid \"Potential\"\nmsgstr \"Möglichkeit\"\n\nmsgctxt \"inflection\"\nmsgid \"Passive\"\nmsgstr \"Passiv\"\n\nmsgctxt \"inflection\"\nmsgid \"Causative\"\nmsgstr \"Kausativ\"\n\nmsgctxt \"inflection\"\nmsgid \"PotentialOrPassive\"\nmsgstr \"Potential or Passiv\"\n\nmsgctxt \"inflection\"\nmsgid \"Imperative\"\nmsgstr \"Imperativ\"\n\nmsgctxt \"inflection\"\nmsgid \"Tai\"\nmsgstr \"たい (Gibt an etwas tun zu wollen)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeIru\"\nmsgstr \"ている (Gibt eine andauernde Aktion an)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeAru\"\nmsgstr \"てある (Bedeutet etwas wurde getan)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeMiru\"\nmsgstr \"てみる (Bedeutet etwas zu \\\"versuchen\\\")\"\n\nmsgctxt \"inflection\"\nmsgid \"Tara\"\nmsgstr \"たら (Gibt eine Bedingung an)\"\n\nmsgid \"Taught in {} grade\"\nmsgstr \"Beigebracht in der {}. 
Klasse\"\n\nmsgid \", with this form:\"\nmsgid_plural \", with these forms:\"\nmsgstr[0] \", mit dieser Form:\"\nmsgstr[1] \", mit diesen Formen:\"\n\nmsgid \"Show Conjugations\"\nmsgstr \"Zeige Konjugationen\"\n\nmsgid \"Show collocation\"\nmsgid_plural \"Show collocations\"\nmsgstr[0] \"Zeige Kollokation\"\nmsgstr[1] \"Zeige Kollokationen\"\n\nmsgid \"Collocations\"\nmsgstr \"Kollokationen\"\n\nmsgid \"Conjugations\"\nmsgstr \"Konjugationen\"\n\nmsgid \"Antonym of {}\"\nmsgstr \"Antonym von {}\"\n\nmsgid \"See also {}\"\nmsgstr \"Siehe auch {}\"\n\nmsgid \"Pitch accent\"\nmsgstr \"Tonhöhenakzent\"\n\nmsgid \"Other forms\"\nmsgstr \"Andere Formen\"\n\nmsgid \"Affirmative\"\nmsgstr \"Positiv\"\n\nmsgid \"Negative\"\nmsgstr \"Negativ\"\n\nmsgid \"Present\"\nmsgstr \"Gegenwart\"\n\nmsgid \"Present, polite\"\nmsgstr \"Gegenwart, höflich\"\n\nmsgid \"Past\"\nmsgstr \"Vergangenheit\"\n\nmsgid \"Past, polite\"\nmsgstr \"Vergangenheit, höflich\"\n\nmsgid \"Te-form\"\nmsgstr \"Te-Form\"\n\nmsgid \"Potential\"\nmsgstr \"Potenzielle\"\n\nmsgid \"Passive\"\nmsgstr \"Passiv\"\n\nmsgid \"Causative\"\nmsgstr \"Kausativ\"\n\nmsgid \"Causative Passive\"\nmsgstr \"Kausativ-Passiv\"\n\nmsgid \"Imperative\"\nmsgstr \"Imperativ\"\n\nmsgid \"Play audio\"\nmsgstr \"Vorlesen\"\n\nmsgid \"common word\"\nmsgstr \"häufiges Wort\"\n\nmsgid \"JLPT N{}\"\nmsgstr \"\"\n\nmsgid \"Sentence search\"\nmsgstr \"Beispielsätze\"\n\nmsgid \"Download audio\"\nmsgstr \"Audio herunterladen\"\n\nmsgid \"Direct reference\"\nmsgstr \"Einzelnachweis\"\n\n# \"no words found\"\nmsgid \"words\"\nmsgstr \"Wörter\"\n\n# gairaigo\nmsgid \"From {}: {}\"\nmsgstr \"Vom {}: {}\"\n\n## Sentence search\n\nmsgid \"hide\"\nmsgstr \"verstecken\"\n\nmsgid \"show\"\nmsgstr \"zeigen\"\n\n# \"No sentences found\"\nmsgid \"sentences\"\nmsgstr \"Sätze\"\n\n## About page\n\n# Title 1\nmsgid \"About\"\nmsgstr \"Über Jotoba\"\n\nmsgid \"Jotoba is a multilingual Japanese dictionary. 
It is easy to find translations for words or kanji, see example sentences and the way names can be written.\"\nmsgstr \"Jotoba ist ein mehrsprachiges Japanisch-Wörterbuch. Es ist einfach Übersetzungen für Wörter oder Kanji zu finden oder zu sehen, wie Beispielsätze und Namen geschrieben werden.\"\n\nmsgid \"Jotoba is open source. Check out our\"\nmsgstr \"Jotoba ist Open Source. Besuche unsere\"\n\nmsgid \"Github page\"\nmsgstr \"Github Seite\"\n\nmsgid \"if you want to contribute or host Jotoba yourself.\"\nmsgstr \", wenn du Jotoba helfen willst zu wachsen oder auch selbst hosten möchtest.\"\n\nmsgid \"Trello Board\"\nmsgstr \"Trello Roadmap\"\n\nmsgid \"aswell if you are interested in upcoming features and what we are currently working on!\"\nmsgstr \", wenn du an zukünftigen Features oder unserer aktuellen Arbeit interessiert bist!\"\n\n# Title 2\nmsgid \"Data Sources and Inspiration\"\nmsgstr \"Quellen und Inspirationen\"\n\nmsgid \"Of course this project wouldn't have been possible without the help of some great data sources.\"\nmsgstr \"Jotoba wäre ohne die Hilfe von vielen großartigen und kostenlosen Projekten niemals möglich gewesen.\"\n\nmsgid \"Many thanks to every one of them for providing such a variety of data people can use to learn the japanese language.\"\nmsgstr \"Vielen Dank an jeden, der daran beteiligt war diese große Vielfalt an Daten für Japanisch Lernende zu erschaffen. 
\"\n\n# Source\nmsgid \"Jisho\"\nmsgstr \"\"\n\nmsgid \"Joto-kun\"\nmsgstr \"\"\n\nmsgid \"Joto-kun was created by a good friend of ours who is truly a wizard when it comes down to design!\"\nmsgstr \"Joto-kun wurde von einer guten Freundin erstellt, welche eine wahre Zauberin ist, wenn es ums Designen geht!\"\n\nmsgid \"Jisho, created by Kim Ahlström, Miwa Ahlström and Andrew Plummer is a pretty and powerful english-japanese dictionary.\"\nmsgstr \"Jisho, welches von Kim Ahlström, Miwa Ahlström und Andrew Plummer erstellt wurde, ist ein großartiges Englisch-Japanisch Wörterbuch.\"\n\nmsgid \"We took inspiration in their work and design to improve on their concept and offer it to a wider variety of people.\"\nmsgstr \"Wir haben uns von ihrer Arbeit inspirieren lassen um ihr Konzept und Design weiterzuentwickeln und so vielen Menschen wie möglich zur Verfügung stellen zu können.\"\n\n# Source\nmsgid \"Words (except sound effects), Kanji and Names available on this site are publicly provided and maintained by\"\nmsgstr \"Wörter (außer Sound Effekten), Kanji und Namen auf unserer Seite sind öffentlich bereitgestellt und fallen unter die\"\n\nmsgid \"and available under the license\"\nmsgstr \"und verfügbar unter der Lizenz\"\n\nmsgid \"Additionally, the RADKFILE by Jin Breen is used to link Radicals to Kanji.\"\nmsgstr \"Außerdem wird die RADKFILE von Jim Breen verwendet, welche Radikale und Kanji verlinkt.\"\n\n# Source\nmsgid \"Audio Files\"\nmsgstr \"Audio Dateien\"\n\nmsgid \"The audio files #1 were graciously made public by\"\nmsgstr \"Die Audio Dateien #1 wurden netterweise von\"\n\nmsgid \"WaniKani\"\nmsgstr \"WaniKani\"\n\nmsgid \"and\"\nmsgstr \"und\"\n\nmsgid \"Tofugo\"\nmsgstr \"Tofugo\"\n\nmsgid \"and uploaded to Github under the CC-BY-4.0 licence.\"\nmsgstr \"öffentlich gemacht und auf Github unter der CC-BY-4.0 Lizenz veröffentlicht.\"\n\nmsgid \"The audio files #2 are provided by the\"\nmsgstr \"Die Audio Dateien #2 wurden vom\"\n\nmsgid \"Kanji alive 
project\"\nmsgstr \"Kanji alive Projekt\"\n\nmsgid \"and are also available under the CC-BY-4.0 license.\"\nmsgstr \"veröffentlicht und stehen auch unter der CC-BY-4.0 Lizenz.\"\n\nmsgid \"Manga Sound Effects\"\nmsgstr \"Manga Sound Effekte\"\n\nmsgid \"The data about Sound Effects is graciously provided by Chris Kincaid and is used as additional data in the word search.\"\nmsgstr \"Die Daten der Sound Effekte wurden uns netterweise von Chris Kincaid zur Verfügung gestellt und können über die Wörtersuche gefunden werden.\"\n\n# Source\nmsgid \"Sentences are provided by Tatoeba under the Creative Commons CC 1.0 and 2.0 licences. \"\nmsgstr \"Sätze sind von Tatoeba unter den Creative Commons CC 1.0 und 2.0 Lizenzen bereitgestellt.\"\n\n# Source\nmsgid \"Kanji Animations\"\nmsgstr \"Kanji Animationen\"\n\nmsgid \"The raw data used for kanji animations is publicly provided by KanjiVG, a project by Ulrich Apel.\"\nmsgstr \"Die Rohdaten der Kanji Animationen wurden von Ulrich Apel und seinem Projekt 'KanjiVG' bereitgestellt.\"\n\nmsgid \"The conversion into images and animated SVG is done by a ruby script which was made by\"\nmsgstr \"Die Konvertierung der Bilder und animierten SVG wurde durch ein Ruby Skript von\"\n\nmsgid \"Kimtaro\"\nmsgstr \"\"\n\nmsgid \"and altered by\"\nmsgstr \"erschaffen, welches von\"\n\nmsgid \"Yukáru\"\nmsgstr \"Yukáru speziell für Jotoba angepasst wurde.\"\n\n# Source\nmsgid \"JLPT Data\"\nmsgstr \"JLPT Daten\"\n\nmsgid \"Data about JLPT proficiencies are by provided by Jonathan Waller.\"\nmsgstr \"Die Informationen über die JLPT Fähigkeitslevel sind von Jonathan Waller zur Verfügung gestellt worden.\"\n\nmsgid \"There is also some non-free data available on his website, so check it out if you are interested.\"\nmsgstr \"Auf seiner Webseite gibt es noch weitere, nicht kostenlose Daten, falls du dich für seine Arbeit interessierst.\"\n\n# Source\nmsgid \"Word tokenization\"\nmsgstr \"Wort-Tokenisierung\"\n\nmsgid \"Word tokenization is done 
using UniDic, by the UniDic Consortium and used for Japanese morphological analysis implementations.\"\nmsgstr \"Die Wort-Tokenisierung erfolgt mit UniDic vom UniDic Consortium, das für japanische morphologische Analysen verwendet wird.\"\n\n# Source\nmsgid \"Pitch accents\"\nmsgstr \"Pitch-Akzente\"\n\nmsgid \"Data about Radicals used in specific Kanji are provided by Kanjium.\"\nmsgstr \"Diese Daten wurden vom Kanjium Projekt zur Verfügung gestellt.\"\n\nmsgid \"On the project's Github Page you can find lots of data about Kanji.\"\nmsgstr \"Auf ihrer Github Seite findet man viele verschiedene Infos über Kanji.\"\n\nmsgid \"Pitch accent data has been extracted from UniDic.\"\nmsgstr \"Pitch-Akzent Daten wurden aus dem UniDic extrahiert.\"\n\n## Jmdict\n\n# Dialect(s)\nmsgid \"{} dialect\"\nmsgstr \"{} Dialekt\"\n\n# Information\nmsgid \"ateji\"\nmsgstr \"\"\n\nmsgid \"irregular kana\"\nmsgstr \"Unregelmäßiges Kana\"\n\nmsgid \"irregular kanji\"\nmsgstr \"Unregelmäßiges Kanji\"\n\nmsgid \"irregular okurigana\"\nmsgstr \"Unregelmäßiges Okurigana\"\n\nmsgid \"outdated kana\"\nmsgstr \"veraltetes Kana\"\n\nmsgid \"outdated kanji\"\nmsgstr \"veraltetes Kanji\"\n\nmsgid \"gikun\"\nmsgstr \"\"\n\nmsgid \"usually written in kana\"\nmsgstr \"Normalerweise in Kana geschrieben\"\n\nmsgid \"rarely used kanji form\"\nmsgstr \"Selten verwendete Kanjiform\"\n\n# Misc\n\nmsgid \"Abbreviation\"\nmsgstr \"Abkürzung\"\n\nmsgid \"Archaism\"\nmsgstr \"Archaismus\"\n\nmsgid \"Character\"\nmsgstr \"Charakter\"\n\nmsgid \"Childrens language\"\nmsgstr \"Kindersprache\"\n\nmsgid \"Colloquialism\"\nmsgstr \"Umgangssprache\"\n\nmsgid \"Company name\"\nmsgstr \"Firmenname\"\n\nmsgid \"Creature\"\nmsgstr \"Kreatur\"\n\nmsgid \"Dated term\"\nmsgstr \"Datierter Begriff\"\n\nmsgid \"Deity\"\nmsgstr \"Gottheit\"\n\nmsgid \"Derogatory\"\nmsgstr \"Abwertend\"\n\nmsgid \"Document\"\nmsgstr \"Dokument\"\n\nmsgid \"Event\"\nmsgstr \"Event\"\n\nmsgid \"Familiar language\"\nmsgstr \"Vertraute 
Sprache\"\n\nmsgid \"Female term/language\"\nmsgstr \"Weiblicher Begriff/Sprache\"\n\nmsgid \"Fiction\"\nmsgstr \"Fiktion\"\n\nmsgid \"Given name\"\nmsgstr \"Vorname\"\n\nmsgid \"Group\"\nmsgstr \"Gruppe\"\n\nmsgid \"Historical term\"\nmsgstr \"Historischer Begriff\"\n\nmsgid \"Honorific language\"\nmsgstr \"Höfliche Sprache\"\n\nmsgid \"Humble language\"\nmsgstr \"Bescheidene Sprache\"\n\nmsgid \"Idiomatic expression\"\nmsgstr \"Idiomatischer Ausdruck\"\n\nmsgid \"Jocular humorous term\"\nmsgstr \"Scherzhafter humoristischer Begriff\"\n\nmsgid \"Legend\"\nmsgstr \"Legende\"\n\nmsgid \"Literary/formal term\"\nmsgstr \"Literarischer/formeller Begriff\"\n\nmsgid \"Manga slang\"\nmsgstr \"Manga-Slang\"\n\nmsgid \"Male term/language\"\nmsgstr \"Männlicher Begriff/Sprache\"\n\nmsgid \"Mythology\"\nmsgstr \"Mythologie\"\n\nmsgid \"Internet slang\"\nmsgstr \"Internet-Slang\"\n\nmsgid \"Object\"\nmsgstr \"Objekt\"\n\nmsgid \"Obsolete term\"\nmsgstr \"Veralteter Begriff\"\n\nmsgid \"Obscure term\"\nmsgstr \"Unbekannter Begriff\"\n\nmsgid \"Onomatopoetic or mimetic word\"\nmsgstr \"Onomatopoetisches oder mimetisches Wort\"\n\nmsgid \"Organization name\"\nmsgstr \"Organisationsname\"\n\nmsgid \"Other\"\nmsgstr \"Andere\"\n\nmsgid \"Person name\"\nmsgstr \"Personenname\"\n\nmsgid \"Place name\"\nmsgstr \"Ortsname\"\n\nmsgid \"Poetical term\"\nmsgstr \"Poetischer Begriff\"\n\nmsgid \"Polite language\"\nmsgstr \"Höfliche Sprache\"\n\nmsgid \"Product name\"\nmsgstr \"Produktname\"\n\nmsgid \"Proverb\"\nmsgstr \"Sprichwort\"\n\nmsgid \"Qutation\"\nmsgstr \"Zitat\"\n\nmsgid \"Rare\"\nmsgstr \"Selten\"\n\nmsgid \"Religion\"\nmsgstr \"\"\n\nmsgid \"Sensitive\"\nmsgstr \"Sensibel\"\n\nmsgid \"Service\"\nmsgstr \"\"\n\nmsgid \"Slang\"\nmsgstr \"\"\n\nmsgid \"Railway station\"\nmsgstr \"Bahnhof\"\n\nmsgid \"Family or surname\"\nmsgstr \"Familien- oder Nachname\"\n\nmsgid \"Usually written in kana\"\nmsgstr \"Üblicherweise in Kana geschrieben\"\n\nmsgid \"Unclassified name\"\nmsgstr 
\"Nicht klassifizierter Name\"\n\nmsgid \"Vulgar expression/word\"\nmsgstr \"Vulgärer Ausdruck/Wort\"\n\nmsgid \"Artwork\"\nmsgstr \"Kunstwerk\"\n\nmsgid \"Rude/x-rated term\"\nmsgstr \"Nicht jugendfreier Begriff\"\n\nmsgid \"Yojijukugo\"\nmsgstr \"\"\n\n# Fields\n\nmsgid \"{} term\"\nmsgstr \"{} Begriff\"\n\n# The following words will be inserted in the brackets above. Check the Syntax.\n\nmsgid \"Agriculture\"\nmsgstr \"Landwirtschaft\"\n\nmsgid \"Anatomy\"\nmsgstr \"Anatomie\"\n\nmsgid \"Archeology\"\nmsgstr \"Archäologie\"\n\nmsgid \"Architecture\"\nmsgstr \"Architektur\"\n\nmsgid \"Art aesthetics\"\nmsgstr \"Kunstästhetik\"\n\nmsgid \"Astronomy\"\nmsgstr \"Astronomie\"\n\nmsgid \"Audio/visual\"\nmsgstr \"Audio/visuell\"\n\nmsgid \"Aviation\"\nmsgstr \"Luftfahrt\"\n\nmsgid \"Baseball\"\nmsgstr \"\"\n\nmsgid \"Biochemistry\"\nmsgstr \"Biochemie\"\n\nmsgid \"Biology\"\nmsgstr \"Biologie\"\n\nmsgid \"Botany\"\nmsgstr \"Botanik\"\n\nmsgid \"Buddhism\"\nmsgstr \"Buddhismus\"\n\nmsgid \"Business\"\nmsgstr \"\"\n\nmsgid \"Chemistry\"\nmsgstr \"Chemie\"\n\nmsgid \"Christianity\"\nmsgstr \"Christentum\"\n\nmsgid \"Computing\"\nmsgstr \"Computer\"\n\nmsgid \"Clothing\"\nmsgstr \"Kleidung\"\n\nmsgid \"Crystallography\"\nmsgstr \"Kristallographie\"\n\nmsgid \"Ecology\"\nmsgstr \"Ökologie\"\n\nmsgid \"Economics\"\nmsgstr \"Wirtschaft\"\n\nmsgid \"Electricity\"\nmsgstr \"Elektrizität\"\n\nmsgid \"Electronics\"\nmsgstr \"Elektronik\"\n\nmsgid \"Embryology\"\nmsgstr \"Embryologie\"\n\nmsgid \"Engineering\"\nmsgstr \"Ingenieurwesen\"\n\nmsgid \"Entomology\"\nmsgstr \"Entomologie\"\n\nmsgid \"Finance\"\nmsgstr \"Finanzen\"\n\nmsgid \"Fishing\"\nmsgstr \"Fischen\"\n\nmsgid \"FoodCooking\"\nmsgstr \"Essen/Kochen\"\n\nmsgid \"Gardening\"\nmsgstr \"Gärtnerei\"\n\nmsgid \"Genetics\"\nmsgstr \"Genetik\"\n\nmsgid \"Geography\"\nmsgstr \"Geografie\"\n\nmsgid \"Geology\"\nmsgstr \"Geologie\"\n\nmsgid \"Geometry\"\nmsgstr \"Geometrie\"\n\nmsgid \"Go (game)\"\nmsgstr \"\"\n\nmsgid 
\"Golf\"\nmsgstr \"\"\n\nmsgid \"Grammar\"\nmsgstr \"Grammatik\"\n\nmsgid \"Greek mythology\"\nmsgstr \"Griechische Mythologie\"\n\nmsgid \"Hanafuda\"\nmsgstr \"\"\n\nmsgid \"Horseracing\"\nmsgstr \"Pferderennen\"\n\nmsgid \"Law\"\nmsgstr \"Recht\"\n\nmsgid \"Linguistics\"\nmsgstr \"Linguistik\"\n\nmsgid \"Logic\"\nmsgstr \"Logik\"\n\nmsgid \"Martial arts\"\nmsgstr \"\"\n\nmsgid \"Mahjong\"\nmsgstr \"\"\n\nmsgid \"Mathematics\"\nmsgstr \"Mathematik\"\n\nmsgid \"MechanicalEngineering\"\nmsgstr \"Maschinenbau\"\n\nmsgid \"Medicine\"\nmsgstr \"Medizin\"\n\nmsgid \"Climate/weather\"\nmsgstr \"Klima/Wetter\"\n\nmsgid \"Military\"\nmsgstr \"Militär\"\n\nmsgid \"Music\"\nmsgstr \"Musik\"\n\nmsgid \"Ornithology\"\nmsgstr \"Vogelkunde\"\n\nmsgid \"Paleontology\"\nmsgstr \"Paläontologie\"\n\nmsgid \"Pathology\"\nmsgstr \"Pathologie\"\n\nmsgid \"Pharmacy\"\nmsgstr \"Pharmazie\"\n\nmsgid \"Philosophy\"\nmsgstr \"Philosophie\"\n\nmsgid \"Photography\"\nmsgstr \"Fotografie\"\n\nmsgid \"Physics\"\nmsgstr \"Physik\"\n\nmsgid \"Physiology\"\nmsgstr \"Physiologie\"\n\nmsgid \"Printing\"\nmsgstr \"\"\n\nmsgid \"Psychology\"\nmsgstr \"Psychologie\"\n\nmsgid \"Psychiatry\"\nmsgstr \"Psychiatrie\"\n\nmsgid \"Railway\"\nmsgstr \"Eisenbahn\"\n\n\nmsgid \"Shinto\"\nmsgstr \"\"\n\nmsgid \"Shogi\"\nmsgstr \"\"\n\nmsgid \"Sports\"\nmsgstr \"Sport\"\n\nmsgid \"Statistics\"\nmsgstr \"Statistiken\"\n\nmsgid \"Sumo\"\nmsgstr \"\"\n\nmsgid \"Telecommunications\"\nmsgstr \"Telekommunikation\"\n\nmsgid \"Trademark\"\nmsgstr \"\"\n\nmsgid \"Videogame\"\nmsgstr \"Videospiel\"\n\nmsgid \"Zoology\"\nmsgstr \"Zoologie\"\n\n# Part of speech\nmsgid \"Godan verb\"\nmsgstr \"Godan Verb\"\n\nmsgid \"Irregular verb with {} ending\"\nmsgstr \"Unregelmäßiges Verb mit {} Endung\"\n\nmsgid \"SoundFx\"\nmsgstr \"\"\n\nmsgid \"Expression\"\nmsgstr \"Ausdruck\"\n\nmsgid \"Counter\"\nmsgstr \"Zählwort\"\n\nmsgid \"Suffix\"\nmsgstr \"\"\n\nmsgid \"Prefix\"\nmsgstr \"Präfix\"\n\nmsgid \"Particle\"\nmsgstr 
\"Partikel\"\n\nmsgid \"Interjection\"\nmsgstr \"Ausruf\"\n\nmsgid \"Symbol\"\nmsgstr \"\"\n\nmsgid \"Pronoun\"\nmsgstr \"Pronomen\"\n\nmsgid \"Auxilary\"\nmsgstr \"\"\n\nmsgid \"Numeric\"\nmsgstr \"Nummer\"\n\nmsgid \"Adverb-To\"\nmsgstr \"To-Adverb\"\n\nmsgid \"Adverb\"\nmsgstr \"Adverb\"\n\nmsgid \"Adjective\"\nmsgstr \"Adjektiv\"\n\nmsgid \"Auxilary adjective\"\nmsgstr \"Hilfsadjektiv\"\n\nmsgid \"Auxilary Verb\"\nmsgstr \"Hilfsverb\"\n\nmsgid \"Verb\"\nmsgstr \"Verb\"\n\nmsgid \"Conjugation\"\nmsgstr \"Konjugation\"\n\nmsgid \"Unclassified\"\nmsgstr \"nicht klassifiziert\"\n\nmsgid \"Noun or verb describing a noun\"\nmsgstr \"Nomen beschreibendes Nomen oder Verb\"\n\nmsgid \"I adjective\"\nmsgstr \"I Adjektiv\"\n\nmsgid \"I adjective (conjugated like いい)\"\nmsgstr \"I Adjektiv (konjugiert wie いい)\"\n\nmsgid \"Ku adjective\"\nmsgstr \"Ku Adjektiv\"\n\nmsgid \"Na adjective\"\nmsgstr \"Na Adjektiv\"\n\nmsgid \"Formal form of na adjective\"\nmsgstr \"Formelle Form eines na-Adjektives\"\n\nmsgid \"No adjective\"\nmsgstr \"No-Adjektiv\"\n\nmsgid \"Pre noun adjective\"\nmsgstr \"Pre-Nomen Adjektiv\"\n\nmsgid \"Shiku adjective\"\nmsgstr \"Shiku Adjektiv\"\n\nmsgid \"Taru adjective\"\nmsgstr \"Taru Adjektiv\"\n\nmsgid \"Noun\"\nmsgstr \"Nomen\"\n\nmsgid \"Noun adverbial\"\nmsgstr \"adverbiales Nomen\"\n\nmsgid \"Prefix (noun)\"\nmsgstr \"Präfix (Nomen)\"\n\nmsgid \"Suffix (noun)\"\nmsgstr \"Suffix (Nomen)\"\n\nmsgid \"Temporal noun\"\nmsgstr \"zeitliches Nomen\"\n\nmsgid \"Unspecified verb\"\nmsgstr \"Unspezifiziertes Verb\"\n\nmsgid \"Intransitive verb\"\nmsgstr \"Intransitives Verb\"\n\nmsgid \"Transitive verb\"\nmsgstr \"Transitives Verb\"\n\nmsgid \"Ichidan verb\"\nmsgstr \"Ichidan Verb\"\n\nmsgid \"Ichidan zuru verb\"\nmsgstr \"Ichidan zuru Verb\"\n\nmsgid \"Ichidan kureru verb\"\nmsgstr \"Ichidan kureru Verb\"\n\nmsgid \"Kuru verb\"\nmsgstr \"Kuru Verb\"\n\nmsgid \"Noun taking suru\"\nmsgstr \"\"\n\nmsgid \"Suru verb\"\nmsgstr \"Suru Verb\"\n\nmsgid \"Suru 
special\"\nmsgstr \"Suru verb, Spezialfall\"\n\nmsgid \"Pre-noun\"\nmsgstr \"\"\n\n# this thingy -> \" \"\nmsgid \"Space\"\nmsgstr \"Leerzeichen\"\n\n## Name search\nmsgctxt \"name_type\"\nmsgid \"Company\"\nmsgstr \"Firma\"\n\nmsgctxt \"name_type\"\nmsgid \"Female\"\nmsgstr \"Weiblich\"\n\nmsgctxt \"name_type\"\nmsgid \"Male\"\nmsgstr \"Männlich\"\n\nmsgctxt \"name_type\"\nmsgid \"Organization\"\nmsgstr \"Organisation\"\n\nmsgctxt \"name_type\"\nmsgid \"Persons name\"\nmsgstr \"Personenname\"\n\nmsgctxt \"name_type\"\nmsgid \"Place\"\nmsgstr \"Platz\"\n\nmsgctxt \"name_type\"\nmsgid \"Product\"\nmsgstr \"Produkt\"\n\nmsgctxt \"name_type\"\nmsgid \"(Railway)Station\"\nmsgstr \"Haltestelle\"\n\nmsgctxt \"name_type\"\nmsgid \"Surname\"\nmsgstr \"Familienname\"\n\nmsgctxt \"name_type\"\nmsgid \"Unknown\"\nmsgstr \"unbekannt\"\n\nmsgctxt \"name_type\"\nmsgid \"Art work\"\nmsgstr \"Kunstwerk\"\n\nmsgctxt \"name_type\"\nmsgid \"Character\"\nmsgstr \"Charakter\"\n\nmsgctxt \"name_type\"\nmsgid \"Deity\"\nmsgstr \"Göttliches Wesen\"\n\nmsgctxt \"name_type\"\nmsgid \"Document\"\nmsgstr \"Dokument\"\n\nmsgctxt \"name_type\"\nmsgid \"Event\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Fiction\"\nmsgstr \"Fiktion\"\n\nmsgctxt \"name_type\"\nmsgid \"Group\"\nmsgstr \"Gruppe\"\n\nmsgctxt \"name_type\"\nmsgid \"Legend\"\nmsgstr \"Legende\"\n\nmsgctxt \"name_type\"\nmsgid \"Mythology\"\nmsgstr \"Mythologie\"\n\nmsgctxt \"name_type\"\nmsgid \"Object\"\nmsgstr \"Objekt\"\n\nmsgctxt \"name_type\"\nmsgid \"Other\"\nmsgstr \"Andere\"\n\nmsgctxt \"name_type\"\nmsgid \"Religion\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Service\"\nmsgstr \"\"\n\n\n# \"No names found\"\nmsgid \"names\"\nmsgstr \"Namen\"\n\n## Search help\nmsgid \"Search Help\"\nmsgstr \"Suchhilfe\"\n\nmsgid \"No {} found\"\nmsgstr \"Keine {} gefunden\"\n\nmsgid \"Your default search language might not fit your input\"\nmsgstr \"Deine eingestellte Sprache und die Sprache deiner Suchanfrage stimmen nicht 
überein\"\n\nmsgid \"Check your search for typos\"\nmsgstr \"Überprüfe deine Suche nach Tippfehlern\"\n\nmsgid \"Use more generic search terms\"\nmsgstr \"Benutze allgemeinere Begriffe\"\n\nmsgid \"Try finding your search in a different category using\"\nmsgstr \"Versuche in einer anderen Kategorie zu suchen, mithilfe \"\n\nmsgid \"Your search request might not be included in our database yet\"\nmsgstr \"Deine Suchanfrage ist (noch) nicht in unserer Datenbank\"\n\nmsgid \"If you think your search should be contained in our database, submit an issue on\"\nmsgstr \"Wenn du der Meinung bist, diese Suchanfrage sollte in unserer Datenbank enthalten sein, erstelle gerne ein Issue auf\"\n\n# Also check our Trello board since we might be working on it\nmsgid \"Also check our\"\nmsgstr \"Außerdem besuche unser\"\n\nmsgid \"since we might be working on it!\"\nmsgstr \", denn es kann sein, dass wir bereits daran arbeiten!\"\n\n \n# Paginator\nmsgid \"First\"\nmsgstr \"Erste\"\n \nmsgid \"Last\"\nmsgstr \"Letzte\"\n"
  },
  {
    "path": "locales/en.po",
    "content": "# SINGULAR\n# msgctxt \"\"\n# msgid \"\"\n# msgstr \"\"\n\n# PLURAL\n# msgctxt \"\"\n# msgid \"\"\n# msgid_plural \"\"\n# msgstr[0] \"\"\n# OPTIONAL: msgstr[1] \"\" \n\nmsgid \"\"\nmsgstr \"\"\n\"Project-Id-Version: PACKAGE VERSION\\n\"\n\"PO-Revision-Date: 2021-12-12 16:30+0100\\n\"\n\"Last-Translator:  <jojii@gmx.net>\\n\"\n\"Language-Team: English\\n\"\n\"Language: en\\n\"\n\"MIME-Version: 1.0\\n\"\n\"Content-Type: text/plain; charset=UTF-8\\n\"\n\"Content-Transfer-Encoding: 8bit\\n\"\n\"Plural-Forms: nplurals=2; plural=(n != 1);\\n\"\n\nmsgid \"Jotoba\"\nmsgstr \"\"\n\n#### Base template\nmsgid \"Search...\"\nmsgstr \"\"\n\nmsgid \"Search\"\nmsgstr \"\"\n\nmsgid \"Settings\"\nmsgstr \"\"\n\nmsgid \"Radicals\"\nmsgstr \"\"\n\nmsgid \"Voice\"\nmsgstr \"\"\n\n# Rad Help Msg\nmsgid \"This tool allows you to find Kanji by their core components (Radicals)\"\nmsgstr \"\"\n\nmsgid \"You can select Radicals below and add found Kanji to the search bar\"\nmsgstr \"\"\n\nmsgid \"Enter\"\nmsgstr \"\"\n\nmsgid \"to start searching\"\nmsgstr \"\"\n\n# Rad Btns\n\nmsgid \"Reset\"\nmsgstr \"\"\n\nmsgid \"reset\"\nmsgstr \"\"\n\nmsgid \"apply\"\nmsgstr \"\"\n\nmsgid \"create\"\nmsgstr \"\"\n\nmsgid \"here\"\nmsgstr \"\"\n\nmsgid \"Accept\"\nmsgstr \"\"\n\nmsgid \"Decline\"\nmsgstr \"\"\n\n# Search type / Dropdown\nmsgid \"Words\"\nmsgstr \"\"\n\nmsgid \"Kanji\"\nmsgstr \"\"\n\nmsgid \"Sentences\"\nmsgstr \"\"\n\nmsgid \"Names\"\nmsgstr \"\"\n\n# Speech overlay\n\nmsgid \"Current language\"\nmsgstr \"\"\n\nmsgid \"Currently listening\"\nmsgstr \"\"\n\nmsgid \"No\"\nmsgstr \"\"\n\nmsgid \"To change your language, select one of the following\"\nmsgstr \"\"\n\n# Image search overlay\n\nmsgid \"Enter a URL or upload your image directly and Jotoba will try to search for japanese words contained in the picture.\"\nmsgstr \"\"\n\nmsgid \"Enter Image URL...\"\nmsgstr \"\"\n\n# Rad Picker overlay\n\nmsgid \"Select Radicals\"\nmsgstr \"\"\n\nmsgid \"Select Kanji\"\nmsgstr 
\"\"\n\nmsgid \"Search Radicals...\"\nmsgstr \"\"\n\n# Notification overlay\nmsgid \"Notifications\"\nmsgstr \"\"\n\nmsgid \"No new notifications\"\nmsgstr \"\"\n\nmsgid \"Show all\"\nmsgstr \"\"\n\nmsgid \"Close\"\nmsgstr \"\"\n\n# Languages\n\nmsgid \"English\"\nmsgstr \"\"\n\nmsgid \"German\"\nmsgstr \"\"\n\nmsgid \"Russian\"\nmsgstr \"\"\n\nmsgid \"Spanish\"\nmsgstr \"\"\n\nmsgid \"Swedish\"\nmsgstr \"\"\n\nmsgid \"French\"\nmsgstr \"\"\n\nmsgid \"Dutch\"\nmsgstr \"\"\n\nmsgid \"Hungarian\"\nmsgstr \"\"\n\nmsgid \"Slovenian\"\nmsgstr \"\"\n\nmsgid \"Japanese\"\nmsgstr \"\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"German\"\nmsgstr \"\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"English\"\nmsgstr \"\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Georgian\"\nmsgstr \"Georgian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Chinese\"\nmsgstr \"Chinese\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Manchu\"\nmsgstr \"Manchu\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Kurdish\"\nmsgstr \"Kurdish\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"ChinookJargon\"\nmsgstr \"Chinook jargon\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Italian\"\nmsgstr \"Italian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Malayalam\"\nmsgstr \"Malayalam\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tibetian\"\nmsgstr \"Tibetan\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Mongolian\"\nmsgstr \"Mongolian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Romanian\"\nmsgstr \"Romanian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Bantu\"\nmsgstr \"Bantu\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Norwegian\"\nmsgstr \"Norwegian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Icelandic\"\nmsgstr \"Icelandic\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Breton\"\nmsgstr \"Breton\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Maori\"\nmsgstr \"Maori\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Latin\"\nmsgstr \"Latin\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Amharic\"\nmsgstr \"Amharic\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Khmer\"\nmsgstr \"Khmer\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Swahili\"\nmsgstr 
\"Swahili\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Hebrew\"\nmsgstr \"Hebrew\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Galician\"\nmsgstr \"Galician\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Korean\"\nmsgstr \"Korean\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tamil\"\nmsgstr \"Tamil\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Viatnamese\"\nmsgstr \"Vietnamese\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Polish\"\nmsgstr \"Polish\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Sanskrit\"\nmsgstr \"Sanskrit\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Persian\"\nmsgstr \"Persian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Filipino\"\nmsgstr \"Filipino\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Moldavian\"\nmsgstr \"Moldavian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Croatian\"\nmsgstr \"Croatian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Thai\"\nmsgstr \"Thai\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Burmese\"\nmsgstr \"Burmese\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Slovak\"\nmsgstr \"Slovak\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Czech\"\nmsgstr \"Czech\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Hindi\"\nmsgstr \"Hindi\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Mapudungun\"\nmsgstr \"Mapudungun\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Turkish\"\nmsgstr \"Turkish\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Hawaiian\"\nmsgstr \"Hawaiian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Afrikaans\"\nmsgstr \"Afrikaans\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Esperanto\"\nmsgstr \"Esperanto\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Yiddish\"\nmsgstr \"Yiddish\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Somali\"\nmsgstr \"Somali\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tahitian\"\nmsgstr \"Tahitian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Urdu\"\nmsgstr \"Urdu\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Indonesian\"\nmsgstr \"Indonesian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Estonian\"\nmsgstr \"Estonian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Bulgarian\"\nmsgstr \"Bulgarian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Arabic\"\nmsgstr \"Arabic\"\n\nmsgctxt 
\"foreign_lang\"\nmsgid \"Danish\"\nmsgstr \"Danish\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Portuguese\"\nmsgstr \"Portuguese\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Greek\"\nmsgstr \"Greek\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Finnish\"\nmsgstr \"Finnish\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Ainu\"\nmsgstr \"Ainu\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Algonquian\"\nmsgstr \"Algonquian\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"French\"\nmsgstr \"French\"\n\n# Settings\nmsgid \"Language\"\nmsgstr \"\"\n\nmsgid \"Display\"\nmsgstr \"\"\n\nmsgid \"General\"\nmsgstr \"\"\n\nmsgid \"Default search language\"\nmsgstr \"\"\n\nmsgid \"Page language\"\nmsgstr \"\"\n\nmsgid \"Always show english results\"\nmsgstr \"\"\n\nmsgid \"Show english results on top\"\nmsgstr \"\"\n\nmsgid \"Focus search bar on load\"\nmsgstr \"\"\n\nmsgid \"Select input on load\"\nmsgstr \"\"\n\nmsgid \"Results per page\"\nmsgstr \"\"\n\nmsgid \"Number...\"\nmsgstr \"\"\n\nmsgid \"Input has to be in range of 1 and 100!\"\nmsgstr \"\"\n\nmsgid \"max amount of names/words/sentences shown per page\"\nmsgstr \"\"\n\nmsgid \"Show Furigana\"\nmsgstr \"\"\n\nmsgid \"Show example sentences\"\nmsgstr \"\"\n\nmsgid \"Items per page\"\nmsgstr \"\"\n\nmsgid \"max amount of kanji shown per page\"\nmsgstr \"\"\n\nmsgid \"Use dark mode\"\nmsgstr \"\"\n\nmsgid \"Show kanji on load\"\nmsgstr \"\"\n\nmsgid \"Show kanji numbers\"\nmsgstr \"\"\n\nmsgid \"Default kanji animation speed\"\nmsgstr \"\"\n\nmsgid \"Enable Quick-Copy\"\nmsgstr \"\"\n\nmsgid \"Share usage statistics\"\nmsgstr \"\"\n\nmsgid \"STATISTICS_EXPLANATION\"\nmsgstr \"To help improve Jotoba, we collect certain anonymous data (in accordance with the GDPR) by default - but if you wish, we will stop collecting anything from you.\"\n\n# Cookie text\nmsgid \"To use this feature you have to accept to the use of cookies.\"\nmsgstr \"\"\n\nmsgid \"Your data will only be used for your personal website settings.\"\nmsgstr \"\"\n\n# Prefix of cookie revoke 
text\nmsgid \"Click\"\nmsgstr \"\"\n\n# Suffix of Cookie revoke text\nmsgid \"to revoke your Cookies agreement\"\nmsgstr \"\"\n\n# Suffix of Cookie agree text\nmsgid \"to enable Cookies\"\nmsgstr \"\"\n\n# Footer\nmsgid \"Jotoba wouldn't be able to exist without the help of many open-source data sources\"\nmsgstr \"\"\n\nmsgid \"About Page\"\nmsgstr \"\"\n\n# Cookie Footer\nmsgid \"We use cookies to improve your experience and deliver personalized content.\"\nmsgstr \"\"\n\nmsgid \"By using Jotoba you agree to our\"\nmsgstr \"\"\n\n#  - End of the above sentence -\nmsgid \".\"  \nmsgstr \"\"\n\nmsgid \"privacy policy\"\nmsgstr \"\"\n\nmsgid \"Only use necessary\"\nmsgstr \"\"\n\nmsgid \"Allow Cookies\"\nmsgstr \"\"\n\n### About Page\n\nmsgid \"is a multilingual Japanese dictionary\"\nmsgstr \"\"\n\nmsgid \"It is easy to find translations for words or kanji, see example sentences and the way names can be written.\"\nmsgstr \"\"\n\nmsgid \"Here are some examples on how to use this page\"\nmsgstr \"\"\n\nmsgid \"Quickly change the search type by pressing\"\nmsgstr \"\"\n\nmsgid \"You can specify your search by typing\"\nmsgstr \"\"\n\nmsgid \"You can find verbs that are conjugated\"\nmsgstr \"\"\n\nmsgid \"You can search multiple kanji at once\"\nmsgstr \"\"\n\nmsgid \"is open source\"\nmsgstr \"\"\n\nmsgid \"Check out our\"\nmsgstr \"\"\n\nmsgid \"Check out the\"\nmsgstr \"\"\n\nmsgid \"aswell if you are interested in upcoming features and what we are currently working on\"\nmsgstr \"\"\n\nmsgid \"for a list of all contributors in this project.\"\nmsgstr \"\"\n\nmsgctxt \"index\"\nmsgid \"or\"\nmsgstr \"\"\n\nmsgctxt \"index\"\nmsgid \"Press\"\nmsgstr \"\"\n\nmsgid \"to instantly focus the search bar\"\nmsgstr \"\"\n\n### Info / Help Page\n\nmsgid \"Shortcuts\"\nmsgstr \"\"\n\nmsgid \"To improve the quality of life on Jotoba, we offer some shortcuts to quickly navigate the page:\"\nmsgstr \"\"\n\nmsgid \"Everywhere\"\nmsgstr \"\"\n\nmsgid \"Quickly change between 
words | sentences | names | kanji tabs\"\nmsgstr \"\"\n\nmsgid \"Focus the search bar\"\nmsgstr \"\"\n\nmsgid \"Focussed search bar\"\nmsgstr \"\"\n\nmsgid \"Iterate suggestions up | down\"\nmsgstr \"\"\n\nmsgid \"Iterate suggestions down\"\nmsgstr \"\"\n\nmsgid \"[Words] search\"\nmsgstr \"\"\n\nmsgid \"Play the first possible audio\"\nmsgstr \"\"\n\nmsgid \"[Kanji] search\"\nmsgstr \"\"\n\nmsgid \"Show / Collapse compounds\"\nmsgstr \"\"\n\nmsgid \"To specify what kind of results your search should offer, you can use shortcuts.\"\nmsgstr \"\"\n\nmsgid \"Hashtags should be written at end end of your input like this:\"\nmsgstr \"\"\n\nmsgid \"Available Hashtags for [Words] search\"\nmsgstr \"\"\n\nmsgid \"Search for nouns\"\nmsgstr \"\"\n\nmsgid \"Search for verbs\"\nmsgstr \"\"\n\nmsgid \"Search for transitive verbs\"\nmsgstr \"\"\n\nmsgid \"Search for intransitive verbs\"\nmsgstr \"\"\n\nmsgid \"Search for adverb\"\nmsgstr \"\"\n\nmsgid \"Search for auxilary verbs\"\nmsgstr \"\"\n\nmsgid \"Search for adjectives\"\nmsgstr \"\"\n\nmsgid \"Search for pronouns\"\nmsgstr \"\"\n\nmsgid \"Search for conjugations\"\nmsgstr \"\"\n\nmsgid \"Search for prefixes\"\nmsgstr \"\"\n\nmsgid \"Search for suffixes\"\nmsgstr \"\"\n\nmsgid \"Search for japanese particles\"\nmsgstr \"\"\n\nmsgid \"Lists iru/eru ending verbs which are conjugated as godan verbs\"\nmsgstr \"\"\n\nmsgid \"Search for sfx words [comic sounds]\"\nmsgstr \"\"\n\nmsgid \"Search for words used for counting\"\nmsgstr \"\"\n\nmsgid \"Search for expressions\"\nmsgstr \"\"\n\nmsgid \"Search for words used as interjections\"\nmsgstr \"\"\n\nmsgid \"Search for numeric words\"\nmsgstr \"\"\n\nmsgid \"Search for abbreviations\"\nmsgstr \"\"\n\nmsgid \"Search for words that don't fit in any category\"\nmsgstr \"\"\n\nmsgid \"Search for words included in the specific JLPT level\"\nmsgstr \"\"\n\nmsgid \"Search in the [words] category\"\nmsgstr \"\"\n\nmsgid \"Search in the [sentences] category\"\nmsgstr \"\"\n\nmsgid 
\"Search in the [name] category\"\nmsgstr \"\"\n\nmsgid \"Search in the [kanji] category\"\nmsgstr \"\"\n\nmsgid \"Available Hashtags for [Sentence] search\"\nmsgstr \"\"\n\nmsgid \"Search for sentences included in the specific JLPT level\"\nmsgstr \"\"\n\nmsgid \"Hide translations by default to translate them yourself and check if its correct\"\nmsgstr \"\"\n\nmsgid \"Available Hashtags for [Kanji] search\"\nmsgstr \"\"\n\nmsgid \"Search for kanji included in the specific Genki chapter\"\nmsgstr \"\"\n\nmsgid \"Radical search\"\nmsgstr \"\"\n\nmsgid \"The radical picker allows searching for radicals to make the process of picking radicals even faster. The supported inputs are as following:\"\nmsgstr \"\"\n\nmsgid \"Results in all radicals used to build given kanji characters\"\nmsgstr \"\"\n\nmsgid \"Searches in words for the given query and returns in result-matching radicals\"\nmsgstr \"\"\n\nmsgid \"Tries to find the given query in radicals names, otherwise does a word search and returns the result's kanji\"\nmsgstr \"\"\n\n## Name search\nmsgid \"Full name\"\nmsgstr \"\"\n\nmsgid \"Sex\"\nmsgstr \"\"\n\nmsgid \"Name origin\"\nmsgstr \"\"\n\n## Kanji results\n\nmsgid \"Part\"\nmsgid_plural \"Parts\"\nmsgstr[0] \"Part\"\nmsgstr[1] \"Parts\"\n\n# strokes suffix\nmsgid \"{} stroke\"\nmsgid_plural \"{} strokes\"\nmsgstr[0] \"{} stroke\"\nmsgstr[1] \"{} strokes\"\n\nmsgid \"Decomposition\"\nmsgstr \"\"\n\nmsgid \"Radical\"\nmsgstr \"\"\n\nmsgid \"Kun\"\nmsgstr \"\"\n\nmsgid \"On\"\nmsgstr \"\"\n\nmsgid \"On reading compounds\"\nmsgstr \"\"\n\nmsgid \"Kun reading compounds\"\nmsgstr \"\"\n\nmsgid \"JLPT level\"\nmsgstr \"\"\n\nmsgid \"of 2500 most used kanji in newspapers\"\nmsgstr \"\"\n\nmsgid \"Similar Kanji\"\nmsgstr \"\"\n\nmsgid \"Chinese reading\"\nmsgstr \"\"\n\nmsgid \"Korean reading\"\nmsgstr \"\"\n\nmsgid \"Vietnamese reading\"\nmsgstr \"\"\n\nmsgid \"Japanese names\"\nmsgstr \"\"\n\n## Word results\nmsgid \"Words and kanji\"\nmsgstr \"\"\n\nmsgid \"{} 
could be an inflection of {}, with this form:\"\nmsgid_plural \"{} could be an inflection of {}, with this forms:\"\nmsgstr[0] \"{} could be an inflection of {}, with this form:\"\nmsgstr[1] \"{} could be an inflection of {}, with this forms:\"\n\nmsgid \"Temporarily switched language to {}\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"Negative\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"Polite\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"Present\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"Past\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"TeForm\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"Potential\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"Passive\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"Causative\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"PotentialOrPassive\"\nmsgstr \"Potential or Passive\"\n\nmsgctxt \"inflection\"\nmsgid \"Imperative\"\nmsgstr \"\"\n\nmsgctxt \"inflection\"\nmsgid \"Tai\"\nmsgstr \"たい (Want to do something)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeIru\"\nmsgstr \"ている (Indicates an action that is ongoing)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeAru\"\nmsgstr \"てある (Indicates an action that has been done intentionally)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeMiru\"\nmsgstr \"てみる (Means to \\\"Try\\\" something)\"\n\nmsgctxt \"inflection\"\nmsgid \"Tara\"\nmsgstr \"たら (States a condition)\"\n\nmsgid \", with this form:\"\nmsgid_plural \", with these forms:\"\nmsgstr[0] \", with this form:\"\nmsgstr[1] \", with these forms:\"\n\nmsgid \"Taught in {} grade\"\nmsgstr \"\"\n\nmsgid \"Show Conjugations\"\nmsgstr \"\"\n\nmsgid \"Show collocation\"\nmsgid_plural \"Show collocations\"\nmsgstr[0] \"Show collocation\"\nmsgstr[1] \"Show collocations\"\n\nmsgid \"Collocations\"\nmsgstr \"\"\n\nmsgid \"Conjugations\"\nmsgstr \"\"\n\nmsgid \"Antonym of {}\"\nmsgstr \"Antonym of {}\"\n\nmsgid \"See also {}\"\nmsgstr \"\"\n\nmsgid \"Pitch accent\"\nmsgstr \"\"\n\nmsgid \"Other forms\"\nmsgstr \"\"\n\nmsgid 
\"Affirmative\"\nmsgstr \"\"\n\nmsgid \"Negative\"\nmsgstr \"\"\n\nmsgid \"Present\"\nmsgstr \"\"\n\nmsgid \"Present, polite\"\nmsgstr \"\"\n\nmsgid \"Past\"\nmsgstr \"\"\n\nmsgid \"Past, polite\"\nmsgstr \"\"\n\nmsgid \"Te-form\"\nmsgstr \"\"\n\nmsgid \"Potential\"\nmsgstr \"\"\n\nmsgid \"Passive\"\nmsgstr \"\"\n\nmsgid \"Causative\"\nmsgstr \"\"\n\nmsgid \"Causative Passive\"\nmsgstr \"\"\n\nmsgid \"Imperative\"\nmsgstr \"\"\n\nmsgid \"Play audio\"\nmsgstr \"\"\n\nmsgid \"common word\"\nmsgstr \"\"\n\nmsgid \"JLPT N{}\"\nmsgstr \"\"\n\nmsgid \"Download audio\"\nmsgstr \"\"\n\nmsgid \"Sentence search\"\nmsgstr \"\"\n\nmsgid \"Direct reference\"\nmsgstr \"\"\n\n# \"no words found\"\nmsgid \"words\"\nmsgstr \"\"\n\n# gairaigo\nmsgid \"From {}: {}\"\nmsgstr \"\"\n\n## Sentence search\n\nmsgid \"hide\"\nmsgstr \"\"\n\nmsgid \"show\"\nmsgstr \"\"\n\n# \"No sentences found\"\nmsgid \"sentences\"\nmsgstr \"\"\n\n## About page\n\n# Title 1\nmsgid \"About\"\nmsgstr \"\"\n\nmsgid \"Jotoba is a multilingual Japanese dictionary. It is easy to find translations for words or kanji, see example sentences and the way names can be written.\"\nmsgstr \"\"\n\nmsgid \"Jotoba is open source. 
Check out our\"\nmsgstr \"\"\n\nmsgid \"Github page\"\nmsgstr \"\"\n\nmsgid \"if you want to contribute or host Jotoba yourself.\"\nmsgstr \"\"\n\nmsgid \"Trello Board\"\nmsgstr \"\"\n\nmsgid \"aswell if you are interested in upcoming features and what we are currently working on!\"\nmsgstr \"\"\n\n# Title 2\nmsgid \"Data Sources and Inspiration\"\nmsgstr \"\"\n\nmsgid \"Of course this project wouldn't have been possible without the help of some great data sources.\"\nmsgstr \"\"\n\nmsgid \"Many thanks to every one of them for providing such a variety of data people can use to learn the japanese language.\"\nmsgstr \"\"\n\n# Source\nmsgid \"Jisho\"\nmsgstr \"\"\n\nmsgid \"Joto-kun\"\nmsgstr \"\"\n\nmsgid \"Joto-kun was created by a good friend of ours who is truly a wizard when it comes down to design!\"\nmsgstr \"\"\n\nmsgid \"Jisho, created by Kim Ahlström, Miwa Ahlström and Andrew Plummer is a pretty and powerful english-japanese dictionary.\"\nmsgstr \"\"\n\nmsgid \"We took inspiration in their work and design to improve on their concept and offer it to a wider variety of people.\"\nmsgstr \"\"\n\n# Source\nmsgid \"Words (except sound effects), Kanji and Names available on this site are publicly provided and maintained by\"\nmsgstr \"\"\n\nmsgid \"and available under the license\"\nmsgstr \"\"\n\nmsgid \"Additionally, the RADKFILE by Jin Breen is used to link Radicals to Kanji.\"\nmsgstr \"\"\n\n# Source\nmsgid \"Audio Files\"\nmsgstr \"\"\n\nmsgid \"The audio files #1 were graciously made public by\"\nmsgstr \"\"\n\nmsgid \"WaniKani\"\nmsgstr \"\"\n\nmsgid \"and\"\nmsgstr \"\"\n\nmsgid \"Tofugo\"\nmsgstr \"\"\n\nmsgid \"and uploaded to Github under the CC-BY-4.0 licence.\"\nmsgstr \"\"\n\nmsgid \"The audio files #2 are provided by the\"\nmsgstr \"\"\n\nmsgid \"Kanji alive project\"\nmsgstr \"\"\n\nmsgid \"and are also available under the CC-BY-4.0 license.\"\nmsgstr \"\"\n\nmsgid \"Manga Sound Effects\"\nmsgstr \"\"\n\nmsgid \"The data about Sound Effects is 
graciously provided by Chris Kincaid and is used as additional data in the word search.\"\nmsgstr \"\"\n\n# Source\nmsgid \"Sentences are provided by Tatoeba under the Creative Commons CC 1.0 and 2.0 licences. \"\nmsgstr \"\"\n\n# Source\nmsgid \"Kanji Animations\"\nmsgstr \"\"\n\nmsgid \"The raw data used for kanji animations is publicly provided by KanjiVG, a project by Ulrich Apel.\"\nmsgstr \"\"\n\nmsgid \"The conversion into images and animated SVG is done by a ruby script which was made by\"\nmsgstr \"\"\n\nmsgid \"Kimtaro\"\nmsgstr \"\"\n\nmsgid \"and altered by\"\nmsgstr \"\"\n\nmsgid \"Yukáru\"\nmsgstr \"\"\n\n# Source\nmsgid \"JLPT Data\"\nmsgstr \"\"\n\nmsgid \"Data about JLPT proficiencies are by provided by Jonathan Waller.\"\nmsgstr \"\"\n\nmsgid \"There is also some non-free data available on his website, so check it out if you are interested.\"\nmsgstr \"\"\n\n# Source\nmsgid \"Word tokenization\"\nmsgstr \"\"\n\nmsgid \"Word tokenization is done using UniDic, by the UniDic Consortium and used for Japanese morphological analysis implementations.\"\nmsgstr \"\"\n\n#Source\nmsgid \"Pitch accents\"\nmsgstr \"\"\n\nmsgid \"Data about Radicals used in specific Kanji are provided by Kanjium.\"\nmsgstr \"\"\n\nmsgid \"On the project's Github Page you can find lots of data about Kanji.\"\nmsgstr \"\"\n\nmsgid \"Pitch accent data has been extracted from UniDic.\"\nmsgstr \"\"\n\n## Jmdict\n\n# Dialect(s)\nmsgid \"{} dialect\"\nmsgstr \"\"\n\n# Information\nmsgid \"ateji\"\nmsgstr \"\"\n\nmsgid \"irregular kana\"\nmsgstr \"\"\n\nmsgid \"irregular kanji\"\nmsgstr \"\"\n\nmsgid \"irregular okurigana\"\nmsgstr \"\"\n\nmsgid \"outdated kana\"\nmsgstr \"\"\n\nmsgid \"outdated kanji\"\nmsgstr \"\"\n\nmsgid \"gikun\"\nmsgstr \"\"\n\nmsgid \"usually written in kana\"\nmsgstr \"\"\n\nmsgid \"rarely used kanji form\"\nmsgstr \"\"\n\n# Misc\n\nmsgid \"Abbreviation\"\nmsgstr \"\"\n\nmsgid \"Archaism\"\nmsgstr \"\"\n\nmsgid \"Character\"\nmsgstr \"\"\n\nmsgid \"Childrens 
language\"\nmsgstr \"\"\n\nmsgid \"Colloquialism\"\nmsgstr \"\"\n\nmsgid \"Company name\"\nmsgstr \"\"\n\nmsgid \"Creature\"\nmsgstr \"\"\n\nmsgid \"Dated term\"\nmsgstr \"\"\n\nmsgid \"Deity\"\nmsgstr \"\"\n\nmsgid \"Derogatory\"\nmsgstr \"\"\n\nmsgid \"Event\"\nmsgstr \"\"\n\nmsgid \"Document\"\nmsgstr \"\"\n\nmsgid \"Familiar language\"\nmsgstr \"\"\n\nmsgid \"Female term/language\"\nmsgstr \"\"\n\nmsgid \"Fiction\"\nmsgstr \"\"\n\nmsgid \"Given name\"\nmsgstr \"\"\n\nmsgid \"Group\"\nmsgstr \"\"\n\nmsgid \"Historical term\"\nmsgstr \"\"\n\nmsgid \"Honorific language\"\nmsgstr \"\"\n\nmsgid \"Humble language\"\nmsgstr \"\"\n\nmsgid \"Idiomatic expression\"\nmsgstr \"\"\n\nmsgid \"Jocular humorous term\"\nmsgstr \"\"\n\nmsgid \"Legend\"\nmsgstr \"\"\n\nmsgid \"Literary/formal term\"\nmsgstr \"\"\n\nmsgid \"Manga slang\"\nmsgstr \"\"\n\nmsgid \"Male term/language\"\nmsgstr \"\"\n\nmsgid \"Mythology\"\nmsgstr \"\"\n\nmsgid \"Internet slang\"\nmsgstr \"\"\n\nmsgid \"Object\"\nmsgstr \"\"\n\nmsgid \"Obsolete term\"\nmsgstr \"\"\n\nmsgid \"Obscure term\"\nmsgstr \"\"\n\nmsgid \"Onomatopoetic or mimetic word\"\nmsgstr \"\"\n\nmsgid \"Organization name\"\nmsgstr \"\"\n\nmsgid \"Other\"\nmsgstr \"\"\n\nmsgid \"Person name\"\nmsgstr \"\"\n\nmsgid \"Place name\"\nmsgstr \"\"\n\nmsgid \"Poetical term\"\nmsgstr \"\"\n\nmsgid \"Polite language\"\nmsgstr \"\"\n\nmsgid \"Product name\"\nmsgstr \"\"\n\nmsgid \"Proverb\"\nmsgstr \"\"\n\nmsgid \"Qutation\"\nmsgstr \"\"\n\nmsgid \"Rare\"\nmsgstr \"\"\n\nmsgid \"Religion\"\nmsgstr \"\"\n\nmsgid \"Sensitive\"\nmsgstr \"\"\n\nmsgid \"Service\"\nmsgstr \"\"\n\nmsgid \"Slang\"\nmsgstr \"\"\n\nmsgid \"Railway station\"\nmsgstr \"\"\n\nmsgid \"Family or surname\"\nmsgstr \"\"\n\nmsgid \"Usually written in kana\"\nmsgstr \"\"\n\nmsgid \"Unclassified name\"\nmsgstr \"\"\n\nmsgid \"Vulgar expression/word\"\nmsgstr \"\"\n\nmsgid \"Artwork\"\nmsgstr \"\"\n\nmsgid \"Rude/x-rated term\"\nmsgstr \"\"\n\nmsgid \"Yojijukugo\"\nmsgstr \"\"\n\n# 
Fields\n\nmsgid \"{} term\"\nmsgstr \"\"\n\nmsgid \"Agriculture\"\nmsgstr \"\"\n\nmsgid \"Anatomy\"\nmsgstr \"\"\n\nmsgid \"Archeology\"\nmsgstr \"\"\n\nmsgid \"Architecture\"\nmsgstr \"\"\n\nmsgid \"Art aesthetics\"\nmsgstr \"\"\n\nmsgid \"Astronomy\"\nmsgstr \"\"\n\nmsgid \"Audio/visual\"\nmsgstr \"\"\n\nmsgid \"Aviation\"\nmsgstr \"\"\n\nmsgid \"Baseball\"\nmsgstr \"\"\n\nmsgid \"Biochemistry\"\nmsgstr \"\"\n\nmsgid \"Biology\"\nmsgstr \"\"\n\nmsgid \"Botany\"\nmsgstr \"\"\n\nmsgid \"Buddhism\"\nmsgstr \"\"\n\nmsgid \"Business\"\nmsgstr \"\"\n\nmsgid \"Chemistry\"\nmsgstr \"\"\n\nmsgid \"Christianity\"\nmsgstr \"\"\n\nmsgid \"Computing\"\nmsgstr \"\"\n\nmsgid \"Clothing\"\nmsgstr \"\"\n\nmsgid \"Crystallography\"\nmsgstr \"\"\n\nmsgid \"Ecology\"\nmsgstr \"\"\n\nmsgid \"Economics\"\nmsgstr \"\"\n\nmsgid \"Electricity\"\nmsgstr \"\"\n\nmsgid \"Electronics\"\nmsgstr \"\"\n\nmsgid \"Embryology\"\nmsgstr \"\"\n\nmsgid \"Engineering\"\nmsgstr \"\"\n\nmsgid \"Entomology\"\nmsgstr \"\"\n\nmsgid \"Finance\"\nmsgstr \"\"\n\nmsgid \"Fishing\"\nmsgstr \"\"\n\nmsgid \"FoodCooking\"\nmsgstr \"\"\n\nmsgid \"Gardening\"\nmsgstr \"\"\n\nmsgid \"Genetics\"\nmsgstr \"\"\n\nmsgid \"Geography\"\nmsgstr \"\"\n\nmsgid \"Geology\"\nmsgstr \"\"\n\nmsgid \"Geometry\"\nmsgstr \"\"\n\nmsgid \"Go (game)\"\nmsgstr \"\"\n\nmsgid \"Golf\"\nmsgstr \"\"\n\nmsgid \"Grammar\"\nmsgstr \"\"\n\nmsgid \"Greek mythology\"\nmsgstr \"\"\n\nmsgid \"Hanafuda\"\nmsgstr \"\"\n\nmsgid \"Horseracing\"\nmsgstr \"\"\n\nmsgid \"Law\"\nmsgstr \"\"\n\nmsgid \"Linguistics\"\nmsgstr \"\"\n\nmsgid \"Logic\"\nmsgstr \"\"\n\nmsgid \"Martial arts\"\nmsgstr \"\"\n\nmsgid \"Mahjong\"\nmsgstr \"\"\n\nmsgid \"Mathematics\"\nmsgstr \"\"\n\nmsgid \"MechanicalEngineering\"\nmsgstr \"\"\n\nmsgid \"Medicine\"\nmsgstr \"\"\n\nmsgid \"Climate/weather\"\nmsgstr \"\"\n\nmsgid \"Military\"\nmsgstr \"\"\n\nmsgid \"Music\"\nmsgstr \"\"\n\nmsgid \"Ornithology\"\nmsgstr \"\"\n\nmsgid \"Paleontology\"\nmsgstr \"\"\n\nmsgid 
\"Pathology\"\nmsgstr \"\"\n\nmsgid \"Pharmacy\"\nmsgstr \"\"\n\nmsgid \"Philosophy\"\nmsgstr \"\"\n\nmsgid \"Photography\"\nmsgstr \"\"\n\nmsgid \"Physics\"\nmsgstr \"\"\n\nmsgid \"Physiology\"\nmsgstr \"\"\n\nmsgid \"Printing\"\nmsgstr \"\"\n\nmsgid \"Psychology\"\nmsgstr \"\"\n\nmsgid \"Psychiatry\"\nmsgstr \"\"\n\nmsgid \"Railway\"\nmsgstr \"\"\n\nmsgid \"Shinto\"\nmsgstr \"\"\n\nmsgid \"Shogi\"\nmsgstr \"\"\n\nmsgid \"Sports\"\nmsgstr \"\"\n\nmsgid \"Statistics\"\nmsgstr \"\"\n\nmsgid \"Sumo\"\nmsgstr \"\"\n\nmsgid \"Telecommunications\"\nmsgstr \"\"\n\nmsgid \"Trademark\"\nmsgstr \"\"\n\nmsgid \"Videogame\"\nmsgstr \"\"\n\nmsgid \"Zoology\"\nmsgstr \"\"\n\n# Part of speech\nmsgid \"Godan verb\"\nmsgstr \"\"\n\nmsgid \"Irregular verb with {} ending\"\nmsgstr \"\"\n\nmsgid \"SoundFx\"\nmsgstr \"\"\n\nmsgid \"Expression\"\nmsgstr \"\"\n\nmsgid \"Counter\"\nmsgstr \"\"\n\nmsgid \"Suffix\"\nmsgstr \"\"\n\nmsgid \"Prefix\"\nmsgstr \"\"\n\nmsgid \"Particle\"\nmsgstr \"\"\n\nmsgid \"Interjection\"\nmsgstr \"\"\n\nmsgid \"Symbol\"\nmsgstr \"\"\n\nmsgid \"Pronoun\"\nmsgstr \"\"\n\nmsgid \"Auxilary\"\nmsgstr \"\"\n\nmsgid \"Numeric\"\nmsgstr \"\"\n\nmsgid \"Adverb-To\"\nmsgstr \"\"\n\nmsgid \"Adverb\"\nmsgstr \"\"\n\nmsgid \"Adjective\"\nmsgstr \"\"\n\nmsgid \"Auxilary adjective\"\nmsgstr \"\"\n\nmsgid \"Auxilary Verb\"\nmsgstr \"\"\n\nmsgid \"Verb\"\nmsgstr \"\"\n\nmsgid \"Conjugation\"\nmsgstr \"\"\n\nmsgid \"Unclassified\"\nmsgstr \"\"\n\nmsgid \"Noun or verb describing a noun\"\nmsgstr \"\"\n\nmsgid \"I adjective\"\nmsgstr \"\"\n\nmsgid \"I adjective (conjugated like いい)\"\nmsgstr \"\"\n\nmsgid \"Ku adjective\"\nmsgstr \"\"\n\nmsgid \"Na adjective\"\nmsgstr \"\"\n\nmsgid \"Formal form of na adjective\"\nmsgstr \"\"\n\nmsgid \"No adjective\"\nmsgstr \"\"\n\nmsgid \"Pre noun adjective\"\nmsgstr \"\"\n\nmsgid \"Shiku adjective\"\nmsgstr \"\"\n\nmsgid \"Taru adjective\"\nmsgstr \"\"\n\nmsgid \"Noun\"\nmsgstr \"\"\n\nmsgid \"Noun adverbial\"\nmsgstr \"\"\n\nmsgid 
\"Prefix (noun)\"\nmsgstr \"\"\n\nmsgid \"Suffix (noun)\"\nmsgstr \"\"\n\nmsgid \"Temporal noun\"\nmsgstr \"\"\n\nmsgid \"Unspecified verb\"\nmsgstr \"\"\n\nmsgid \"Intransitive verb\"\nmsgstr \"\"\n\nmsgid \"Transitive verb\"\nmsgstr \"\"\n\nmsgid \"Ichidan verb\"\nmsgstr \"\"\n\nmsgid \"Ichidan zuru verb\"\nmsgstr \"\"\n\nmsgid \"Ichidan kureru verb\"\nmsgstr \"\"\n\nmsgid \"Kuru verb\"\nmsgstr \"\"\n\nmsgid \"Noun taking suru\"\nmsgstr \"\"\n\nmsgid \"Suru verb\"\nmsgstr \"\"\n\nmsgid \"Suru special\"\nmsgstr \"\"\n\nmsgid \"Pre-noun\"\nmsgstr \"\"\n\n# this thingy -> \" \"\nmsgid \"Space\"\nmsgstr \"\"\n\n## Name search\nmsgctxt \"name_type\"\nmsgid \"Company\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Female\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Male\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Organization\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Persons name\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Place\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Product\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"(Railway)Station\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Surname\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Unknown\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Art work\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Character\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Deity\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Document\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Event\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Fiction\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Group\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Legend\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Mythology\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Object\"\nmsgstr \"Object\"\n\nmsgctxt \"name_type\"\nmsgid \"Other\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Religion\"\nmsgstr \"\"\n\nmsgctxt \"name_type\"\nmsgid \"Service\"\nmsgstr \"\"\n\n# \"No names found\"\nmsgid 
\"names\"\nmsgstr \"\"\n\n## Search help\nmsgid \"Search Help\"\nmsgstr \"\"\n\nmsgid \"No {} found\"\nmsgstr \"\"\n\nmsgid \"Your default search language might not fit your input\"\nmsgstr \"\"\n\nmsgid \"Check your search for typos\"\nmsgstr \"\"\n\nmsgid \"Use more generic search terms\"\nmsgstr \"\"\n\nmsgid \"Try finding your search in a different category using\"\nmsgstr \"\"\n\nmsgid \"Your search request might not be included in our database yet\"\nmsgstr \"\"\n\nmsgid \"If you think your search should be contained in our database, submit an issue on\"\nmsgstr \"\"\n\n# Also check our Trello board since we might be working on it\nmsgid \"Also check our\"\nmsgstr \"\"\n\nmsgid \"since we might be working on it!\"\nmsgstr \"\"\n \n# Paginator\nmsgid \"First\"\nmsgstr \"\"\n \nmsgid \"Last\"\nmsgstr \"\"\n"
  },
  {
    "path": "locales/hu.po",
    "content": "# SINGULAR\n# msgctxt \"\"\n# msgid \"\"\n# msgstr \"\"\n\n# PLURAL\n# msgctxt \"\"\n# msgid \"\"\n# msgid_plural \"\"\n# msgstr[0] \"\"\n# OPTIONAL: msgstr[1] \"\" \n\nmsgid \"\"\nmsgstr \"\"\n\"Project-Id-Version: PACKAGE VERSION\\n\"\n\"PO-Revision-Date: 2021-11-29 21:47+0100\\n\"\n\"Last-Translator:  <jojii@gmx.net>\\n\"\n\"Language-Team: English\\n\"\n\"Language: en\\n\"\n\"MIME-Version: 1.0\\n\"\n\"Content-Type: text/plain; charset=UTF-8\\n\"\n\"Content-Transfer-Encoding: 8bit\\n\"\n\"Plural-Forms: nplurals=2; plural=(n != 1);\\n\"\n\nmsgid \"Jotoba\"\nmsgstr \"\"\n\n#### Base template\nmsgid \"Search...\"\nmsgstr \"Keresés...\"\n\nmsgid \"Search\"\nmsgstr \"Keresés\"\n\nmsgid \"Settings\"\nmsgstr \"Beállítások\"\n\nmsgid \"Radicals\"\nmsgstr \"Gyökök\"\n\nmsgid \"Voice\"\nmsgstr \"Hang\"\n\n# Rad Help Msg\nmsgid \"This tool allows you to find Kanji by their core components (Radicals)\"\nmsgstr \"Ezzel az eszközzel rákereshetsz Kanjikra az alkotórészeik alapján (Gyökök)\"\n\nmsgid \"You can select Radicals below and add found Kanji to the search bar\"\nmsgstr \"Alul kiválaszthatod a gyököket és a megtalált kanjit hozzáadhatod a keresősávhoz\"\n\nmsgid \"Enter\"\nmsgstr \"\"\n\nmsgid \"to start searching\"\nmsgstr \"a keresés elkezdéséhez\"\n\n# Rad Btns\n\nmsgid \"Reset\"\nmsgstr \"Visszaállítás\"\n\nmsgid \"reset\"\nmsgstr \"visszaállítás\"\n\nmsgid \"apply\"\nmsgstr \"beállítás\"\n\nmsgid \"create\"\nmsgstr \"létrehozás\"\n\nmsgid \"here\"\nmsgstr \"itt\"\n\nmsgid \"Accept\"\nmsgstr \"Elfogadás\"\n\nmsgid \"Decline\"\nmsgstr \"Visszautasítás\"\n\n# Search type / Dropdown\nmsgid \"Words\"\nmsgstr \"Szavak\"\n\nmsgid \"Kanji\"\nmsgstr \"Kanjik\"\n\nmsgid \"Sentences\"\nmsgstr \"Mondatok\"\n\nmsgid \"Names\"\nmsgstr \"Nevek\"\n\n# Speech overlay\n\nmsgid \"Current language\"\nmsgstr \"Kiválasztott nyelv\"\n\nmsgid \"Currently listening\"\nmsgstr \"Éppen figyel\"\n\nmsgid \"No\"\nmsgstr \"Nem\"\n\nmsgid \"To change your language, select one of 
the following\"\nmsgstr \"A nyelv megváltoztatásához válaszd ki az egyiket az alábbiak közül\"\n\n# Image search overlay\n\nmsgid \"Enter a URL or upload your image directly and Jotoba will try to search for japanese words contained in the picture.\"\nmsgstr \"Írj be egy webcímet vagy tölts fel egy képet közvetlenül és a Jotoba megpróbál rákeresni a szavakra a képen\"\n\nmsgid \"Enter Image URL...\"\nmsgstr \"Add meg a kép címét...\"\n\n# Rad Picker overlay\n\nmsgid \"Select Radicals\"\nmsgstr \"Gyökök Kiválasztása\"\n\nmsgid \"Select Kanji\"\nmsgstr \"Kanji Kiválasztása\"\n\nmsgid \"Search Radicals...\"\nmsgstr \"Gyökök Keresése...\"\n\n# Notification overlay\nmsgid \"Notifications\"\nmsgstr \"Értesítések\"\n\nmsgid \"No new notifications\"\nmsgstr \"Nincs új értesítés\"\n\nmsgid \"Show all\"\nmsgstr \"Mind mutatása\"\n\nmsgid \"Close\"\nmsgstr \"Bezárás\"\n\n# Languages\n\nmsgid \"English\"\nmsgstr \"Angol\"\n\nmsgid \"German\"\nmsgstr \"Német\"\n\nmsgid \"Russian\"\nmsgstr \"Orosz\"\n\nmsgid \"Spanish\"\nmsgstr \"Spanyol\"\n\nmsgid \"Swedish\"\nmsgstr \"Svéd\"\n\nmsgid \"French\"\nmsgstr \"Francia\"\n\nmsgid \"Dutch\"\nmsgstr \"Holland\"\n\nmsgid \"Hungarian\"\nmsgstr \"Magyar\"\n\nmsgid \"Slovenian\"\nmsgstr \"Szlovák\"\n\nmsgid \"Japanese\"\nmsgstr \"Japán\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"German\"\nmsgstr \"Német\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"English\"\nmsgstr \"Angol\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Georgian\"\nmsgstr \"Grúz\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Chinese\"\nmsgstr \"Kínai\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Manchu\"\nmsgstr \"Mandzsu\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Kurdish\"\nmsgstr \"Krud\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"ChinookJargon\"\nmsgstr \"Csinuk\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Italian\"\nmsgstr \"Olasz\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Malayalam\"\nmsgstr \"Malajálam\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tibetian\"\nmsgstr \"Tibeti\"\n\nmsgctxt \"foreign_lang\"\nmsgid 
\"Mongolian\"\nmsgstr \"Mongol\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Romanian\"\nmsgstr \"Román\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Bantu\"\nmsgstr \"Bantu\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Norwegian\"\nmsgstr \"Norvég\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Icelandic\"\nmsgstr \"Izlandi\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Breton\"\nmsgstr \"Breton\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Maori\"\nmsgstr \"Maori\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Latin\"\nmsgstr \"Latin\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Amharic\"\nmsgstr \"Amhara\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Khmer\"\nmsgstr \"Khmer\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Swahili\"\nmsgstr \"Szuahéli\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Hebrew\"\nmsgstr \"Héber\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Galician\"\nmsgstr \"Galiciai\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Korean\"\nmsgstr \"Koreai\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tamil\"\nmsgstr \"Tamil\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Viatnamese\"\nmsgstr \"Vietnám\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Polish\"\nmsgstr \"Lengyel\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Sanskrit\"\nmsgstr \"Szanszkrit\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Persian\"\nmsgstr \"Perzsa\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Filipino\"\nmsgstr \"Filippínó\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Moldavian\"\nmsgstr \"Moldáv\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Croatian\"\nmsgstr \"Horvát\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Thai\"\nmsgstr \"Thai\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Burmese\"\nmsgstr \"Burmai\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Slovak\"\nmsgstr \"Szlovák\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Czech\"\nmsgstr \"Cseh\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Hindi\"\nmsgstr \"Hindi\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Mapudungun\"\nmsgstr \"Mapudungun\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Turkish\"\nmsgstr \"Török\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Hawaiian\"\nmsgstr \"Hawaii\"\n\nmsgctxt \"foreign_lang\"\nmsgid 
\"Afrikaans\"\nmsgstr \"Afrikaans\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Esperanto\"\nmsgstr \"Eszperantó\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Yiddish\"\nmsgstr \"Jiddis\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Somali\"\nmsgstr \"Szomáli\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Tahitian\"\nmsgstr \"Tahiti\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Urdu\"\nmsgstr \"Urdu\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Indonesian\"\nmsgstr \"Indonéz\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Estonian\"\nmsgstr \"Észt\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Bulgarian\"\nmsgstr \"Bolgár\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Arabic\"\nmsgstr \"Arab\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Danish\"\nmsgstr \"Dán\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Portuguese\"\nmsgstr \"Portugál\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Greek\"\nmsgstr \"Görög\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Finnish\"\nmsgstr \"Finn\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Ainu\"\nmsgstr \"Ainu\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"Algonquian\"\nmsgstr \"Algonking\"\n\nmsgctxt \"foreign_lang\"\nmsgid \"French\"\nmsgstr \"Francia\"\n\n# Settings\nmsgid \"Language\"\nmsgstr \"Nyelv\"\n\nmsgid \"Display\"\nmsgstr \"Megjelenítés\"\n\nmsgid \"General\"\nmsgstr \"Általános\"\n\nmsgid \"Default search language\"\nmsgstr \"Alapértelmezett keresési nyelv\"\n\nmsgid \"Page language\"\nmsgstr \"Weboldal nyelve\"\n\nmsgid \"Always show english results\"\nmsgstr \"Mindig angol eredmények mutatása\"\n\nmsgid \"Show english results on top\"\nmsgstr \"Angol eredmények felül\"\n\nmsgid \"Focus search bar on load\"\nmsgstr \"Keresősáv fókuszálása betöltéskor\"\n\nmsgid \"Select input on load\"\nmsgstr \"Bemenet kiválasztása betöltéskor\"\n\nmsgid \"Results per page\"\nmsgstr \"Eredmények száma egy oldalon\"\n\nmsgid \"Number...\"\nmsgstr \"Szám...\"\n\nmsgid \"Input has to be in range of 1 and 100!\"\nmsgstr \"A bemenetnek 1 és 100 között kell lennie!\"\n\nmsgid \"max amount of names/words/sentences shown per page\"\nmsgstr 
\"nevek/szavak/mondatok maximális száma egy oldalon\"\n\nmsgid \"Show Furigana\"\nmsgstr \"Furigana Mutatása\"\n\nmsgid \"Show example sentences\"\nmsgstr \"Példamondatok mutatása\"\n\nmsgid \"Items per page\"\nmsgstr \"Eredmények egy oldalon\"\n\nmsgid \"max amount of kanji shown per page\"\nmsgstr \"kanjik maximális száma egy oldalon\"\n\nmsgid \"Use dark mode\"\nmsgstr \"Sötét téma használata\"\n\nmsgid \"Default kanji animation speed\"\nmsgstr \"Alapértelmezett kanji animáció sebessége\"\n\nmsgid \"Enable Quick-Copy\"\nmsgstr \"\"\n\nmsgid \"Share usage statistics\"\nmsgstr \"Sütik Elfogadása\"\n\n# Cookie text\nmsgid \"To use this feature you have to accept to the use of cookies.\"\nmsgstr \"Ennek a funkciónak a használatához el kell fogadnod a sütiket.\"\n\nmsgid \"Your data will only be used for your personal website settings.\"\nmsgstr \"Az adataid csak a személyes beállításaidhoz lesznek felhasználva.\"\n\n# Prefix of cookie revoke text\nmsgid \"Click\"\nmsgstr \"Kattints\"\n\n# Suffix of Cookie revoke text\nmsgid \"to revoke your Cookies agreement\"\nmsgstr \"a sütik visszavonásához\"\n\n# Suffix of Cookie agree text\nmsgid \"to enable Cookies\"\nmsgstr \"a sütik engedélyezéséhez\"\n\n# Footer\nmsgid \"Jotoba wouldn't be able to exist without the help of many open-source data sources\"\nmsgstr \"A Jotoba nem jöhetett volna létre rengeteg nyílt-forráskódú adatforrás nélkül\"\n\nmsgid \"About Page\"\nmsgstr \"Rólunk Oldalunkat\"\n\n# Cookie Footer\nmsgid \"We use cookies to improve your experience and deliver personalized content.\"\nmsgstr \"Sütiket használunk az élményed javítására és a személyre szabott tartalmakhoz\"\n\nmsgid \"By using Jotoba you agree to our\"\nmsgstr \"A Jotoba használatával elfogadod a\"\n\n#  - End of the above sentence -\nmsgid \".\"  \nmsgstr \"-ünket.\"\n\nmsgid \"privacy policy\"\nmsgstr \"adatvédelmi házirend\"\n\nmsgid \"Only use necessary\"\nmsgstr \"Csak a szükségeseket\"\n\nmsgid \"Allow Cookies\"\nmsgstr \"Sütik 
Engedélyezése\"\n\n### About Page\n\nmsgid \"is a multilingual Japanese dictionary\"\nmsgstr \"egy többnyelvű japán szótár\"\n\nmsgid \"It is easy to find translations for words or kanji, see example sentences and the way names can be written.\"\nmsgstr \"Könnyen megtalálhatod szavak és kanjik fordítását, láthatsz példamondatokat és azt, hogy hogyan kell egyes neveket írni.\"\n\nmsgid \"Here are some examples on how to use this page\"\nmsgstr \"Itt van pár példa az oldal használatához\"\n\nmsgid \"Quickly change the search type by pressing\"\nmsgstr \"Gyorsan megváltoztathatod a keresési típust\"\n\nmsgid \"You can specify your search by typing\"\nmsgstr \"Egyszerűen megadhatod a keresésedet\"\n\nmsgid \"You can find verbs that are conjugated\"\nmsgstr \"Ragozott igékre is kereshetsz\"\n\nmsgid \"You can search multiple kanji at once\"\nmsgstr \"Egyszerre több kanjira is kereshetsz\"\n\nmsgid \"is open source\"\nmsgstr \"nyílt forráskódú\"\n\nmsgid \"Check out our\"\nmsgstr \"Tekintsd meg a\"\n\nmsgid \"Check out the\"\nmsgstr \"Tekintsd meg a\"\n\nmsgid \"aswell if you are interested in upcoming features and what we are currently working on\"\nmsgstr \"-nkat is, ha érdekelnek a közelgő funkciók és szeretnéd látni, hogy éppen mind dolgozunk\"\n\nmsgid \"for a list of all contributors in this project.\"\nmsgstr \"a projekt körzeműködői listájának a megtekintéséhez.\"\n\nmsgctxt \"index\"\nmsgid \"or\"\nmsgstr \"vagy\"\n\nmsgctxt \"index\"\nmsgid \"Press\"\nmsgstr \"Nyomd meg a\"\n\nmsgid \"to instantly focus the search bar\"\nmsgstr \"gombot a keresősáv fókuszálásához\"\n\n### Info / Help Page\n\nmsgid \"Shortcuts\"\nmsgstr \"Gyorsgombok\"\n\nmsgid \"To improve the quality of life on Jotoba, we offer some shortcuts to quickly navigate the page:\"\nmsgstr \"A Jotoba használatának megkönnyítésére létrehoztunk néhány gyorsgombot az oldal navigálásához:\"\n\nmsgid \"Everywhere\"\nmsgstr \"Mindenhol\"\n\nmsgid \"Quickly change between words | sentences | names | kanji 
tabs\"\nmsgstr \"Válts a szavak | mondatok | nevek | kanji lapok között\"\n\nmsgid \"Focus the search bar\"\nmsgstr \"Keresősáv fókuszálása\"\n\nmsgid \"Focussed search bar\"\nmsgstr \"Fókuszált keresősáv\"\n\nmsgid \"Iterate suggestions up | down\"\nmsgstr \"Felajánlások közötti lépés fel | le\"\n\nmsgid \"Iterate suggestions down\"\nmsgstr \"Felajánlások közötti lépés le\"\n\nmsgid \"[Words] search\"\nmsgstr \"[Szavak] keresés\"\n\nmsgid \"Play the first possible audio\"\nmsgstr \"Az első létező hanganyag lejátszása\"\n\nmsgid \"[Kanji] search\"\nmsgstr \"[Kanji] keresés\"\n\nmsgid \"Show / Collapse compounds\"\nmsgstr \"Összetételek mutatása / elrejtése\"\n\nmsgid \"To specify what kind of results your search should offer, you can use shortcuts.\"\nmsgstr \"A keresés eredményének típusának beállításához használhatsz gyorsgombokat.\"\n\nmsgid \"Hashtags should be written at end end of your input like this:\"\nmsgstr \"A hashtag-ek a keresés végére kerüljenek a következőképpen:\"\n\nmsgid \"Available Hashtags for [Words] search\"\nmsgstr \"Hashtag-ek [Szavak] kereséséhez\"\n\nmsgid \"Search for nouns\"\nmsgstr \"Főnevek keresése\"\n\nmsgid \"Search for verbs\"\nmsgstr \"Igék keresése\"\n\nmsgid \"Search for transitive verbs\"\nmsgstr \"Tranzitív igék keresése\"\n\nmsgid \"Search for intransitive verbs\"\nmsgstr \"Intranzitív igék keresése\"\n\nmsgid \"Search for adverb\"\nmsgstr \"Határozószavak keresése\"\n\nmsgid \"Search for auxilary verbs\"\nmsgstr \"Segédigék keresése\"\n\nmsgid \"Search for adjectives\"\nmsgstr \"Melléknevek keresése\"\n\nmsgid \"Search for pronouns\"\nmsgstr \"Névmások keresése\"\n\nmsgid \"Search for conjugations\"\nmsgstr \"Ragozások keresése\"\n\nmsgid \"Search for prefixes\"\nmsgstr \"Előtagok keresése\"\n\nmsgid \"Search for suffixes\"\nmsgstr \"Utótagok keresése\"\n\nmsgid \"Search for japanese particles\"\nmsgstr \"Japán partikulák keresése\"\n\nmsgid \"Lists iru/eru ending verbs which are conjugated as godan verbs\"\nmsgstr \"Godan 
igékhez hasonlóan ragozott iru/eru igék listázása\"\n\nmsgid \"Search for sfx words [comic sounds]\"\nmsgstr \"Hangutánzó szavak keresése\"\n\nmsgid \"Search for words used for counting\"\nmsgstr \"Számlálószavak keresése\"\n\nmsgid \"Search for expressions\"\nmsgstr \"Kifejezések keresése\"\n\nmsgid \"Search for words used as interjections\"\nmsgstr \"Indulatszavakként használt szavak keresése\"\n\nmsgid \"Search for numeric words\"\nmsgstr \"Numerikus szavak keresése\"\n\nmsgid \"Search for abbreviations\"\nmsgstr \"Rövidítések keresése\"\n\nmsgid \"Search for words that don't fit in any category\"\nmsgstr \"Más kategóriákba nem illő szavak keresése\"\n\nmsgid \"Search for words included in the specific JLPT level\"\nmsgstr \"JLPT szintekhez tartozó szavak keresése\"\n\nmsgid \"Search in the [words] category\"\nmsgstr \"Keresés a [szavak] kategóriában\"\n\nmsgid \"Search in the [sentences] category\"\nmsgstr \"Keresés a [mondatok] kategóriában\"\n\nmsgid \"Search in the [name] category\"\nmsgstr \"Keresés a [nevek] kategóriában\"\n\nmsgid \"Search in the [kanji] category\"\nmsgstr \"Keresés a [kanjik] kategóriában\"\n\nmsgid \"Available Hashtags for [Sentence] search\"\nmsgstr \"Hashtag-ek [Mondatok] kereséséhez\"\n\nmsgid \"Search for sentences included in the specific JLPT level\"\nmsgstr \"JLPT szintekhez tartotó mondatok keresése\"\n\nmsgid \"Hide translations by default to translate them yourself and check if its correct\"\nmsgstr \"Fordítások elrejtése, hogy lefordíthassad és ellenőrizhesd, hogy helyes-e\"\n\nmsgid \"Available Hashtags for [Kanji] search\"\nmsgstr \"Hashtag-ek [Kanjik] kereséséhez\"\n\nmsgid \"Search for kanji included in the specific Genki chapter\"\nmsgstr \"Kanjik keresése a megadott Genki fejezetben\"\n\nmsgid \"Radical search\"\nmsgstr \"Gyök keresés\"\n\nmsgid \"The radical picker allows searching for radicals to make the process of picking radicals even faster. 
The supported inputs are as following:\"\nmsgstr \"A gyökválasztóban lehet gyökökre keresni, így felgyorsítva a folyamatot. A megengedett bemenetek a következők:\"\n\nmsgid \"Results in all radicals used to build given kanji characters\"\nmsgstr \"Visszaadja a megadott kanjit felépítő gyököket\"\n\nmsgid \"Searches in words for the given query and returns in result-matching radicals\"\nmsgstr \"Rákeres a kifejezésre és visszatér az abban használt gyökökkel\"\n\nmsgid \"Tries to find the given query in radicals names, otherwise does a word search and returns the result's kanji\"\nmsgstr \"Gyököket keres a neveik alapján, vagy, ha ez sikertelen, kanjira keres\"\n\n## Name search\nmsgid \"Full name\"\nmsgstr \"Teljes név\"\n\nmsgid \"Sex\"\nmsgstr \"Nem\"\n\nmsgid \"Name origin\"\nmsgstr \"Név eredete\"\n\n## Kanji results\n\nmsgid \"Part\"\nmsgid_plural \"Parts\"\nmsgstr[0] \"Rész\"\nmsgstr[1] \"Részek\"\n\n# strokes suffix\nmsgid \"{} stroke\"\nmsgid_plural \"{} strokes\"\nmsgstr[0] \"{} stroke\"\nmsgstr[1] \"{} strokes\"\n\nmsgid \"Decomposition\"\nmsgstr \"Felbontás\"\n\nmsgid \"Radical\"\nmsgstr \"Gyök\"\n\nmsgid \"Kun\"\nmsgstr \"Kun\"\n\nmsgid \"On\"\nmsgstr \"On\"\n\nmsgid \"On reading compounds\"\nmsgstr \"On olvasat összetétel\"\n\nmsgid \"Kun reading compounds\"\nmsgstr \"Kun olvasat összetétel\"\n\nmsgid \"JLPT level\"\nmsgstr \"JLPT szint\"\n\nmsgid \"of 2500 most used kanji in newspapers\"\nmsgstr \" a 2500 újságokban leggyakrabban használt kanji közül\"\n\nmsgid \"Similar Kanji\"\nmsgstr \"Hasonló kanjik\"\n\nmsgid \"Chinese reading\"\nmsgstr \"Kínai olvasat\"\n\nmsgid \"Korean reading\"\nmsgstr \"Koreai olvasat\"\n\nmsgid \"Vietnamese reading\"\nmsgstr \"\"\n\nmsgid \"Japanese names\"\nmsgstr \"Japán nevek\"\n\n## Word results\nmsgid \"Words and kanji\"\nmsgstr \"Szavak és kanjik\"\n\nmsgid \"{} could be an inflection of {}, with this form:\"\nmsgid_plural \"{} could be an inflection of {}, with this forms:\"\nmsgstr[0] \"{} could be an inflection of 
{}, with this form:\"\nmsgstr[1] \"{} could be an inflection of {}, with this forms:\"\n\nmsgid \"Temporarily switched language to {}\"\nmsgstr \"Átmenetileg a nyelv megváltozott {}-ra/re\"\n\nmsgctxt \"inflection\"\nmsgid \"Negative\"\nmsgstr \"Negatív\"\n\nmsgctxt \"inflection\"\nmsgid \"Polite\"\nmsgstr \"Udvarias\"\n\nmsgctxt \"inflection\"\nmsgid \"Present\"\nmsgstr \"Jelen\"\n\nmsgctxt \"inflection\"\nmsgid \"Past\"\nmsgstr \"Múlt\"\n\nmsgctxt \"inflection\"\nmsgid \"TeForm\"\nmsgstr \"Te alak\"\n\nmsgctxt \"inflection\"\nmsgid \"Potential\"\nmsgstr \"Feltételes\"\n\nmsgctxt \"inflection\"\nmsgid \"Passive\"\nmsgstr \"Szenvedő\"\n\nmsgctxt \"inflection\"\nmsgid \"Causative\"\nmsgstr \"Okhatározó\"\n\nmsgctxt \"inflection\"\nmsgid \"PotentialOrPassive\"\nmsgstr \"Potenciális vagy szenvedő\"\n\nmsgctxt \"inflection\"\nmsgid \"Imperative\"\nmsgstr \"Felszólító\"\n\nmsgctxt \"inflection\"\nmsgid \"Tai\"\nmsgstr \"たい (Vágyat, kívánságot fejez ki)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeIru\"\nmsgstr \"ている (Egy jelenleg folyamatban lévő cselekvést fejez ki)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeAru\"\nmsgstr \"てある (Egy befejezett cselekvést fejez ki)\"\n\nmsgctxt \"inflection\"\nmsgid \"TeMiru\"\nmsgstr \"てみる (Egy cselekvés megpróbálását fejezi ki)\"\n\nmsgctxt \"inflection\"\nmsgid \"Tara\"\nmsgstr \"たら (Egy feltételt fejez ki)\"\n\nmsgid \", with this form:\"\nmsgid_plural \", with these forms:\"\nmsgstr[0] \", ebben a formában:\"\nmsgstr[1] \", ezekben a formákban:\"\n\nmsgid \"Taught in {} grade\"\nmsgstr \"{}. 
évben oktatott\"\n\nmsgid \"Show Conjugations\"\nmsgstr \"Ragozás Mutatása\"\n\nmsgid \"Show collocation\"\nmsgid_plural \"Show collocations\"\nmsgstr[0] \"Besorolás mutatása\"\nmsgstr[1] \"Besorolások mutatása\"\n\nmsgid \"Collocations\"\nmsgstr \"Besorolások\"\n\nmsgid \"Conjugations\"\nmsgstr \"Ragozások\"\n\nmsgid \"Antonym of {}\"\nmsgstr \"Ellentetje: {}\"\n\nmsgid \"See also {}\"\nmsgstr \"Lásd még {}\"\n\nmsgid \"Pitch accent\"\nmsgstr \"Zenei hangsúly\"\n\nmsgid \"Other forms\"\nmsgstr \"Más formák\"\n\nmsgid \"Affirmative\"\nmsgstr \"Igenlő\"\n\nmsgid \"Negative\"\nmsgstr \"Negatív\"\n\nmsgid \"Present\"\nmsgstr \"Jelen\"\n\nmsgid \"Present, polite\"\nmsgstr \"Jelen, udvarias\"\n\nmsgid \"Past\"\nmsgstr \"Múlt\"\n\nmsgid \"Past, polite\"\nmsgstr \"Múlt, udvarias\"\n\nmsgid \"Te-form\"\nmsgstr \"Te alak\"\n\nmsgid \"Potential\"\nmsgstr \"Feltételes\"\n\nmsgid \"Passive\"\nmsgstr \"Szenvedő\"\n\nmsgid \"Causative\"\nmsgstr \"Okhatározó\"\n\nmsgid \"Causative Passive\"\nmsgstr \"Szenvedő Okhatározó\"\n\nmsgid \"Imperative\"\nmsgstr \"Felszólító\"\n\nmsgid \"Play audio\"\nmsgstr \"Hanganyag lejátszása\"\n\nmsgid \"common word\"\nmsgstr \"gyakori szó\"\n\nmsgid \"JLPT N{}\"\nmsgstr \"\"\n\nmsgid \"Download audio\"\nmsgstr \"Hanganyag letöltése\"\n\nmsgid \"Sentence search\"\nmsgstr \"Mondatok keresése\"\n\nmsgid \"Direct reference\"\nmsgstr \"Közvetlen hivatkozás\"\n\n# \"no words found\"\nmsgid \"words\"\nmsgstr \"szó\"\n\n# gairaigo\nmsgid \"From {}: {}\"\nmsgstr \"{}-ból/ből: {}\"\n\n## Sentence search\n\nmsgid \"hide\"\nmsgstr \"elrejtés\"\n\nmsgid \"show\"\nmsgstr \"mutatás\"\n\n# \"No sentences found\"\nmsgid \"sentences\"\nmsgstr \"mondat\"\n\n## About page\n\n# Title 1\nmsgid \"About\"\nmsgstr \"Rólunk\"\n\nmsgid \"Jotoba is a multilingual Japanese dictionary. It is easy to find translations for words or kanji, see example sentences and the way names can be written.\"\nmsgstr \"A Jotoba egy többnyelvű japán szótár. 
Könnyen megtalálhatod szavak és kanjik fordítását, láthatsz példamondatokat és azt, hogy hogyan kell egyes neveket írni.\"\n\nmsgid \"Jotoba is open source. Check out our\"\nmsgstr \"A Jotoba nyílt forráskódú. Tekintsd meg a\"\n\nmsgid \"Github page\"\nmsgstr \"Github oldalunkat\"\n\nmsgid \"if you want to contribute or host Jotoba yourself.\"\nmsgstr \"ha szeretnél közreműködni, vagy magad futtatni a Jotoba-t.\"\n\nmsgid \"Trello Board\"\nmsgstr \"Trello Táblánk\"\n\nmsgid \"aswell if you are interested in upcoming features and what we are currently working on!\"\nmsgstr \"is, ha érdekelnek a közelgő funkciók és, hogy min dolgozunk éppen!\"\n\n# Title 2\nmsgid \"Data Sources and Inspiration\"\nmsgstr \"Adatforrások és Inspirációk\"\n\nmsgid \"Of course this project wouldn't have been possible without the help of some great data sources.\"\nmsgstr \"Természetesen ez a projekt nem lett volna lehetséges néhány remek adatforrás nélkül.\"\n\nmsgid \"Many thanks to every one of them for providing such a variety of data people can use to learn the japanese language.\"\nmsgstr \"Ezer köszönet mindegyikőjüknek az általuk biztosított adatokért, hogy mindenki használhassa japánul tanuláshoz.\"\n\n# Source\nmsgid \"Jisho\"\nmsgstr \"Jisho\"\n\nmsgid \"Joto-kun\"\nmsgstr \"Joto-kun\"\n\nmsgid \"Joto-kun was created by a good friend of ours who is truly a wizard when it comes down to design!\"\nmsgstr \"Joto-kunt egy jóbarátunk készítette, aki varázsolni tud, ha dizájnról van szó!\"\n\nmsgid \"Jisho, created by Kim Ahlström, Miwa Ahlström and Andrew Plummer is a pretty and powerful english-japanese dictionary.\"\nmsgstr \"Jisho, Kim Ahlström, Miwa Ahlström és Andrew Plummer által egy gyönyörű és erőteljes angol-japán szótár.\"\n\nmsgid \"We took inspiration in their work and design to improve on their concept and offer it to a wider variety of people.\"\nmsgstr \"Ihletet merítettünk a munkájukból és dizájnukból, hogy javíthassunk az ötletükön és egy szélesebb körben 
felajánlhassuk.\"\n\n# Source\nmsgid \"Words (except sound effects), Kanji and Names available on this site are publicly provided and maintained by\"\nmsgstr \"Az ezen az oldalon található szavakat (a hangutánzószavak kivételével), kanjikat és neveket publikusan elérhetővé tette és karbantartja az\"\n\nmsgid \"and available under the license\"\nmsgstr \"és az alábbi licensz alatt publikálta:\"\n\nmsgid \"Additionally, the RADKFILE by Jin Breen is used to link Radicals to Kanji.\"\nmsgstr \"Ezenkívül a Jim Breen által készített RADKFILE segítségével társítunk gyököket a kanjikhoz.\"\n\n# Source\nmsgid \"Audio Files\"\nmsgstr \"Hanganyagok\"\n\nmsgid \"The audio files #1 were graciously made public by\"\nmsgstr \"Az #1-es hanganyagokat kegyesen publikusan elérhetővé tette a\"\n\nmsgid \"WaniKani\"\nmsgstr \"WaniKani\"\n\nmsgid \"and\"\nmsgstr \"és\"\n\nmsgid \"Tofugo\"\nmsgstr \"Tofugu\"\n\nmsgid \"and uploaded to Github under the CC-BY-4.0 licence.\"\nmsgstr \"és feltöltötték Githubra CC-BY-4.0 licensz alatt.\"\n\nmsgid \"The audio files #2 are provided by the\"\nmsgstr \"A #2-es hanganyagokat a\"\n\nmsgid \"Kanji alive project\"\nmsgstr \"Kanji alive projekt\"\n\nmsgid \"and are also available under the CC-BY-4.0 license.\"\nmsgstr \"biztosította és tette elérhetővé CC-BY-4.0 licensz alatt.\"\n\nmsgid \"Manga Sound Effects\"\nmsgstr \"Manga Hang Effektek\"\n\nmsgid \"The data about Sound Effects is graciously provided by Chris Kincaid and is used as additional data in the word search.\"\nmsgstr \"A hangeffektekről szóló adatokat Chris Kincaid biztosította, hogy megjeleníthessük a szókeresés közben.\"\n\n# Source\nmsgid \"Sentences are provided by Tatoeba under the Creative Commons CC 1.0 and 2.0 licences. \"\nmsgstr \"A mondatokat a Tatoeba biztosította Creative Commons CC 1.0 és 2.0 licensz alatt. 
\"\n\n# Source\nmsgid \"Kanji Animations\"\nmsgstr \"Kanji Animációk\"\n\nmsgid \"The raw data used for kanji animations is publicly provided by KanjiVG, a project by Ulrich Apel.\"\nmsgstr \"A nyers adatokat a kanji animációkhoz az Ulrich Apel által létrehozott KanjiVG biztosította.\"\n\nmsgid \"The conversion into images and animated SVG is done by a ruby script which was made by\"\nmsgstr \"A képekké és SVG-ké alakítást végző ruby szkript készítette\"\n\nmsgid \"Kimtaro\"\nmsgstr \"Kimtaro\"\n\nmsgid \"and altered by\"\nmsgstr \"és módosította\"\n\nmsgid \"Yukáru\"\nmsgstr \"Yukáru\"\n\n# Source\nmsgid \"JLPT Data\"\nmsgstr \"JLPT Adatok\"\n\nmsgid \"Data about JLPT proficiencies are by provided by Jonathan Waller.\"\nmsgstr \"A JLPT szintek adatait Jonathan Waller biztosította.\"\n\nmsgid \"There is also some non-free data available on his website, so check it out if you are interested.\"\nmsgstr \"A weboldalán elérhető néhány nem ingyenes adatforrás is, úgyhogy nézz be, ha érdekel.\"\n\n# Source\nmsgid \"Word tokenization\"\nmsgstr \"Szó elemekre bontása\"\n\nmsgid \"Word tokenization is done using UniDic, by the UniDic Consortium and used for Japanese morphological analysis implementations.\"\nmsgstr \"Word tokenization is done using UniDic, by the UniDic Consortium and used for Japanese morphological analysis implementations.\"\n\n#Source\nmsgid \"Pitch accents\"\nmsgstr \"\"\n\nmsgid \"Data about Radicals used in specific Kanji are provided by Kanjium.\"\nmsgstr \"\"\n\nmsgid \"On the project's Github Page you can find lots of data about Kanji.\"\nmsgstr \"A projekt weboldalán rengeteg, kanjikról szóló adatot találhatsz.\"\n\nmsgid \"Pitch accent data has been extracted from UniDic.\"\nmsgstr \"\"\n\n## Jmdict\n\n# Dialect(s)\nmsgid \"{} dialect\"\nmsgstr \"{} dialektus\"\n\n# Information\nmsgid \"ateji\"\nmsgstr \"ateji\"\n\nmsgid \"irregular kana\"\nmsgstr \"szabálytalan kana\"\n\nmsgid \"irregular kanji\"\nmsgstr \"szabálytalan kanji\"\n\nmsgid 
\"irregular okurigana\"\nmsgstr \"szabálytalan okurigana\"\n\nmsgid \"outdated kana\"\nmsgstr \"elavult kana\"\n\nmsgid \"outdated kanji\"\nmsgstr \"elavult kanji\"\n\nmsgid \"gikun\"\nmsgstr \"gikun\"\n\nmsgid \"usually written in kana\"\nmsgstr \"általában kanával írják\"\n\nmsgid \"rarely used kanji form\"\nmsgstr \"ritkán használt kanji forma\"\n\n# Misc\n\nmsgid \"Abbreviation\"\nmsgstr \"Rövidítés\"\n\nmsgid \"Archaism\"\nmsgstr \"Régies kifejezés\"\n\nmsgid \"Character\"\nmsgstr \"Karakter\"\n\nmsgid \"Childrens language\"\nmsgstr \"Gyereknyelv\"\n\nmsgid \"Colloquialism\"\nmsgstr \"Köznyelvi kifejezés\"\n\nmsgid \"Company name\"\nmsgstr \"Cégnév\"\n\nmsgid \"Creature\"\nmsgstr \"Élőlény\"\n\nmsgid \"Dated term\"\nmsgstr \"Régimódi kifejezés\"\n\nmsgid \"Deity\"\nmsgstr \"Istenség\"\n\nmsgid \"Derogatory\"\nmsgstr \"Lekicsinylő\"\n\nmsgid \"Event\"\nmsgstr \"Esemény\"\n\nmsgid \"Document\"\nmsgstr \"Dokumentum\"\n\nmsgid \"Familiar language\"\nmsgstr \"Intim nyelv\"\n\nmsgid \"Female term/language\"\nmsgstr \"Női kifejezés/nyelv\"\n\nmsgid \"Fiction\"\nmsgstr \"Fikció\"\n\nmsgid \"Given name\"\nmsgstr \"Keresztnév\"\n\nmsgid \"Group\"\nmsgstr \"Csoport\"\n\nmsgid \"Historical term\"\nmsgstr \"Történelmi kifejezés\"\n\nmsgid \"Honorific language\"\nmsgstr \"Megtisztelő nyelvezet\"\n\nmsgid \"Humble language\"\nmsgstr \"Szerény nyelvezet\"\n\nmsgid \"Idiomatic expression\"\nmsgstr \"Szólás\"\n\nmsgid \"Jocular humorous term\"\nmsgstr \"Tréfás kifejezés\"\n\nmsgid \"Legend\"\nmsgstr \"Legenda\"\n\nmsgid \"Literary/formal term\"\nmsgstr \"Irodalmi/hivatalos kifejezés\"\n\nmsgid \"Manga slang\"\nmsgstr \"Manga szleng\"\n\nmsgid \"Male term/language\"\nmsgstr \"Férfi nyelvezet\"\n\nmsgid \"Mythology\"\nmsgstr \"Mitológia\"\n\nmsgid \"Internet slang\"\nmsgstr \"Internet szleng\"\n\nmsgid \"Object\"\nmsgstr \"Tárgy\"\n\nmsgid \"Obsolete term\"\nmsgstr \"Elavult kifejezés\"\n\nmsgid \"Obscure term\"\nmsgstr \"Homályos kifejezés\"\n\nmsgid \"Onomatopoetic or mimetic 
word\"\nmsgstr \"Hangutánzó szó\"\n\nmsgid \"Organization name\"\nmsgstr \"Szervezet neve\"\n\nmsgid \"Other\"\nmsgstr \"Egyéb\"\n\nmsgid \"Person name\"\nmsgstr \"Személynév\"\n\nmsgid \"Place name\"\nmsgstr \"Helynév\"\n\nmsgid \"Poetical term\"\nmsgstr \"Költői kifejezés\"\n\nmsgid \"Polite language\"\nmsgstr \"Udvarias nyelv\"\n\nmsgid \"Product name\"\nmsgstr \"Termék név\"\n\nmsgid \"Proverb\"\nmsgstr \"Közmondás\"\n\nmsgid \"Qutation\"\nmsgstr \"Idézet\"\n\nmsgid \"Rare\"\nmsgstr \"Ritka\"\n\nmsgid \"Religion\"\nmsgstr \"Vallás\"\n\nmsgid \"Sensitive\"\nmsgstr \"Érzékeny\"\n\nmsgid \"Service\"\nmsgstr \"Szolgáltatás\"\n\nmsgid \"Slang\"\nmsgstr \"Szleng\"\n\nmsgid \"Railway station\"\nmsgstr \"Vasútállomás\"\n\nmsgid \"Family or surname\"\nmsgstr \"Vezetéknév\"\n\nmsgid \"Usually written in kana\"\nmsgstr \"Általában kanával írják\"\n\nmsgid \"Unclassified name\"\nmsgstr \"Rendszerezetlen név\"\n\nmsgid \"Vulgar expression/word\"\nmsgstr \"Vulgáris kifejezés/szó\"\n\nmsgid \"Artwork\"\nmsgstr \"Műalkotás\"\n\nmsgid \"Rude/x-rated term\"\nmsgstr \"Sértő/X-besorolású kifejezés\"\n\nmsgid \"Yojijukugo\"\nmsgstr \"Yojijukugo\"\n\n# Fields\n\nmsgid \"{} term\"\nmsgstr \"{} kifejezés\"\n\nmsgid \"Agriculture\"\nmsgstr \"Mezőgazdaság\"\n\nmsgid \"Anatomy\"\nmsgstr \"Anatómia\"\n\nmsgid \"Archeology\"\nmsgstr \"Régészet\"\n\nmsgid \"Architecture\"\nmsgstr \"Építészet\"\n\nmsgid \"Art aesthetics\"\nmsgstr \"Művészet\"\n\nmsgid \"Astronomy\"\nmsgstr \"Űrtudomány\"\n\nmsgid \"Audio/visual\"\nmsgstr \"Audiovizuális\"\n\nmsgid \"Aviation\"\nmsgstr \"Repülés\"\n\nmsgid \"Baseball\"\nmsgstr \"Baseball\"\n\nmsgid \"Biochemistry\"\nmsgstr \"Biokémia\"\n\nmsgid \"Biology\"\nmsgstr \"Biológia\"\n\nmsgid \"Botany\"\nmsgstr \"Botanika\"\n\nmsgid \"Buddhism\"\nmsgstr \"Buddhizmus\"\n\nmsgid \"Business\"\nmsgstr \"Biznisz\"\n\nmsgid \"Chemistry\"\nmsgstr \"Kémia\"\n\nmsgid \"Christianity\"\nmsgstr \"Kereszténység\"\n\nmsgid \"Computing\"\nmsgstr \"Számítógépes tudomány\"\n\nmsgid 
\"Clothing\"\nmsgstr \"Ruházat\"\n\nmsgid \"Crystallography\"\nmsgstr \"Kristálytan\"\n\nmsgid \"Ecology\"\nmsgstr \"Ökológia\"\n\nmsgid \"Economics\"\nmsgstr \"Ökonómia\"\n\nmsgid \"Electricity\"\nmsgstr \"Elektromosság\"\n\nmsgid \"Electronics\"\nmsgstr \"Elektronika\"\n\nmsgid \"Embryology\"\nmsgstr \"Embriológia\"\n\nmsgid \"Engineering\"\nmsgstr \"Gépészet\"\n\nmsgid \"Entomology\"\nmsgstr \"Entomológia\"\n\nmsgid \"Finance\"\nmsgstr \"Pénzügytan\"\n\nmsgid \"Fishing\"\nmsgstr \"Horgászat\"\n\nmsgid \"FoodCooking\"\nmsgstr \"Főzés\"\n\nmsgid \"Gardening\"\nmsgstr \"Kertészkedés\"\n\nmsgid \"Genetics\"\nmsgstr \"Genetika\"\n\nmsgid \"Geography\"\nmsgstr \"Földrajz\"\n\nmsgid \"Geology\"\nmsgstr \"Geológia\"\n\nmsgid \"Geometry\"\nmsgstr \"Geometria\"\n\nmsgid \"Go (game)\"\nmsgstr \"Go (játék)\"\n\nmsgid \"Golf\"\nmsgstr \"Golf\"\n\nmsgid \"Grammar\"\nmsgstr \"Nyelvtan\"\n\nmsgid \"Greek mythology\"\nmsgstr \"Görög mitológia\"\n\nmsgid \"Hanafuda\"\nmsgstr \"Hanafuda\"\n\nmsgid \"Horseracing\"\nmsgstr \"Lóverseny\"\n\nmsgid \"Law\"\nmsgstr \"Jog\"\n\nmsgid \"Linguistics\"\nmsgstr \"Nyelvészet\"\n\nmsgid \"Logic\"\nmsgstr \"Logika\"\n\nmsgid \"Martial arts\"\nmsgstr \"Harcművészet\"\n\nmsgid \"Mahjong\"\nmsgstr \"Mahjong\"\n\nmsgid \"Mathematics\"\nmsgstr \"Matematika\"\n\nmsgid \"MechanicalEngineering\"\nmsgstr \"Gépészmérnök\"\n\nmsgid \"Medicine\"\nmsgstr \"Orvostudomány\"\n\nmsgid \"Climate/weather\"\nmsgstr \"Időjárás\"\n\nmsgid \"Military\"\nmsgstr \"Katonaság\"\n\nmsgid \"Music\"\nmsgstr \"Zene\"\n\nmsgid \"Ornithology\"\nmsgstr \"Ornitológia\"\n\nmsgid \"Paleontology\"\nmsgstr \"Paleontológia\"\n\nmsgid \"Pathology\"\nmsgstr \"Patológia\"\n\nmsgid \"Pharmacy\"\nmsgstr \"Gyógyszerészet\"\n\nmsgid \"Philosophy\"\nmsgstr \"Filozófia\"\n\nmsgid \"Photography\"\nmsgstr \"Fényképezés\"\n\nmsgid \"Physics\"\nmsgstr \"Fizika\"\n\nmsgid \"Physiology\"\nmsgstr \"Fiziológia\"\n\nmsgid \"Printing\"\nmsgstr \"Nyomtatás\"\n\nmsgid \"Psychology\"\nmsgstr 
\"Pszichológia\"\n\nmsgid \"Psychiatry\"\nmsgstr \"Pszichiátria\"\n\nmsgid \"Railway\"\nmsgstr \"Vasút\"\n\nmsgid \"Shinto\"\nmsgstr \"Shinto\"\n\nmsgid \"Shogi\"\nmsgstr \"Shogi\"\n\nmsgid \"Sports\"\nmsgstr \"Sport\"\n\nmsgid \"Statistics\"\nmsgstr \"Statisztika\"\n\nmsgid \"Sumo\"\nmsgstr \"Szumó\"\n\nmsgid \"Telecommunications\"\nmsgstr \"Telekommunikáció\"\n\nmsgid \"Trademark\"\nmsgstr \"Védjegy\"\n\nmsgid \"Videogame\"\nmsgstr \"Videójáték\"\n\nmsgid \"Zoology\"\nmsgstr \"Zoológia\"\n\n# Part of speech\nmsgid \"Godan verb\"\nmsgstr \"Godan ige\"\n\nmsgid \"Irregular verb with {} ending\"\nmsgstr \"Szabálytalan ige {} végződéssel\"\n\nmsgid \"SoundFx\"\nmsgstr \"Hangeffekt\"\n\nmsgid \"Expression\"\nmsgstr \"Kifejezés\"\n\nmsgid \"Counter\"\nmsgstr \"Számlálószó\"\n\nmsgid \"Suffix\"\nmsgstr \"Utótag\"\n\nmsgid \"Prefix\"\nmsgstr \"Előtag\"\n\nmsgid \"Particle\"\nmsgstr \"Partikula\"\n\nmsgid \"Interjection\"\nmsgstr \"Indulatszó\"\n\nmsgid \"Symbol\"\nmsgstr \"Szimbólum\"\n\nmsgid \"Pronoun\"\nmsgstr \"Névmás\"\n\nmsgid \"Auxilary\"\nmsgstr \"Segédige\"\n\nmsgid \"Numeric\"\nmsgstr \"Szám\"\n\nmsgid \"Adverb-To\"\nmsgstr \"To Határozószó\"\n\nmsgid \"Adverb\"\nmsgstr \"Határozószó\"\n\nmsgid \"Adjective\"\nmsgstr \"Melléknév\"\n\nmsgid \"Auxilary adjective\"\nmsgstr \"Segédmelléknév\"\n\nmsgid \"Auxilary Verb\"\nmsgstr \"Segédige\"\n\nmsgid \"Verb\"\nmsgstr \"Ige\"\n\nmsgid \"Conjugation\"\nmsgstr \"Ragozás\"\n\nmsgid \"Unclassified\"\nmsgstr \"Csoportosítatlan\"\n\nmsgid \"Noun or verb describing a noun\"\nmsgstr \"Főnév, vagy főnevet leíró ige\"\n\nmsgid \"I adjective\"\nmsgstr \"I melléknév\"\n\nmsgid \"I adjective (conjugated like いい)\"\nmsgstr \"I melléknév (いい-ként ragozva)\"\n\nmsgid \"Ku adjective\"\nmsgstr \"Ku melléknév\"\n\nmsgid \"Na adjective\"\nmsgstr \"Na melléknév\"\n\nmsgid \"Formal form of na adjective\"\nmsgstr \"Na melléknév hivatalos alakja\"\n\nmsgid \"No adjective\"\nmsgstr \"No melléknév\"\n\nmsgid \"Pre noun adjective\"\nmsgstr 
\"Főnév előtti melléknév\"\n\nmsgid \"Shiku adjective\"\nmsgstr \"Shiku melléknév\"\n\nmsgid \"Taru adjective\"\nmsgstr \"Taru melléknév\"\n\nmsgid \"Noun\"\nmsgstr \"Főnév\"\n\nmsgid \"Noun adverbial\"\nmsgstr \"Határozói Főnév\"\n\nmsgid \"Prefix (noun)\"\nmsgstr \"Előtag (főnév)\"\n\nmsgid \"Suffix (noun)\"\nmsgstr \"Utótag (főnév)\"\n\nmsgid \"Temporal noun\"\nmsgstr \"Időt leíró főnév\"\n\nmsgid \"Unspecified verb\"\nmsgstr \"Csoportosítatlan ige\"\n\nmsgid \"Intransitive verb\"\nmsgstr \"Intransitív ige\"\n\nmsgid \"Transitive verb\"\nmsgstr \"Tranzitív ige\"\n\nmsgid \"Ichidan verb\"\nmsgstr \"Ichidan ige\"\n\nmsgid \"Ichidan zuru verb\"\nmsgstr \"Ichidan zuru ige\"\n\nmsgid \"Ichidan kureru verb\"\nmsgstr \"Ichidan kureru ige\"\n\nmsgid \"Kuru verb\"\nmsgstr \"Kuru ige\"\n\nmsgid \"Noun taking suru\"\nmsgstr \"Suru-t felvevő főnév\"\n\nmsgid \"Suru verb\"\nmsgstr \"Suru ige\"\n\nmsgid \"Suru special\"\nmsgstr \"Speciális suru\"\n\nmsgid \"Pre-noun\"\nmsgstr \"Főnév előtti\"\n\n# this thingy -> \" \"\nmsgid \"Space\"\nmsgstr \"Szóköz\"\n\n## Name search\nmsgctxt \"name_type\"\nmsgid \"Company\"\nmsgstr \"Cég\"\n\nmsgctxt \"name_type\"\nmsgid \"Female\"\nmsgstr \"Női\"\n\nmsgctxt \"name_type\"\nmsgid \"Male\"\nmsgstr \"Férfi\"\n\nmsgctxt \"name_type\"\nmsgid \"Organization\"\nmsgstr \"Szervezet\"\n\nmsgctxt \"name_type\"\nmsgid \"Persons name\"\nmsgstr \"Személy\"\n\nmsgctxt \"name_type\"\nmsgid \"Place\"\nmsgstr \"Hely\"\n\nmsgctxt \"name_type\"\nmsgid \"Product\"\nmsgstr \"Termék\"\n\nmsgctxt \"name_type\"\nmsgid \"(Railway)Station\"\nmsgstr \"Vasútállomás\"\n\nmsgctxt \"name_type\"\nmsgid \"Surname\"\nmsgstr \"Vezetéknév\"\n\nmsgctxt \"name_type\"\nmsgid \"Unknown\"\nmsgstr \"Ismeretlen\"\n\nmsgctxt \"name_type\"\nmsgid \"Art work\"\nmsgstr \"Műalkotás\"\n\nmsgctxt \"name_type\"\nmsgid \"Character\"\nmsgstr \"Karakter\"\n\nmsgctxt \"name_type\"\nmsgid \"Deity\"\nmsgstr \"Istenség\"\n\nmsgctxt \"name_type\"\nmsgid \"Document\"\nmsgstr 
\"Dokumentum\"\n\nmsgctxt \"name_type\"\nmsgid \"Event\"\nmsgstr \"Esemény\"\n\nmsgctxt \"name_type\"\nmsgid \"Fiction\"\nmsgstr \"Fikció\"\n\nmsgctxt \"name_type\"\nmsgid \"Group\"\nmsgstr \"Csoport\"\n\nmsgctxt \"name_type\"\nmsgid \"Legend\"\nmsgstr \"Legenda\"\n\nmsgctxt \"name_type\"\nmsgid \"Mythology\"\nmsgstr \"Mitológia\"\n\nmsgctxt \"name_type\"\nmsgid \"Object\"\nmsgstr \"Tárgy\"\n\nmsgctxt \"name_type\"\nmsgid \"Other\"\nmsgstr \"Egyéb\"\n\nmsgctxt \"name_type\"\nmsgid \"Religion\"\nmsgstr \"Vallás\"\n\nmsgctxt \"name_type\"\nmsgid \"Service\"\nmsgstr \"Szolgáltatás\"\n\n# \"No names found\"\nmsgid \"names\"\nmsgstr \"név\"\n\n## Search help\nmsgid \"Search Help\"\nmsgstr \"Keresési segéd\"\n\nmsgid \"No {} found\"\nmsgstr \"Nincs a keresésnek megfelelő {}\"\n\nmsgid \"Your default search language might not fit your input\"\nmsgstr \"Az alapértelmezett keresési nyelved lehet, hogy nem passzol a bemenethez\"\n\nmsgid \"Check your search for typos\"\nmsgstr \"Ellenőrizd, hogy nem írtad-e el a keresési kifejezésedet\"\n\nmsgid \"Use more generic search terms\"\nmsgstr \"Használj általánosabb keresési kifejezéseket\"\n\nmsgid \"Try finding your search in a different category using\"\nmsgstr \"Próbáld a kifejezésedet egy másik kategóriában megkeresni:\"\n\nmsgid \"Your search request might not be included in our database yet\"\nmsgstr \"Lehet, hogy az adatbázisunk még nem tartalmazza a keresési kifejezésedet\"\n\nmsgid \"If you think your search should be contained in our database, submit an issue on\"\nmsgstr \"Ha úgy gondolod, hogy az adatbázisunknak tartalmaznia kéne a keresésedet, küldj be egy kérelmet:\"\n\n# Also check our Trello board since we might be working on it\nmsgid \"Also check our\"\nmsgstr \"Ellenőrizd a\"\n\nmsgid \"since we might be working on it!\"\nmsgstr \", hátha épp dolgozunk rajta\"\n \n# Paginator\nmsgid \"First\"\nmsgstr \"Első\"\n \nmsgid \"Last\"\nmsgstr \"Utolsó\"\n"
  },
  {
    "path": "rustfmt.toml",
    "content": "edition=\"2021\"\n"
  },
  {
    "path": "scripts/gen_locales.sh",
    "content": "#!/bin/bash\n\nfor i in `ls ./locales | grep \\.po`;do msgfmt ./locales/$i -o /tmp/${i%.po}.mo;done\nmv /tmp/*.mo ./locales\n"
  }
]