[
  {
    "path": ".editorconfig",
    "content": "root = true\n\n[*]\ncharset = utf-8\nend_of_line = lf\nindent_style = space\nindent_size = 4\ninsert_final_newline = true\ntrim_trailing_whitespace = true\n"
  },
  {
    "path": ".gitattributes",
    "content": "*               text=auto eol=lf\n*.{cmd,[cC][mM][dD]} text eol=crlf\n*.{bat,[bB][aA][tT]} text eol=crlf\n\nCONTRIBUTORS.md merge=union\nREADME.md       text\nLICENSE         text\n\n*.css           text\n*.eot           binary\n*.gif           binary\n*.html          text diff=html\n*.ico           binary\n*.*ignore       text\n*.jpg           binary\n*.js            text\n*.json          text\n*.lock          text -diff\n*.map           text -diff\n*.md            text\n*.otf           binary\n*.png           binary\n*.py            text diff=python\n*.svg           binary\n*.ts            text\n*.ttf           binary\n*.sass          text\n*.webp          binary\n*.woff          binary\n*.woff2         binary\n\n.editorconfig   text\n.gitattributes  export-ignore\n.gitignore      export-ignore\n\n*.gitattributes linguist-language=gitattributes\nlocales/*.json merge=union\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "content": "---\nname: Bug report\nabout: Create a bug report\ntitle: ''\nlabels: bug\nassignees: ''\n---\n\n**Describe the bug**\n\n<!-- A clear and concise description of what the bug is. -->\n\n**To Reproduce**\n\n<!-- Steps to reproduce the behavior: -->\n\n1. Go to '...'\n2. Click on '....'\n3. Scroll down to '....'\n4. See error\n\n**Expected behavior**\n\n<!-- A clear and concise description of what you expected to happen. -->\n\n**Logs**\n\n<!-- Please paste any log errors. -->\n\n**Screenshots**\n\n<!-- If applicable, add screenshots to help explain your problem. -->\n\n**System (please complete the following information):**\n\n-   OS: [e.g. Docker, Debian, Windows]\n-   Browser: [e.g. Firefox, Chrome, Safari]\n-   Jellyfin version: [e.g. 10.0.1]\n-   Cast Receiver version: [e.g. Stable, Unstable]\n-   Cast client: [e.g. Ultra]\n\n**Additional context**\n\n<!-- Add any other context about the problem here. -->\n"
  },
  {
    "path": ".github/workflows/lint.yaml",
    "content": "name: Lint\n\non:\n    push:\n        branches:\n            - master\n    pull_request:\n        branches:\n            - master\n\njobs:\n    lint:\n        name: Lint TS and CSS\n        runs-on: ubuntu-latest\n\n        steps:\n            - name: Checkout\n              uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n\n            - name: Setup node env\n              uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0\n              with:\n                  node-version: 20\n\n            - name: Install dependencies\n              run: npm ci --no-audit\n\n            - name: Build for production\n              run: npm run build\n\n            - name: Run ESLint\n              run: npm run lint\n"
  },
  {
    "path": ".github/workflows/publish.yaml",
    "content": "name: Publish\n\non:\n    push:\n        branches:\n            - master\n        tags:\n            - '*'\n    pull_request:\n        branches:\n            - master\n\njobs:\n    build:\n        name: Build\n        runs-on: ubuntu-latest\n\n        steps:\n            - name: Checkout\n              uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n\n            - name: Setup node env\n              uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0\n              with:\n                  node-version: 20\n\n            - name: Install dependencies\n              run: npm ci --no-audit\n\n            - name: Update version in package.json\n              run: |\n                  PACKAGE_JSON=$(jq --indent 4 \".version += \\\"-$GITHUB_SHA\\\"\" package.json)\n                  echo $PACKAGE_JSON > package.json\n\n            - name: Build\n              run: npm run build\n\n            - name: Prepare artifacts\n              run: |\n                  test -d dist\n                  mv dist jellyfin-chromecast\n                  zip -r \"jellyfin-chromecast.zip\" \"jellyfin-chromecast\"\n\n            - name: Upload artifacts\n              uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1\n              with:\n                  name: jellyfin-chromecast\n                  path: jellyfin-chromecast.zip\n                  if-no-files-found: error\n\n    publish:\n        name: Publish\n        runs-on: ubuntu-latest\n        if: ${{ contains(github.repository_owner, 'jellyfin') && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags')) }}\n        needs: [build]\n\n        steps:\n            - name: Set JELLYFIN_VERSION to git tag\n              if: ${{ startsWith(github.ref, 'refs/tags') }}\n              run: echo \"JELLYFIN_VERSION=$(echo ${GITHUB_REF#refs/tags/v} | tr / -)\" >> $GITHUB_ENV\n\n            - name: Set JELLYFIN_VERSION to unstable\n  
            if: ${{ github.ref == 'refs/heads/master' }}\n              run: echo \"JELLYFIN_VERSION=unstable\" >> $GITHUB_ENV\n\n            - name: Download artifact\n              uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1\n              with:\n                  name: jellyfin-chromecast\n\n            - name: Upload release archive to GitHub release\n              if: ${{ startsWith(github.ref, 'refs/tags') }}\n              uses: alexellis/upload-assets@13926a61cdb2cb35f5fdef1c06b8b591523236d3 # 0.4.1\n              env:\n                  GITHUB_TOKEN: ${{ secrets.JF_BOT_TOKEN }}\n              with:\n                  asset_paths: '[\"jellyfin-chromecast.zip\"]'\n\n            - name: Upload release archive to repo.jellyfin.org\n              uses: burnett01/rsync-deployments@33214bd98ba4ac2be90f5976672b3f030fce9ce4 # 7.1.0\n              with:\n                  switches: -vrptz\n                  path: jellyfin-chromecast.zip\n                  remote_path: /srv/incoming/chromecast/${{ env.JELLYFIN_VERSION }}/\n                  remote_host: ${{ secrets.REPO_HOST }}\n                  remote_user: ${{ secrets.REPO_USER }}\n                  remote_key: ${{ secrets.REPO_KEY }}\n\n            - name: Update repo.jellyfin.org symlinks\n              uses: appleboy/ssh-action@0ff4204d59e8e51228ff73bce53f80d53301dee2 # v1.2.5\n              with:\n                  host: ${{ secrets.REPO_HOST }}\n                  username: ${{ secrets.REPO_USER }}\n                  key: ${{ secrets.REPO_KEY }}\n                  envs: JELLYFIN_VERSION\n                  script_stop: true\n                  script: |\n                      if [ -d \"/srv/repository/main/client/chromecast/versions/${{ env.JELLYFIN_VERSION }}\" ] && [ -n \"${{ env.JELLYFIN_VERSION }}\" ]; then\n                        sudo rm -r /srv/repository/main/client/chromecast/versions/${{ env.JELLYFIN_VERSION }};\n                      fi\n                      sudo 
mv /srv/incoming/chromecast/${{ env.JELLYFIN_VERSION }} /srv/repository/main/client/chromecast/versions/${{ env.JELLYFIN_VERSION }};\n                      cd /srv/repository/main/client/chromecast;\n                      sudo rm -rf *.zip;\n                      sudo ln -s versions/${JELLYFIN_VERSION}/jellyfin-chromecast-${JELLYFIN_VERSION}.zip .;\n\n    deploy:\n        name: Deploy\n        runs-on: ubuntu-latest\n        if: ${{ contains(github.repository_owner, 'jellyfin') && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags')) }}\n        needs: [build]\n\n        steps:\n            - name: Download Artifact\n              uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1\n              with:\n                  name: jellyfin-chromecast\n\n            - name: Unzip artifact\n              run: unzip jellyfin-chromecast.zip -d .\n\n            - name: Deploy to apps.jellyfin.org/chromecast/unstable\n              uses: burnett01/rsync-deployments@33214bd98ba4ac2be90f5976672b3f030fce9ce4 # 7.1.0\n              with:\n                  switches: -vrptz\n                  path: jellyfin-chromecast/\n                  remote_path: /srv/chromecast/unstable/\n                  remote_host: ${{ secrets.DEPLOY_APPS_HOST }}\n                  remote_user: ${{ secrets.DEPLOY_APPS_USER }}\n                  remote_key: ${{ secrets.DEPLOY_APPS_KEY }}\n\n            - name: Deploy to apps.jellyfin.org/chromecast/stable\n              if: ${{ startsWith(github.ref, 'refs/tags') }}\n              uses: burnett01/rsync-deployments@33214bd98ba4ac2be90f5976672b3f030fce9ce4 # 7.1.0\n              with:\n                  switches: -vrptz\n                  path: jellyfin-chromecast/\n                  remote_path: /srv/chromecast/stable/\n                  remote_host: ${{ secrets.DEPLOY_APPS_HOST }}\n                  remote_user: ${{ secrets.DEPLOY_APPS_USER }}\n                  remote_key: ${{ secrets.DEPLOY_APPS_KEY 
}}\n"
  },
  {
    "path": ".github/workflows/test.yaml",
    "content": "name: Test\n\non:\n    push:\n        branches:\n            - master\n    pull_request:\n        branches:\n            - master\n\njobs:\n    jest:\n        name: Jest\n        runs-on: ubuntu-latest\n\n        steps:\n            - name: Checkout\n              uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n\n            - name: Setup node env\n              uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0\n              with:\n                  node-version: 20\n\n            - name: Install dependencies\n              run: npm ci --no-audit\n\n            - name: Run tests\n              run: npm run test\n"
  },
  {
    "path": ".gitignore",
    "content": "# ide\n.idea\ntags\n\n# npm/yarn\nnode_modules\ndist\nyarn-error.log\n"
  },
  {
    "path": ".npmrc",
    "content": "fund=false\n"
  },
  {
    "path": ".prettierrc.yaml",
    "content": "semi: true\nsingleQuote: true\ntabWidth: 4\ntrailingComma: none\n"
  },
  {
    "path": ".stylelintrc.json",
    "content": "{\n    \"extends\": [\"stylelint-config-standard\"],\n    \"rules\": {\n        \"selector-class-pattern\": null,\n        \"selector-id-pattern\": null\n    }\n}\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing\n\n## Development\n\n### Development Environment\n\nThe development environment is set up with editorconfig. Code style is enforced by prettier and eslint for JavaScript/TypeScript linting.\n\n-   [editorconfig](https://editorconfig.org/)\n-   [prettier](https://prettier.io/)\n-   [eslint](https://eslint.org/)\n\n### Environment variables\n\n| name          | required | description                                               | default if not set |\n| ------------- | -------- | --------------------------------------------------------- | ------------------ |\n| RECEIVER_PORT | No       | The port used for the dev server when `npm start` is used | 9000               |\n\n### Building/Using\n\n`npm start` - Build a development version and start a dev server\n\n`npm run build` - Build a production version\n\n`npm run test` - Run tests\n\n`npm run lint` - Run linting and prettier\n\n1. Register a new [application](https://developers.google.com/cast/docs/registration). It is important that you choose a \"Custom application\", the rest of the details are up to you (name, description, etc). You will need a web server to host the files on.\n\n2.  Ensure that you can use this app:\n    #### For versions 10.8.x and earlier:\n    - Set up a local copy of [jellyfin-web](https://github.com/jellyfin/jellyfin-web).\n    - Change `applicationStable` and `applicationUnstable` in `jellyfin-web/src/plugins/chromecastPlayer/plugin.js` to your own application ID.\n    - Run the local copy of jellyfin-web using the provided instructions in the repo.\n    \n    #### For versions 10.9.x and beyond:\n    - Add your `CastReceiverApplication` `ID` and `Name` to the jellyfin `system.xml` in the `configuration` folder.\n    - Your custom hosted application is now available to select next to `stable` and `unstable` from the client of your choice.\n\n3. Clone this repo and run `npm install`. This will install all dependencies, run tests and build a production build by default.\n4. Make changes and build with `npm run build`.\n5. Before pushing your changes, make sure to run `npm run test` and `npm run lint`.\n\n> NOTE: It is recommended to symlink the `dist` folder pointing to a location on your web server hosting the files. That way you can refresh the cast receiver via the Chrome Remote Debugger and see your changes without having to manually copy after each build.\n\n## Pull Requests\n\nThis project uses the standard GitHub Fork and PR flow.\n"
  },
  {
    "path": "LICENSE.md",
    "content": "GNU GENERAL PUBLIC LICENSE\n                       Version 2, June 1991\n\n Copyright (C) 1989, 1991 Free Software Foundation, Inc.,\n 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n                            Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Lesser General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  
And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\n                    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  
The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n  
  License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. 
You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  (This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  
However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  
You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\n  8. 
If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n                            NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  
EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n                     END OF TERMS AND CONDITIONS\n\n            How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  
It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    {{description}}\n    Copyright (C) {{year}}  {{fullname}}\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License along\n    with this program; if not, write to the Free Software Foundation, Inc.,\n    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) year name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  
Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  {signature of Ty Coon}, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  If this is what you want to do, use the GNU Lesser General\nPublic License instead of this License.\n"
  },
  {
    "path": "README.md",
    "content": "<h1 align=\"center\">Jellyfin Cast Web Receiver</h1>\n<h3 align=\"center\">Part of the <a href=\"https://jellyfin.org\">Jellyfin Project</a></h3>\n\n<p align=\"center\">\n<img alt=\"Logo Banner\" src=\"https://raw.githubusercontent.com/jellyfin/jellyfin-ux/master/branding/SVG/banner-logo-solid.svg?sanitize=true\"/>\n<br/>\n<br/>\n<a href=\"https://github.com/jellyfin/jellyfin-chromecast\">\n<img alt=\"GPL 2.0 License\" src=\"https://img.shields.io/github/license/jellyfin/jellyfin-chromecast.svg\"/>\n</a>\n<a href=\"https://github.com/jellyfin/jellyfin-chromecast/releases\">\n<img alt=\"Current Release\" src=\"https://img.shields.io/github/release/jellyfin/jellyfin-chromecast.svg\"/>\n</a>\n</p>\n\nThe Jellyfin Cast Web Receiver is the frontend used when casting to a Google Cast capable device. It is used by default when casting from the Jellyfin Android app or Jellyfin web client.\n\n### How do I use it?\n\nA `stable` and `unstable` version of this app are already included in the Jellyfin server. There is no need to separately install this project. To host your own version (for developing) see `CONTRIBUTING.md`.\n\nThe `stable` version is the latest released version. `unstable` is updated automatically from the `master` branch.\n\n### What does it do?\n\nThis is a `web receiver` as defined in the [Google Cast architecture](https://developers.google.com/cast/docs/overview).\n\nAs soon as you press the \"cast\" button on your client this application will start on you cast-capable device and handle playback functionality. \n\n### What doesn't it do?\n\nAnything related to your non-cast device (e.g. your phone, browser, other device) or anything about the inclusion of casting for a specific client (e.g. casting from the iOS app).\n\nAny issues/features related to that: check the respective repository.\n\n### Something not working right?\n\nFirst check if the issue is actually Google Cast related. 
So answer the question:\n\n`\"Can I reproduce the issue in any other way than when casting to a Google Cast capable device?\"`\n\nIf yes: The issue probably lies somewhere else. \nIf no: [Open an issue on GitHub](https://github.com/jellyfin/jellyfin-chromecast/issues/new/choose).\n\n### Testing\n\nJellyfin allows switching between a `stable` and `unstable` version of the client. Go to the client of your choice and: `user` -> `settings` -> `playback` -> `Google Cast version`.\n\nNote that this setting is set per-user.\n"
  },
  {
    "path": "eslint.config.mjs",
    "content": "import jsdoc from 'eslint-plugin-jsdoc';\nimport promise from 'eslint-plugin-promise';\nimport importPlugin from 'eslint-plugin-import';\nimport globals from 'globals';\nimport eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';\nimport eslint from '@eslint/js';\nimport tseslint from 'typescript-eslint';\nimport json from 'eslint-plugin-json';\n\nexport default [\n    eslint.configs.recommended,\n    jsdoc.configs['flat/recommended'],\n    eslintPluginPrettierRecommended,\n    ...tseslint.configs.strict,\n    ...tseslint.configs.stylisticTypeChecked,\n    promise.configs['flat/recommended'],\n    importPlugin.flatConfigs.errors,\n    importPlugin.flatConfigs.warnings,\n    {\n        ignores: ['dist/*']\n    },\n    {\n        settings: {\n            'import/resolver': {\n                typescript: {\n                    alwaysTryTypes: true\n                }\n            }\n        }\n    },\n    {\n        files: ['**/*.json'],\n        ...json.configs['recommended'],\n        ...tseslint.configs.disableTypeChecked\n    },\n    {\n        files: ['**/*.ts'],\n        ...importPlugin.flatConfigs.typescript,\n        languageOptions: {\n            parser: tseslint.parser\n        }\n    },\n    {\n        files: ['eslint.config.mjs'],\n        ...tseslint.configs.disableTypeChecked\n    },\n    {\n        files: ['**/*.ts', '**/*.js'],\n        languageOptions: {\n            globals: {\n                ...globals.browser,\n                ...globals.es2015\n            },\n            parserOptions: {\n                projectService: true,\n                tsconfigRootDir: import.meta.dirname\n            }\n        },\n        rules: {\n            '@typescript-eslint/explicit-function-return-type': 'error',\n            '@typescript-eslint/no-explicit-any': 'warn',\n            '@typescript-eslint/no-unnecessary-type-assertion': 'error',\n            '@typescript-eslint/no-unused-expressions': 'warn',\n            
'@typescript-eslint/no-unused-vars': 'error',\n            '@typescript-eslint/prefer-ts-expect-error': 'error',\n            curly: 'error',\n            'import/newline-after-import': 'error',\n            'import/order': 'error',\n            'jsdoc/check-indentation': 'error',\n            'jsdoc/check-param-names': 'error',\n            'jsdoc/check-property-names': 'error',\n            'jsdoc/check-syntax': 'error',\n            'jsdoc/check-tag-names': 'error',\n            'jsdoc/no-types': 'error',\n            'jsdoc/require-description': 'warn',\n            'jsdoc/require-hyphen-before-param-description': 'error',\n            'jsdoc/require-jsdoc': 'error',\n            'jsdoc/require-param-description': 'warn',\n            //TypeScript and IntelliSense already provides us information about the function typings while hovering and\n            // eslint-jsdoc doesn't detect a mismatch between what's declared in the function and what's declared in\n            // JSDOC.\n            'jsdoc/require-param-type': 'off',\n            'jsdoc/require-returns-type': 'off',\n            'jsdoc/valid-types': 'off',\n            'padding-line-between-statements': [\n                'error',\n                // Always require blank lines after directives (like 'use-strict'), except between directives\n                { blankLine: 'always', next: '*', prev: 'directive' },\n                { blankLine: 'any', next: 'directive', prev: 'directive' },\n                // Always require blank lines after import, except between imports\n                { blankLine: 'always', next: '*', prev: 'import' },\n                { blankLine: 'any', next: 'import', prev: 'import' },\n                // Always require blank lines before and after every sequence of variable declarations and export\n                {\n                    blankLine: 'always',\n                    next: ['const', 'let', 'var', 'export'],\n                    prev: '*'\n                },\n             
   {\n                    blankLine: 'always',\n                    next: '*',\n                    prev: ['const', 'let', 'var', 'export']\n                },\n                {\n                    blankLine: 'any',\n                    next: ['const', 'let', 'var', 'export'],\n                    prev: ['const', 'let', 'var', 'export']\n                },\n                // Always require blank lines before and after class declaration, if, do/while, switch, try\n                {\n                    blankLine: 'always',\n                    next: [\n                        'if',\n                        'class',\n                        'for',\n                        'do',\n                        'while',\n                        'switch',\n                        'try'\n                    ],\n                    prev: '*'\n                },\n                {\n                    blankLine: 'always',\n                    next: '*',\n                    prev: ['if', 'class', 'for', 'do', 'while', 'switch', 'try']\n                },\n                // Always require blank lines before return statements\n                { blankLine: 'always', next: 'return', prev: '*' }\n            ],\n            'prefer-arrow-callback': 'error',\n            'prefer-template': 'error',\n            'promise/no-nesting': 'error',\n            'promise/no-return-in-finally': 'error',\n            'promise/prefer-await-to-callbacks': 'error',\n            'promise/prefer-await-to-then': 'error',\n            'sort-keys': [\n                'error',\n                'asc',\n                { caseSensitive: false, minKeys: 2, natural: true }\n            ],\n            'sort-vars': 'error'\n        }\n    },\n    {\n        files: ['*.js'],\n        languageOptions: {\n            globals: {\n                ...globals.node\n            }\n        }\n    }\n];\n"
  },
  {
    "path": "package.json",
    "content": "{\n    \"name\": \"jellyfin-chromecast\",\n    \"description\": \"Cast receiver for Jellyfin\",\n    \"version\": \"3.0.0\",\n    \"type\": \"module\",\n    \"bugs\": {\n        \"url\": \"https://github.com/jellyfin/jellyfin-chromecast/issues\"\n    },\n    \"dependencies\": {\n        \"@jellyfin/sdk\": \"0.12.0\"\n    },\n    \"devDependencies\": {\n        \"@types/chromecast-caf-receiver\": \"6.0.26\",\n        \"@types/node\": \"24.12.2\",\n        \"eslint\": \"9.39.4\",\n        \"eslint-config-prettier\": \"10.1.8\",\n        \"eslint-import-resolver-typescript\": \"4.4.4\",\n        \"eslint-plugin-import\": \"2.32.0\",\n        \"eslint-plugin-jsdoc\": \"61.7.1\",\n        \"eslint-plugin-json\": \"4.0.1\",\n        \"eslint-plugin-prettier\": \"5.5.5\",\n        \"eslint-plugin-promise\": \"7.3.0\",\n        \"prettier\": \"3.6.2\",\n        \"stylelint\": \"16.26.1\",\n        \"stylelint-config-standard\": \"39.0.1\",\n        \"typescript\": \"6.0.3\",\n        \"typescript-eslint\": \"8.59.1\",\n        \"vite\": \"8.0.9\",\n        \"vitest\": \"4.1.5\"\n    },\n    \"homepage\": \"https://jellyfin.org/\",\n    \"license\": \"GPL-2.0-or-later\",\n    \"repository\": {\n        \"type\": \"git\",\n        \"url\": \"git+https://github.com/jellyfin/jellyfin-chromecast.git\"\n    },\n    \"scripts\": {\n        \"start\": \"vite\",\n        \"build\": \"vite build\",\n        \"test\": \"vitest\",\n        \"lint\": \"npm run lint:code && npm run lint:ts && npm run lint:css\",\n        \"lint:code\": \"eslint\",\n        \"lint:ts\": \"tsc --noEmit\",\n        \"lint:css\": \"stylelint src/**/*.css\"\n    }\n}\n"
  },
  {
    "path": "renovate.json",
    "content": "{\n    \"$schema\": \"https://docs.renovatebot.com/renovate-schema.json\",\n    \"extends\": [\n        \"github>jellyfin/.github//renovate-presets/nodejs\",\n        \":dependencyDashboard\"\n    ]\n}\n"
  },
  {
    "path": "src/app.ts",
    "content": "import { RepeatMode } from '@jellyfin/sdk/lib/generated-client/models/repeat-mode';\n\nimport './components/maincontroller';\nimport './css/jellyfin.css';\n\nwindow.mediaElement = document.getElementById('video-player');\n\nwindow.repeatMode = RepeatMode.RepeatNone;\n"
  },
  {
    "path": "src/components/__tests__/jellyfinApi.test.ts",
    "content": "import { describe, beforeAll, beforeEach, test, expect } from 'vitest';\nimport { JellyfinApi } from '../jellyfinApi';\n\nconst setupMockCastSenders = (): void => {\n    const getSenders = (): any[] => [{ id: 'thisIsSenderId' }]; // eslint-disable-line @typescript-eslint/no-explicit-any\n    const getInstance = (): any => ({ getSenders }); // eslint-disable-line @typescript-eslint/no-explicit-any\n\n    // @ts-expect-error cast is already defined globally, however since we're mocking it we need to redefine it.\n    global.cast = {\n        framework: {\n            CastReceiverContext: {\n                getInstance\n            }\n        }\n    };\n};\n\ndescribe('creating basic urls', () => {\n    beforeAll(() => {\n        setupMockCastSenders();\n    });\n\n    beforeEach(() => {\n        JellyfinApi.setServerInfo('thisIsAccessToken', 'thisIsServerAddress');\n    });\n\n    test('should return correct url', () => {\n        const result = JellyfinApi.createUrl('somePath');\n        const correct = 'thisIsServerAddress/somePath';\n\n        expect(result).toEqual(correct);\n    });\n\n    test('should remove leading slashes', () => {\n        const result = JellyfinApi.createUrl('///////somePath');\n        const correct = 'thisIsServerAddress/somePath';\n\n        expect(result).toEqual(correct);\n    });\n\n    test('should return empty string on undefined serverAddress', () => {\n        JellyfinApi.setServerInfo();\n\n        const result = JellyfinApi.createUrl('somePath');\n        const correct = '';\n\n        expect(result).toEqual(correct);\n    });\n});\n\ndescribe('creating image urls', () => {\n    beforeAll(() => {\n        setupMockCastSenders();\n    });\n\n    beforeEach(() => {\n        JellyfinApi.setServerInfo('thisIsAccessToken', 'thisIsServerAddress');\n    });\n\n    test('should return correct url with all parameters provided', () => {\n        const itemId = '1';\n        const imageType = 'Primary';\n        const 
imageTag = 'sampleTag';\n        const imdIdx = 0;\n\n        const result = JellyfinApi.createImageUrl(\n            itemId,\n            imageType,\n            imageTag,\n            imdIdx\n        );\n        const correct = `thisIsServerAddress/Items/${itemId}/Images/${imageType}/${imdIdx.toString()}?tag=${imageTag}`;\n\n        expect(result).toEqual(correct);\n    });\n\n    test('should return correct url with minimal parameters provided', () => {\n        const itemId = '1';\n        const imageType = 'Primary';\n        const imageTag = 'sampleTag';\n        const imdIdx = 0;\n\n        const result = JellyfinApi.createImageUrl(itemId, imageType, imageTag);\n        const correct = `thisIsServerAddress/Items/${itemId}/Images/${imageType}/${imdIdx.toString()}?tag=${imageTag}`;\n\n        expect(result).toEqual(correct);\n    });\n\n    test('should return empty string on undefined serverAddress', () => {\n        JellyfinApi.setServerInfo();\n\n        const result = JellyfinApi.createImageUrl('', '', '');\n        const correct = '';\n\n        expect(result).toEqual(correct);\n    });\n});\n"
  },
  {
    "path": "src/components/codecSupportHelper.ts",
    "content": "import { VideoRangeType } from '@jellyfin/sdk/lib/generated-client';\n\nconst castContext = cast.framework.CastReceiverContext.getInstance();\n\n/**\n * Converts a codec string to the appropriate MIME type to use for testing support.\n * @param codec - The codec in question.\n * @returns The MIME type to use for testing support.\n */\nfunction videoCodecToMimeType(codec: VideoCodec): string {\n    switch (codec) {\n        case VideoCodec.H264:\n        case VideoCodec.H265:\n            return 'video/mp4';\n        case VideoCodec.VP8:\n        case VideoCodec.VP9:\n        case VideoCodec.AV1:\n            return 'video/webm';\n    }\n}\n\n/**\n * Get the string to use for testing support of a codec.\n * @param codec - The codec in question.\n * @param profile - The profile for the codec.\n * @param level - The level for the codec.\n * @param bitDepth - The bit depth of the video.\n * @returns The string to use for testing support of the codec.\n */\nfunction getCodecString(\n    codec: VideoCodec,\n    profile?: string,\n    level?: number,\n    bitDepth?: number\n): string {\n    switch (codec) {\n        case VideoCodec.H264: {\n            // Default to the oldest baseline profile.\n            profile = profile ?? 
'baseline';\n\n            let profileFlag: string;\n\n            switch (profile) {\n                case 'high 10':\n                    profileFlag = '6e00';\n                    break;\n                case 'high':\n                    profileFlag = '6400';\n                    break;\n                case 'main':\n                    profileFlag = '4d00';\n                    break;\n                case 'constrained baseline':\n                    profileFlag = '4240';\n                    break;\n                case 'baseline':\n                default:\n                    profileFlag = '4200';\n                    break;\n            }\n\n            // Levels are bound by max frame size (macroblocks) and decoding\n            // speed (macroblocks/s).\n            // A macroblock is 16x16 pixels.\n            //\n            // See:\n            //   * https://en.wikipedia.org/wiki/Advanced_Video_Coding#Levels\n            level = level ?? 10;\n\n            const levelFlag = level.toString(16).padStart(2, '0');\n\n            return `avc1.${profileFlag}${levelFlag}`;\n        }\n        case VideoCodec.H265: {\n            let profileFlag: string;\n            let constraintFlag: number;\n\n            switch (profile) {\n                case 'main 10':\n                    profileFlag = 'L';\n                    constraintFlag = 4;\n                    break;\n                case 'high':\n                    profileFlag = 'H';\n                    constraintFlag = 4;\n                    break;\n                case 'high 10':\n                    profileFlag = 'H';\n                    constraintFlag = 4;\n                    break;\n                case 'main':\n                default:\n                    profileFlag = 'L';\n                    constraintFlag = 0;\n                    break;\n            }\n\n            // Levels are bound by the luma picture size (total pixels) and\n            // luma sample rate (samples/s).\n            
level = level ?? 30;\n\n            return `hev1.1.${constraintFlag}.${profileFlag}${level}.B0`;\n        }\n        case VideoCodec.VP8:\n            return 'vp8';\n        case VideoCodec.VP9: {\n            let profileFlag: string;\n\n            switch (profile?.toLowerCase()) {\n                case 'profile 1':\n                    profileFlag = '01';\n                    break;\n                case 'profile 2':\n                    profileFlag = '02';\n                    break;\n                case 'profile 3':\n                    profileFlag = '03';\n                    break;\n                case 'profile 0':\n                default:\n                    profileFlag = '00';\n                    break;\n            }\n\n            level = level ?? 1.0;\n            bitDepth = bitDepth ?? 8;\n\n            const bitDepthFlag = bitDepth.toString().padStart(2, '0');\n\n            return `vp09.${profileFlag}.${level * 10}.${bitDepthFlag}`;\n        }\n        case VideoCodec.AV1: {\n            let profileFlag: string;\n\n            switch (profile?.toLowerCase()) {\n                case 'high':\n                    profileFlag = '1';\n                    break;\n                case 'professional':\n                    profileFlag = '2';\n                    break;\n                case 'main':\n                default:\n                    profileFlag = '0';\n                    break;\n            }\n\n            // This level should correspond to the `seq_level_idx`.\n            level = level ?? 0;\n            bitDepth = bitDepth ?? 
8;\n\n            const levelFlag = level.toString().padStart(2, '0');\n            const bitDepthFlag = bitDepth.toString().padStart(2, '0');\n\n            // Assume main tier, since the condition language has no way to\n            // express that.\n            return `av01.${profileFlag}.${levelFlag}M.${bitDepthFlag}`;\n        }\n    }\n}\n\n/**\n * Utility class representing a video resolution.\n */\nexport class Resolution {\n    public width: number;\n    public height: number;\n    constructor(width: number, height: number) {\n        this.width = width;\n        this.height = height;\n    }\n\n    public equals(other: Resolution): boolean {\n        return this.width === other.width && this.height === other.height;\n    }\n}\n\n/**\n * Known video codecs\n */\nexport enum VideoCodec {\n    H264 = 'h264',\n    H265 = 'hevc',\n    VP8 = 'vp8',\n    VP9 = 'vp9',\n    AV1 = 'av1'\n}\n\n/**\n * Checks if there is E-AC-3 support.\n * This check returns in line with the cast settings made in Google Home.\n * If the device is in auto, EDID information will be used, otherwise it\n * depends on the manual setting.\n *\n * Currently it's disabled because of problems getting it to work with HLS.\n * @returns true if E-AC-3 can be played\n */\nexport function hasEAC3Support(): boolean {\n    //return castContext.canDisplayType('audio/mp4', 'ec-3');\n    return false;\n}\n\n/**\n * Checks if there is AC-3 support.\n * This check returns in line with the cast settings made in Google Home.\n * If the device is in auto, EDID information will be used, otherwise it\n * depends on the manual setting.\n *\n * Currently it's disabled because of problems getting it to work with HLS.\n * @returns true if AC-3 can be played\n */\nexport function hasAC3Support(): boolean {\n    //return castContext.canDisplayType('audio/mp4', 'ac-3');\n    return false;\n}\n\n/**\n * Checks for every supported video codec.\n * @returns An array of supported video codecs\n */\nexport function 
getSupportedVideoCodecs(): VideoCodec[] {\n    const supportedVideoCodecs: VideoCodec[] = [];\n\n    for (const videoCodec of Object.values(VideoCodec)) {\n        if (hasVideoCodecSupport(videoCodec)) {\n            supportedVideoCodecs.push(videoCodec);\n        }\n    }\n\n    return supportedVideoCodecs;\n}\n\n/**\n * Check if the device has any video support.\n * @returns `true` if the device can display video.\n */\nexport function hasVideoSupport(): boolean {\n    const deviceCaps = castContext.getDeviceCapabilities();\n\n    return deviceCaps?.[\n        cast.framework.system.DeviceCapabilities.DISPLAY_SUPPORTED\n    ];\n}\n\n/**\n * Gets whether the particular codec is supported.\n * @param codec - The codec in question\n * @returns `true` if the codec is supported.\n */\nexport function hasVideoCodecSupport(codec: VideoCodec): boolean {\n    const mimeType = videoCodecToMimeType(codec);\n    const codecString = getCodecString(codec);\n\n    return castContext.canDisplayType(mimeType, codecString);\n}\n\n/**\n * Get the supported video ranges for a given codec profile and level.\n * @param codec - The codec in question.\n * @param profile - The profile in question.\n * @param level - The level in question.\n * @returns A set of supported video ranges.\n */\nexport function getVideoRangeSupport(\n    codec: VideoCodec,\n    profile: string,\n    level: number\n): Set<VideoRangeType> {\n    const supportedRanges = new Set<VideoRangeType>([VideoRangeType.Sdr]);\n\n    profile = profile.toLowerCase();\n\n    const mimeType = videoCodecToMimeType(codec);\n    const codecString = getCodecString(codec, profile, level, 10);\n\n    switch (codec) {\n        case VideoCodec.H265: {\n            if (profile !== 'main 10' && profile !== 'high 10') {\n                break;\n            }\n\n            // HEVC vs. 
DoVi levels and max pixel rate (luma sample rate)\n            // +------------+---------------+------------+---------------+\n            // | HEVC Level | HEVC Max PPS  | DoVi Level | DoVi Max PPS  |\n            // +------------+---------------+------------+---------------+\n            // | 3.0        | 16_588_800    | 01         | 22_118_400    |\n            // | 3.1        | 33_177_600    | 03         | 49_766_400    |\n            // | 4.0        | 66_846_720    | 04         | 124_416_000   |\n            // | 4.1        | 133_693_440   | 06         | 199_065_600   |\n            // | 5.0        | 267_386_880   | 07         | 248_832_000   |\n            // | 5.1        | 534_773_760   | 10         | 995_328_000   |\n            // | 6.0        | 1_069_547_520 | 11         | 1_990_656_000 |\n            // | 6.1        | 2_139_095_040 | 13         | 3_981_312_000 |\n            // +------------+---------------+------------+---------------+\n            const doviLevel = ((): string => {\n                if (level <= 30 * 3) {\n                    return '01';\n                } else if (level <= 31 * 3) {\n                    return '03';\n                } else if (level <= 40 * 3) {\n                    return '04';\n                } else if (level <= 41 * 3) {\n                    return '06';\n                } else if (level <= 50 * 3) {\n                    return '07';\n                } else if (level <= 51 * 3) {\n                    return '10';\n                } else if (level <= 60 * 3) {\n                    return '11';\n                } else {\n                    return '13';\n                }\n            })();\n\n            if (castContext.canDisplayType(mimeType, `dvhe.05.${doviLevel}`)) {\n                supportedRanges.add(VideoRangeType.Dovi);\n            }\n\n            if (castContext.canDisplayType(mimeType, `dvhe.08.${doviLevel}`)) {\n                supportedRanges.add(VideoRangeType.DoviWithSdr);\n                
supportedRanges.add(VideoRangeType.DoviWithHlg);\n                supportedRanges.add(VideoRangeType.DoviWithHdr10);\n            }\n\n            break;\n        }\n        case VideoCodec.AV1: {\n            // AV1 vs. DoVi levels and max pixel rate (luma sample rate)\n            // +-------------------+---------------+------------+---------------+\n            // | AV1 seq_level_idx | AV1 Max PPS   | DoVi Level | DoVi Max PPS  |\n            // +-------------------+---------------+------------+---------------+\n            // | 4                 | 19_975_680    | 01         | 22_118_400    |\n            // | 5                 | 31_950_720    | 03         | 49_766_400    |\n            // | 8                 | 70_778_880    | 04         | 124_416_000   |\n            // | 9                 | 141_557_760   | 06         | 199_065_600   |\n            // | 12                | 267_386_880   | 07         | 248_832_000   |\n            // | 13                | 534_773_760   | 10         | 995_328_000   |\n            // | 16                | 1_069_547_520 | 11         | 1_990_656_000 |\n            // | 6.1               | 2_139_095_040 | 13         | 3_981_312_000 |\n            // +-------------------+---------------+------------+---------------+\n            const doviLevel = ((): string => {\n                if (level <= 4) {\n                    return '01';\n                } else if (level <= 5) {\n                    return '03';\n                } else if (level <= 8) {\n                    return '04';\n                } else if (level <= 9) {\n                    return '06';\n                } else if (level <= 12) {\n                    return '07';\n                } else if (level <= 13) {\n                    return '10';\n                } else if (level <= 16) {\n                    return '11';\n                } else {\n                    return '13';\n                }\n            })();\n\n            // 110: Chroma subsampling (4:2:0), not 
Monochrome\n            // 09: Color Primary (BT.2020)\n            // 16: Transfer Characteristics (PQ, SMPTE ST 2084)\n            // 09: Matrix Coefficients (BT.2020 non-constant luminance)\n            // 0: Studio swing representation\n            const hasHdr10Support = castContext.canDisplayType(\n                mimeType,\n                `${codecString}.110.09.16.09.0`\n            );\n\n            if (hasHdr10Support) {\n                supportedRanges.add(VideoRangeType.Hdr10);\n                supportedRanges.add(VideoRangeType.Hdr10Plus);\n            }\n\n            // 110: Chroma subsampling (4:2:0), not Monochrome\n            // 09: Color Primary (BT.2020)\n            // 18: Transfer Characteristics (BT.2100 HLG, ARIB STD-B67)\n            // 09: Matrix Coefficients (BT.2020 non-constant luminance)\n            // 0: Studio swing representation\n            const hasHlgSupport = castContext.canDisplayType(\n                mimeType,\n                `${codecString}.110.09.18.09.0`\n            );\n\n            if (hasHlgSupport) {\n                supportedRanges.add(VideoRangeType.Hlg);\n            }\n\n            // Dolby Vision with AV1 is profile 10.\n            if (castContext.canDisplayType(mimeType, `dav1.10.${doviLevel}`)) {\n                supportedRanges.add(VideoRangeType.Dovi);\n                supportedRanges.add(VideoRangeType.DoviWithSdr);\n                supportedRanges.add(VideoRangeType.DoviWithHlg);\n                supportedRanges.add(VideoRangeType.DoviWithHdr10);\n            }\n\n            break;\n        }\n        case VideoCodec.VP9: {\n            // 01: Chroma subsampling (4:2:0)\n            // 09: Color Primary (BT.2020)\n            // 16: Transfer Characteristics (PQ)\n            // 09: Matrix Coefficients (BT.2020 non-constant luminance)\n            // 01: Enforce legal color range\n            const hasHdr10Support = castContext.canDisplayType(\n                mimeType,\n                
`${codecString}.01.09.16.09.01`\n            );\n\n            if (hasHdr10Support) {\n                supportedRanges.add(VideoRangeType.Hdr10);\n            }\n\n            break;\n        }\n        case VideoCodec.H264: {\n            // H.264 supports 8-bit Dolby Vision with BL signal cross-compatibility with SDR.\n            if (profile !== 'high') {\n                break;\n            }\n\n            // H.264 Max PPS values calculated assuming largest (16x16) macroblock\n            //\n            // +-------------+---------------+------------+---------------+\n            // | H.264 Level | H.264 Max PPS | DoVi Level | DoVi Max PPS  |\n            // +-------------+---------------+------------+---------------+\n            // | 3.0         | 10_368_000    | 01         | 22_118_400    |\n            // | 3.1         | 27_648_000    | 02         | 27_648_000    |\n            // | 4.0         | 62_914_560    | 05         | 124_416_000   |\n            // | 4.1         | 62_914_560    | 05         | 124_416_000   |\n            // | 4.2         | 133_693_440   | 06         | 199_065_600   |\n            // | 5.0         | 150_994_944   | 06         | 199_065_600   |\n            // | 5.1         | 251_658_240   | 08         | 398_131_200   |\n            // | 5.2         | 530_841_600   | 10         | 995_328_000   |\n            // | 6.0         | 1_069_547_520 | 12         | 1_990_656_000 |\n            // | 6.1         | 2_139_095_040 | 13         | 3_981_312_000 |\n            // +-------------+---------------+------------+---------------+\n            const doviLevel = ((): string => {\n                if (level <= 30) {\n                    return '01';\n                } else if (level <= 31) {\n                    return '02';\n                } else if (level <= 41) {\n                    return '05';\n                } else if (level <= 50) {\n                    return '06';\n                } else if (level <= 51) {\n                    return 
'08';\n                } else if (level <= 52) {\n                    return '10';\n                } else if (level <= 60) {\n                    return '12';\n                } else {\n                    return '13';\n                }\n            })();\n\n            if (castContext.canDisplayType(mimeType, `dvav.09.${doviLevel}`)) {\n                supportedRanges.add(VideoRangeType.DoviWithSdr);\n            }\n\n            break;\n        }\n    }\n\n    return supportedRanges;\n}\n\n/**\n * Check if this device can play text tracks.\n * This is not supported on Chromecast Audio,\n * but otherwise is.\n * @returns `true` if text tracks are supported\n */\nexport function hasTextTrackSupport(): boolean {\n    return hasVideoSupport();\n}\n\n/**\n * Get the max supported media bitrate for the active Cast device.\n * @returns `number` representing the max supported bitrate.\n */\nexport function getMaxBitrateSupport(): number {\n    // FIXME: We should get this dynamically or hardcode this to values\n    // we see fit for each Cast device. More testing is needed.\n    // 120Mb/s ?\n    return 120000000;\n}\n\n/**\n * Tests the max resolution supported by the device of a particular codec.\n * @param codec - The codec in question.\n * @param profile - The profile for the codec.\n * @param level - The level for the codec.\n * @param bitDepth - The bit depth of the video.\n * @returns `number` representing the maximum resolution supported.\n */\nexport function getMaxResolutionSupported(\n    codec: VideoCodec,\n    profile: string,\n    level: number,\n    bitDepth: number\n): Resolution {\n    // This function iteratively tests the maximum resolution assuming a 16:9\n    // resolution ratio. 
This should be a good enough approximation for most\n    // devices.\n    //\n    // In reality, some encoders may be limited by pixel count instead of\n    // resolution, but other devices may arbitrarily limit the resolution.\n\n    let maxRes = new Resolution(0, 0);\n    let newRes = new Resolution(0, 0);\n    const mimeType = videoCodecToMimeType(codec);\n    const codecString = getCodecString(codec, profile, level, bitDepth);\n\n    // Limit the upper bound to 32K, which is more than enough.\n    while (newRes.width < 30720) {\n        newRes = ((): Resolution => {\n            // Progressively increase steps as resolution increases.\n            if (newRes.height >= 2160) {\n                return new Resolution(newRes.width + 1280, newRes.height + 720);\n            } else if (newRes.height >= 1080) {\n                return new Resolution(newRes.width + 640, newRes.height + 360);\n            } else {\n                return new Resolution(newRes.width + 320, newRes.height + 180);\n            }\n        })();\n\n        if (\n            !castContext.canDisplayType(\n                mimeType,\n                codecString,\n                newRes.width,\n                newRes.height\n            )\n        ) {\n            continue;\n        }\n\n        maxRes = newRes;\n    }\n\n    // As a compromise, after we've found the maximum 16:9 resolution, try\n    // checking other resolutions. These resolutions are ordered descending by\n    // the scaling factor of the expanding dimension -- in the sense that we\n    // check 2.40:1 before 1.85:1. 
We also prioritize wider resolutions over\n    // taller resolutions.\n    //\n    // In these checks, we hold one resolution constant and expand the other to\n    // test.\n    const otherResolutions = [\n        // Wider resolutions\n\n        // 32:9 is a super ultrawide resolution typically used by monitors.\n        new Resolution(Math.floor(maxRes.height * 3.555), maxRes.height),\n\n        // 2.40:1 is used by some cinema shot on 35mm film.\n        new Resolution(Math.floor(maxRes.height * 2.4), maxRes.height),\n\n        // \"21:9\" is the marketing term for multiple ultrawide resolutions.\n        // The real aspect ratio is somewhere between 2.37:1 and 2.38:1.\n        new Resolution(Math.floor(maxRes.height * 2.37037), maxRes.height),\n\n        // 1.90:1 is a common IMAX resolution.\n        new Resolution(Math.floor(maxRes.height * 1.9), maxRes.height),\n\n        // 1.85:1 is sometimes used in Hollywood cinema.\n        new Resolution(Math.floor(maxRes.height * 1.85), maxRes.height),\n\n        // Taller resolutions.\n\n        // 9:19.5 is a common resolution for a horizontal modern phone.\n        new Resolution(maxRes.width, Math.floor(maxRes.width / 9) * 19.5),\n\n        // 9:16 is the vertical version of 16:9.\n        new Resolution(maxRes.width, Math.floor(maxRes.width / 9) * 16),\n\n        // 1:1 resolution\n        new Resolution(maxRes.width, maxRes.width),\n\n        // 4:3 is an older but still common resolution found on old TVs.\n        new Resolution(maxRes.width, Math.floor((maxRes.width / 4) * 3)),\n\n        // 16:10 is a common resolution for computer displays.\n        new Resolution(maxRes.width, Math.floor((maxRes.width / 16) * 10))\n    ];\n\n    for (const newRes of otherResolutions) {\n        if (\n            castContext.canDisplayType(\n                mimeType,\n                codecString,\n                newRes.width,\n                newRes.height\n            )\n        ) {\n            // Return early, since it'll 
be the best we'll find.\n            return newRes;\n        }\n    }\n\n    return maxRes;\n}\n\n/**\n * Gets the supported profiles for a given video codec.\n * @param codec - The video codec in question.\n * @returns An array of the supported profiles.\n */\nexport function getVideoProfileSupport(codec: VideoCodec): string[] {\n    const possibleProfiles = ((): string[] => {\n        switch (codec) {\n            case VideoCodec.H264:\n                return [\n                    'constrained baseline',\n                    'baseline',\n                    'main',\n                    'high',\n                    'high 10'\n                ];\n            case VideoCodec.H265:\n                return ['main', 'main 10', 'high', 'high 10'];\n            case VideoCodec.AV1:\n                return ['main', 'high', 'professional'];\n            case VideoCodec.VP8:\n                return [''];\n            case VideoCodec.VP9:\n                return ['Profile 0', 'Profile 1', 'Profile 2', 'Profile 3'];\n        }\n    })();\n\n    const mimeType = videoCodecToMimeType(codec);\n    const supportedProfiles = possibleProfiles.filter((profile) => {\n        const codecString = getCodecString(codec, profile);\n\n        return castContext.canDisplayType(mimeType, codecString);\n    });\n\n    return supportedProfiles;\n}\n\n/**\n * Gets the highest level supported by the given codec profile.\n * @param codec - The codec in question.\n * @param profile - The profile for the codec.\n * @param bitDepth - The bit depth of the video.\n * @returns `number` representing the  highest level supported.\n */\nexport function getVideoCodecHighestLevelSupport(\n    codec: VideoCodec,\n    profile?: string,\n    bitDepth?: number\n): number | undefined {\n    const possibleLevels = ((): number[] => {\n        switch (codec) {\n            case VideoCodec.H264:\n                return [\n                    10, 11, 12, 13, 20, 21, 22, 30, 31, 32, 40, 41, 42, 50, 51,\n              
      52, 60, 61, 62\n                ];\n            case VideoCodec.H265:\n                // The server expects H.265 levels to be multiplied by 3.\n                return [10, 20, 21, 30, 31, 40, 41, 50, 51, 52, 60, 61, 62].map(\n                    (level) => level * 3\n                );\n            case VideoCodec.AV1:\n                // This level should correspond to the `seq_level_idx`.\n                return [0, 1, 4, 5, 8, 9, 12, 13, 14, 15, 16, 17, 18, 19];\n            case VideoCodec.VP8:\n                return [];\n            case VideoCodec.VP9:\n                return [\n                    1.0, 1.1, 2.0, 2.1, 3.0, 3.1, 4.0, 4.1, 5.0, 5.1, 5.2, 6.0,\n                    6.1, 6.2\n                ];\n        }\n    })();\n\n    const mimeType = videoCodecToMimeType(codec);\n    const supportedLevels: number[] = [];\n\n    for (const level of possibleLevels) {\n        const codecString = getCodecString(codec, profile, level, bitDepth);\n        const supported = castContext.canDisplayType(mimeType, codecString);\n\n        if (!supported) {\n            break;\n        }\n\n        supportedLevels.push(level);\n    }\n\n    return supportedLevels.length > 0\n        ? 
supportedLevels[supportedLevels.length - 1]\n        : undefined;\n}\n\n/**\n * Gets the highest bit depth supported by the given codec profile.\n * @param codec - The codec in question.\n * @param profile - The profile for the codec.\n * @param level - The level for the codec.\n * @returns The highest bit depth supported by the given codec profile.\n */\nexport function getVideoCodecHighestBitDepthSupport(\n    codec: VideoCodec,\n    profile?: string,\n    level?: number\n): number | undefined {\n    const possibleBitDepths = ((): number[] => {\n        switch (codec) {\n            case VideoCodec.H264:\n                switch (profile?.toLowerCase()) {\n                    case 'high 10':\n                        return [10, 8];\n                    default:\n                        return [8];\n                }\n            case VideoCodec.H265:\n                switch (profile?.toLowerCase()) {\n                    case 'main 10':\n                    case 'high 10':\n                        return [10, 8];\n                    default:\n                        return [8];\n                }\n            case VideoCodec.AV1:\n                switch (profile?.toLowerCase()) {\n                    case 'professional':\n                        return [10, 8];\n                    default:\n                        return [8];\n                }\n            case VideoCodec.VP8:\n                // VP8's bitstream officially only supports up to 8 bits.\n                return [8];\n            case VideoCodec.VP9:\n                switch (profile?.toLowerCase()) {\n                    case 'profile 2':\n                    case 'profile 3':\n                        return [12, 10];\n                    default:\n                        return [8];\n                }\n        }\n    })();\n\n    return possibleBitDepths.find((bitDepth) => {\n        const mimeType = videoCodecToMimeType(codec);\n        const codecString = getCodecString(codec, profile, level, 
bitDepth);\n\n        return castContext.canDisplayType(mimeType, codecString);\n    });\n}\n\n/**\n * Gets the minimum bit depth required for a given codec and profile.\n * @param codec - The codec in question.\n * @param profile - The profile for the codec.\n * @returns The minimum bit depth required.\n */\nexport function getVideoCodecMinimumBitDepth(\n    codec: VideoCodec,\n    profile: string\n): number {\n    profile = profile.toLowerCase();\n\n    // VP9 profiles 2 and 3 require 10 bit depth.\n    if (\n        codec === VideoCodec.VP9 &&\n        (profile === 'profile 2' || profile === 'profile 3')\n    ) {\n        return 10;\n    }\n\n    return 8;\n}\n\n/**\n * Get VPX (VP8, VP9) codecs supported by the active Cast device.\n * @returns An array of the supported WebM codecs.\n */\nexport function getSupportedWebMVideoCodecs(): VideoCodec[] {\n    const possibleCodecs = [VideoCodec.VP8, VideoCodec.VP9, VideoCodec.AV1];\n\n    const supportedCodecs = possibleCodecs.filter((codec) => {\n        return castContext.canDisplayType('video/webm', getCodecString(codec));\n    });\n\n    return supportedCodecs;\n}\n\n/**\n * Get supported video codecs suitable for use in an MP4 container.\n * @returns An array of the supported MP4 video codecs.\n */\nexport function getSupportedMP4VideoCodecs(): VideoCodec[] {\n    const possibleCodecs = [VideoCodec.H264, VideoCodec.H265, VideoCodec.AV1];\n\n    const supportedCodecs = possibleCodecs.filter((codec) => {\n        return castContext.canDisplayType('video/mp4', getCodecString(codec));\n    });\n\n    return supportedCodecs;\n}\n\n/**\n * Get supported audio codecs suitable for use in an MP4 container.\n * @returns Supported MP4 audio codecs.\n */\nexport function getSupportedMP4AudioCodecs(): string[] {\n    const codecs = ['aac', 'mp3', 'opus'];\n\n    if (hasEAC3Support()) {\n        codecs.push('eac3');\n    }\n\n    if (hasAC3Support()) {\n        codecs.push('ac3');\n    }\n\n    return codecs;\n}\n\n/**\n * Get 
supported video codecs suitable for use with HLS.\n * @returns Supported HLS video codecs.\n */\nexport function getSupportedHLSVideoCodecs(): VideoCodec[] {\n    // The server now supports fmp4, so return a list of all supported mp4\n    // codecs.\n    return getSupportedMP4VideoCodecs();\n}\n\n/**\n * Get supported audio codecs suitable for use with HLS.\n * @returns All supported HLS audio codecs.\n */\nexport function getSupportedHLSAudioCodecs(): string[] {\n    // HLS basically supports whatever MP4 supports.\n    return getSupportedMP4AudioCodecs();\n}\n\n/**\n * Get supported audio codecs suitable for use in a WebM container.\n * @returns All supported WebM audio codecs.\n */\nexport function getSupportedWebMAudioCodecs(): string[] {\n    return ['vorbis', 'opus'];\n}\n\n/**\n * Get supported audio codecs.\n * @returns the supported audio codecs.\n */\nexport function getSupportedAudioCodecs(): string[] {\n    return ['opus', 'vorbis', 'mp3', 'aac', 'flac', 'wav'];\n}\n"
  },
  {
    "path": "src/components/commandHandler.ts",
    "content": "import { getReportingParams, TicksPerSecond } from '../helpers';\nimport type {\n    DataMessage,\n    DisplayRequest,\n    PlayRequest,\n    SeekRequest,\n    SetIndexRequest,\n    SetRepeatModeRequest,\n    SupportedCommands\n} from '../types/global';\nimport { AppStatus } from '../types/appStatus';\nimport {\n    translateItems,\n    shuffle,\n    instantMix,\n    setAudioStreamIndex,\n    setSubtitleStreamIndex,\n    seek\n} from './maincontroller';\nimport { reportPlaybackProgress } from './jellyfinActions';\nimport { PlaybackManager } from './playbackManager';\nimport { DocumentManager } from './documentManager';\n\n// eslint-disable-next-line @typescript-eslint/no-extraneous-class\nexport abstract class CommandHandler {\n    private static playerManager: framework.PlayerManager;\n    private static supportedCommands: SupportedCommands = {\n        DisplayContent: CommandHandler.displayContentHandler,\n        Identify: CommandHandler.IdentifyHandler,\n        InstantMix: CommandHandler.instantMixHandler,\n        Mute: CommandHandler.MuteHandler,\n        NextTrack: CommandHandler.nextTrackHandler,\n        Pause: CommandHandler.PauseHandler,\n        PlayLast: CommandHandler.playLastHandler,\n        PlayNext: CommandHandler.playNextHandler,\n        PlayNow: CommandHandler.playNowHandler,\n        PlayPause: CommandHandler.PlayPauseHandler,\n        PreviousTrack: CommandHandler.previousTrackHandler,\n        Seek: CommandHandler.SeekHandler,\n        SetAudioStreamIndex: CommandHandler.setAudioStreamIndexHandler,\n        SetRepeatMode: CommandHandler.SetRepeatModeHandler,\n        SetSubtitleStreamIndex: CommandHandler.setSubtitleStreamIndexHandler,\n        SetVolume: CommandHandler.SetVolumeHandler,\n        Shuffle: CommandHandler.shuffleHandler,\n        Stop: CommandHandler.StopHandler,\n        ToggleMute: CommandHandler.ToggleMuteHandler,\n        Unmute: CommandHandler.MuteHandler,\n        Unpause: 
CommandHandler.UnpauseHandler,\n        VolumeDown: CommandHandler.VolumeDownHandler,\n        VolumeUp: CommandHandler.VolumeUpHandler\n    };\n\n    static configure(playerManager: framework.PlayerManager): void {\n        this.playerManager = playerManager;\n    }\n\n    static playNextHandler(data: DataMessage): void {\n        translateItems(data, data.options as PlayRequest, data.command);\n    }\n\n    static playNowHandler(data: DataMessage): void {\n        translateItems(data, data.options as PlayRequest, data.command);\n    }\n\n    static playLastHandler(data: DataMessage): void {\n        translateItems(data, data.options as PlayRequest, data.command);\n    }\n\n    static shuffleHandler(data: DataMessage): void {\n        shuffle(\n            data,\n            data.options as PlayRequest,\n            (data.options as PlayRequest).items[0]\n        );\n    }\n\n    static instantMixHandler(data: DataMessage): void {\n        instantMix(\n            data,\n            data.options as PlayRequest,\n            (data.options as PlayRequest).items[0]\n        );\n    }\n\n    static displayContentHandler(data: DataMessage): void {\n        if (PlaybackManager.isIdle()) {\n            DocumentManager.showItemId((data.options as DisplayRequest).ItemId);\n        }\n    }\n\n    static nextTrackHandler(): void {\n        if (PlaybackManager.hasNextItem()) {\n            PlaybackManager.playNextItem(true);\n        }\n    }\n\n    static previousTrackHandler(): void {\n        if (PlaybackManager.hasPrevItem()) {\n            PlaybackManager.playPreviousItem();\n        }\n    }\n\n    static setAudioStreamIndexHandler(data: DataMessage): void {\n        setAudioStreamIndex(\n            PlaybackManager.playbackState,\n            (data.options as SetIndexRequest).index\n        );\n    }\n\n    static setSubtitleStreamIndexHandler(data: DataMessage): void {\n        setSubtitleStreamIndex(\n            PlaybackManager.playbackState,\n            
(data.options as SetIndexRequest).index\n        );\n    }\n\n    // VolumeUp, VolumeDown and ToggleMute commands seem to be handled on the sender in the current implementation.\n    // From what I can tell there's no convenient way for the receiver to get its own volume.\n    // We should probably remove these commands in the future.\n    static VolumeUpHandler(): void {\n        console.log('VolumeUp handler not implemented');\n    }\n\n    static VolumeDownHandler(): void {\n        console.log('VolumeDown handler not implemented');\n    }\n\n    static ToggleMuteHandler(): void {\n        console.log('ToggleMute handler not implemented');\n    }\n\n    static SetVolumeHandler(): void {\n        // This is now implemented on the sender\n        console.log('SetVolume handler not implemented');\n    }\n\n    static IdentifyHandler(): void {\n        if (!PlaybackManager.isPlaying()) {\n            if (!PlaybackManager.isBuffering()) {\n                DocumentManager.setAppStatus(AppStatus.Waiting);\n            }\n\n            DocumentManager.startBackdropInterval();\n        } else {\n            // When a client connects send back the initial device state (volume etc) via a playbackstop message\n            reportPlaybackProgress(\n                PlaybackManager.playbackState,\n                getReportingParams(PlaybackManager.playbackState),\n                true,\n                'playbackstop'\n            );\n        }\n    }\n\n    static SeekHandler(data: DataMessage): void {\n        seek(\n            PlaybackManager.playbackState,\n            (data.options as SeekRequest).position * TicksPerSecond\n        );\n    }\n\n    static MuteHandler(): void {\n        // CommandHandler is now implemented on the sender\n        console.log('Mute handler not implemented');\n    }\n\n    static UnmuteHandler(): void {\n        // CommandHandler is now implemented on the sender\n        console.log('Unmute handler not implemented');\n    }\n\n    static 
StopHandler(): void {\n        this.playerManager.stop();\n    }\n\n    static PlayPauseHandler(): void {\n        if (\n            this.playerManager.getPlayerState() ===\n            cast.framework.messages.PlayerState.PAUSED\n        ) {\n            this.playerManager.play();\n        } else {\n            this.playerManager.pause();\n        }\n    }\n\n    static PauseHandler(): void {\n        this.playerManager.pause();\n    }\n\n    static SetRepeatModeHandler(data: DataMessage): void {\n        window.repeatMode = (data.options as SetRepeatModeRequest).RepeatMode;\n        window.reportEventType = 'repeatmodechange';\n    }\n\n    static UnpauseHandler(): void {\n        this.playerManager.play();\n    }\n\n    // We should avoid using a defaulthandler that has a purpose other than informing the dev/user\n    // Currently all unhandled commands will be treated as play commands.\n    static defaultHandler(data: DataMessage): void {\n        translateItems(data, data.options as PlayRequest, 'play');\n    }\n\n    static processMessage(data: DataMessage, command: string): void {\n        const commandHandler = this.supportedCommands[command];\n\n        if (typeof commandHandler === 'function') {\n            console.debug(\n                `Command \"${command}\" received. Identified handler, calling identified handler.`\n            );\n            commandHandler.bind(this)(data);\n        } else {\n            console.log(\n                `Command \"${command}\" received. Could not identify handler, calling default handler.`\n            );\n            this.defaultHandler(data);\n        }\n    }\n}\n"
  },
  {
    "path": "src/components/deviceprofileBuilder.ts",
    "content": "import {\n    VideoRangeType,\n    type CodecProfile,\n    type ContainerProfile,\n    type DeviceProfile,\n    type DirectPlayProfile,\n    type ProfileCondition,\n    type SubtitleProfile,\n    type TranscodingProfile\n} from '@jellyfin/sdk/lib/generated-client';\nimport { CodecType } from '@jellyfin/sdk/lib/generated-client/models/codec-type';\nimport { DlnaProfileType } from '@jellyfin/sdk/lib/generated-client/models/dlna-profile-type';\nimport { EncodingContext } from '@jellyfin/sdk/lib/generated-client/models/encoding-context';\nimport { ProfileConditionType } from '@jellyfin/sdk/lib/generated-client/models/profile-condition-type';\nimport { ProfileConditionValue } from '@jellyfin/sdk/lib/generated-client/models/profile-condition-value';\nimport { SubtitleDeliveryMethod } from '@jellyfin/sdk/lib/generated-client/models/subtitle-delivery-method';\nimport {\n    hasTextTrackSupport,\n    getSupportedWebMVideoCodecs,\n    getSupportedMP4VideoCodecs,\n    getSupportedMP4AudioCodecs,\n    getSupportedHLSVideoCodecs,\n    getSupportedHLSAudioCodecs,\n    getSupportedWebMAudioCodecs,\n    getSupportedAudioCodecs,\n    hasVideoSupport,\n    getSupportedVideoCodecs,\n    getVideoProfileSupport,\n    getVideoCodecHighestLevelSupport,\n    getVideoCodecHighestBitDepthSupport,\n    type Resolution,\n    getMaxResolutionSupported,\n    getVideoCodecMinimumBitDepth,\n    getVideoRangeSupport\n} from './codecSupportHelper';\n\n/**\n * Create and return a new ProfileCondition\n * @param Property - What property the condition should test.\n * @param Condition - The condition to test the values for.\n * @param Value - The value to compare against.\n * @param [IsRequired] - Don't permit unknown values\n * @returns A profile condition created from the parameters.\n */\nfunction createProfileCondition(\n    Property: ProfileConditionValue,\n    Condition: ProfileConditionType,\n    Value: string,\n    IsRequired = false\n): ProfileCondition {\n    return {\n       
 Condition,\n        IsRequired,\n        Property,\n        Value\n    };\n}\n\n/**\n * Get container profiles\n * @todo Why does this always return an empty array?\n * @returns Container profiles.\n */\nfunction getContainerProfiles(): ContainerProfile[] {\n    return [];\n}\n\n/**\n * Get direct play profiles\n * @returns Direct play profiles.\n */\nfunction getDirectPlayProfiles(): DirectPlayProfile[] {\n    const DirectPlayProfiles: DirectPlayProfile[] = [];\n\n    if (hasVideoSupport()) {\n        const mp4VideoCodecs = getSupportedMP4VideoCodecs();\n        const mp4AudioCodecs = getSupportedMP4AudioCodecs();\n        const webmVideoCodecs = getSupportedWebMVideoCodecs();\n        const webmAudioCodecs = getSupportedWebMAudioCodecs();\n\n        for (const codec of webmVideoCodecs) {\n            DirectPlayProfiles.push({\n                AudioCodec: webmAudioCodecs.join(','),\n                Container: 'webm',\n                Type: DlnaProfileType.Video,\n                VideoCodec: codec\n            });\n        }\n\n        DirectPlayProfiles.push({\n            AudioCodec: mp4AudioCodecs.join(','),\n            Container: 'mp4,m4v',\n            Type: DlnaProfileType.Video,\n            VideoCodec: mp4VideoCodecs.join(',')\n        });\n    }\n\n    const supportedAudio = getSupportedAudioCodecs();\n\n    // N.B. 
Supported audio formats and containers can be found here:\n    // https://developers.google.com/cast/docs/media#mp4_audio_only\n    for (const audioFormat of supportedAudio) {\n        switch (audioFormat.toLowerCase()) {\n            case 'mp3':\n                DirectPlayProfiles.push({\n                    AudioCodec: audioFormat,\n                    Container: 'mp3,mp4',\n                    Type: DlnaProfileType.Audio\n                });\n                break;\n            case 'opus':\n            case 'vorbis':\n                DirectPlayProfiles.push({\n                    AudioCodec: audioFormat,\n                    Container: 'ogg,webm',\n                    Type: DlnaProfileType.Audio\n                });\n                break;\n            case 'aac':\n                DirectPlayProfiles.push({\n                    AudioCodec: audioFormat,\n                    Container: 'm4a',\n                    Type: DlnaProfileType.Audio\n                });\n                break;\n            case 'flac':\n            case 'wav':\n            default:\n                DirectPlayProfiles.push({\n                    AudioCodec: audioFormat,\n                    Container: audioFormat,\n                    Type: DlnaProfileType.Audio\n                });\n                break;\n        }\n    }\n\n    return DirectPlayProfiles;\n}\n\n/**\n * Get codec profiles\n * @returns Codec profiles.\n */\nfunction getCodecProfiles(): CodecProfile[] {\n    const codecProfiles: CodecProfile[] = [];\n    const deviceHasVideo = hasVideoSupport();\n\n    const audioConditions: CodecProfile = {\n        Codec: 'flac',\n        Conditions: [\n            createProfileCondition(\n                ProfileConditionValue.AudioSampleRate,\n                ProfileConditionType.LessThanEqual,\n                '96000'\n            ),\n            createProfileCondition(\n                ProfileConditionValue.AudioBitDepth,\n                ProfileConditionType.LessThanEqual,\n            
    '24'\n            )\n        ],\n        Type: CodecType.Audio\n    };\n\n    codecProfiles.push(audioConditions);\n\n    // Google Cast does not support AAC 5.1, as officially stated by the Google team.\n    // Additionally, the Cast SDK seems to silently downmix anything that isn't Opus or Dolby codecs\n    // to stereo.\n    //\n    // Let the server decide how to handle the downmixing vs. transcoding trade-off instead by\n    // transmitting these limitations.\n    //\n    // See: https://issuetracker.google.com/issues/69112577#comment20\n    // See: https://issuetracker.google.com/issues/330548743\n    for (const audioCodec of getSupportedAudioCodecs()) {\n        switch (audioCodec) {\n            case 'opus':\n            case 'eac3':\n            case 'ac3':\n                continue;\n        }\n\n        const profileConditions: ProfileCondition[] = [\n            createProfileCondition(\n                ProfileConditionValue.AudioChannels,\n                ProfileConditionType.LessThanEqual,\n                '2'\n            )\n        ];\n\n        codecProfiles.push({\n            Codec: audioCodec,\n            Conditions: profileConditions,\n            Type: CodecType.Audio\n        });\n\n        if (deviceHasVideo) {\n            codecProfiles.push({\n                Codec: audioCodec,\n                Conditions: profileConditions,\n                Type: CodecType.VideoAudio\n            });\n        }\n    }\n\n    // If device is audio only, don't add all the video related stuff\n    if (!deviceHasVideo) {\n        return codecProfiles;\n    }\n\n    for (const videoCodec of getSupportedVideoCodecs()) {\n        const videoProfiles = getVideoProfileSupport(videoCodec);\n\n        if (videoProfiles.length === 0) {\n            continue;\n        }\n\n        const maxLevels: number[] = [];\n        const minBitDepths: number[] = [];\n        const maxBitDepths: number[] = [];\n        const maxResolutions: Resolution[] = [];\n        const 
videoRangeSets: Set<VideoRangeType>[] = [];\n\n        for (const videoProfile of videoProfiles) {\n            const maxVideoLevel =\n                getVideoCodecHighestLevelSupport(videoCodec, videoProfile) ?? 0;\n\n            const minBitDepth = getVideoCodecMinimumBitDepth(\n                videoCodec,\n                videoProfile\n            );\n\n            const maxBitDepth =\n                getVideoCodecHighestBitDepthSupport(\n                    videoCodec,\n                    videoProfile,\n                    maxVideoLevel\n                ) ?? 0;\n\n            const maxResolution = getMaxResolutionSupported(\n                videoCodec,\n                videoProfile,\n                maxVideoLevel,\n                maxBitDepth\n            );\n\n            const videoRangeSupport = getVideoRangeSupport(\n                videoCodec,\n                videoProfile,\n                maxVideoLevel\n            );\n\n            maxLevels.push(maxVideoLevel);\n            minBitDepths.push(minBitDepth);\n            maxBitDepths.push(maxBitDepth);\n            maxResolutions.push(maxResolution);\n            videoRangeSets.push(videoRangeSupport);\n        }\n\n        // If all other constraints are equal, merge into one condition. 
This\n        // is pretty common.\n        if (\n            maxLevels.every((l) => l === maxLevels[0]) &&\n            minBitDepths.every((b) => b === minBitDepths[0]) &&\n            maxBitDepths.every((b) => b === maxBitDepths[0]) &&\n            maxResolutions.every((r) => r.equals(maxResolutions[0])) &&\n            videoRangeSets.every(\n                (r) =>\n                    r.size === videoRangeSets[0].size &&\n                    [...r].every((v) => videoRangeSets[0].has(v))\n            )\n        ) {\n            const maxLevel = maxLevels[0];\n            const minBitDepth = minBitDepths[0];\n            const maxBitDepth = maxBitDepths[0];\n            const maxResolution = maxResolutions[0];\n            const videoRanges = videoRangeSets[0];\n\n            const profileConditions = [\n                createProfileCondition(\n                    ProfileConditionValue.IsAnamorphic,\n                    ProfileConditionType.NotEquals,\n                    'true'\n                ),\n                createProfileCondition(\n                    ProfileConditionValue.VideoProfile,\n                    ProfileConditionType.EqualsAny,\n                    videoProfiles.join('|')\n                ),\n                createProfileCondition(\n                    ProfileConditionValue.VideoLevel,\n                    ProfileConditionType.LessThanEqual,\n                    maxLevel.toString()\n                ),\n                createProfileCondition(\n                    ProfileConditionValue.VideoBitDepth,\n                    ProfileConditionType.GreaterThanEqual,\n                    minBitDepth.toString()\n                ),\n                createProfileCondition(\n                    ProfileConditionValue.VideoBitDepth,\n                    ProfileConditionType.LessThanEqual,\n                    maxBitDepth.toString()\n                ),\n                createProfileCondition(\n                    ProfileConditionValue.Width,\n                    
ProfileConditionType.LessThanEqual,\n                    maxResolution.width.toString()\n                ),\n                createProfileCondition(\n                    ProfileConditionValue.Height,\n                    ProfileConditionType.LessThanEqual,\n                    maxResolution.height.toString()\n                ),\n                createProfileCondition(\n                    ProfileConditionValue.VideoRangeType,\n                    ProfileConditionType.EqualsAny,\n                    [...videoRanges].join('|')\n                )\n            ];\n\n            codecProfiles.push({\n                Codec: videoCodec,\n                Conditions: profileConditions,\n                Type: CodecType.Video\n            });\n        } else {\n            // Different profiles of the same codec have different video profile\n            // constraints. Create a new codec profile for each.\n\n            for (let i = 0; i < videoProfiles.length; i++) {\n                const videoProfile = videoProfiles[i];\n                const maxLevel = maxLevels[i];\n                const minBitDepth = minBitDepths[i];\n                const maxBitDepth = maxBitDepths[i];\n                const maxResolution = maxResolutions[i];\n                const videoRanges = videoRangeSets[i];\n\n                const profileConditions = [\n                    createProfileCondition(\n                        ProfileConditionValue.IsAnamorphic,\n                        ProfileConditionType.NotEquals,\n                        'true'\n                    ),\n                    createProfileCondition(\n                        ProfileConditionValue.VideoProfile,\n                        ProfileConditionType.Equals,\n                        videoProfile\n                    ),\n                    createProfileCondition(\n                        ProfileConditionValue.VideoLevel,\n                        ProfileConditionType.LessThanEqual,\n                        maxLevel.toString()\n   
                 ),\n                    createProfileCondition(\n                        ProfileConditionValue.VideoBitDepth,\n                        ProfileConditionType.GreaterThanEqual,\n                        minBitDepth.toString()\n                    ),\n                    createProfileCondition(\n                        ProfileConditionValue.VideoBitDepth,\n                        ProfileConditionType.LessThanEqual,\n                        maxBitDepth.toString()\n                    ),\n                    createProfileCondition(\n                        ProfileConditionValue.Width,\n                        ProfileConditionType.LessThanEqual,\n                        maxResolution.width.toString()\n                    ),\n                    createProfileCondition(\n                        ProfileConditionValue.Height,\n                        ProfileConditionType.LessThanEqual,\n                        maxResolution.height.toString()\n                    ),\n                    createProfileCondition(\n                        ProfileConditionValue.VideoRangeType,\n                        ProfileConditionType.EqualsAny,\n                        [...videoRanges].join('|')\n                    )\n                ];\n\n                codecProfiles.push({\n                    Codec: videoCodec,\n                    Conditions: profileConditions,\n                    Type: CodecType.Video\n                });\n            }\n        }\n    }\n\n    const videoAudioConditions: CodecProfile = {\n        Conditions: [\n            // Apparently something like an audiotrack from a second source, not in the current mediasource.\n            // Input from multiple sources is not supported, so this feature is not allowed.\n            createProfileCondition(\n                ProfileConditionValue.IsSecondaryAudio,\n                ProfileConditionType.Equals,\n                'false'\n            )\n        ],\n        Type: CodecType.VideoAudio\n    };\n\n    
codecProfiles.push(videoAudioConditions);\n\n    return codecProfiles;\n}\n\n/**\n * Get transcoding profiles\n * @returns Transcoding profiles.\n */\nfunction getTranscodingProfiles(): TranscodingProfile[] {\n    const transcodingProfiles: TranscodingProfile[] = [];\n\n    const hlsAudioCodecs = getSupportedHLSAudioCodecs();\n\n    transcodingProfiles.push({\n        AudioCodec: hlsAudioCodecs.join(','),\n        BreakOnNonKeyFrames: false,\n        Container: 'ts',\n        Context: EncodingContext.Streaming,\n        MinSegments: 1,\n        Protocol: 'hls',\n        Type: DlnaProfileType.Audio\n    });\n\n    const supportedAudio = getSupportedAudioCodecs();\n\n    // audio only profiles here\n    for (const audioFormat of supportedAudio) {\n        transcodingProfiles.push({\n            AudioCodec: audioFormat,\n            Container: audioFormat,\n            Context: EncodingContext.Streaming,\n            Protocol: 'http',\n            Type: DlnaProfileType.Audio\n        });\n    }\n\n    // If device is audio only, don't add all the video related stuff\n    if (!hasVideoSupport()) {\n        return transcodingProfiles;\n    }\n\n    const hlsVideoCodecs = getSupportedHLSVideoCodecs();\n\n    if (hlsVideoCodecs.length > 0 && hlsAudioCodecs.length > 0) {\n        transcodingProfiles.push({\n            AudioCodec: hlsAudioCodecs.join(','),\n            BreakOnNonKeyFrames: false,\n            Container: 'mp4',\n            Context: EncodingContext.Streaming,\n            MinSegments: 1,\n            Protocol: 'hls',\n            Type: DlnaProfileType.Video,\n            VideoCodec: hlsVideoCodecs.map((codec) => codec as string).join(',')\n        });\n\n        // Currently, if there are any HLS codecs, stop early. 
This mimics the web client's\n        // behavior and works around a bug where the server may pick other single-codec containers\n        // because the audio codec needs less transcoding.\n        //\n        // In reality, we're only really losing out on the VPx codecs, which have middling compute\n        // to efficiency ratios anyways.\n        return transcodingProfiles;\n    }\n\n    const mp4VideoCodecs = getSupportedMP4VideoCodecs();\n    const mp4AudioCodecs = getSupportedMP4AudioCodecs();\n\n    if (mp4AudioCodecs.length > 0 && mp4VideoCodecs.length > 0) {\n        transcodingProfiles.push({\n            AudioCodec: mp4AudioCodecs.join(','),\n            Container: 'mp4',\n            Context: EncodingContext.Streaming,\n            MinSegments: 1,\n            Protocol: 'http',\n            Type: DlnaProfileType.Video,\n            VideoCodec: mp4VideoCodecs.join(',')\n        });\n    }\n\n    const webmAudioCodecs = getSupportedWebMAudioCodecs();\n    const webmVideoCodecs = getSupportedWebMVideoCodecs();\n\n    if (webmAudioCodecs.length > 0 && hlsVideoCodecs.length > 0) {\n        transcodingProfiles.push({\n            AudioCodec: webmAudioCodecs.join(','),\n            Container: 'webm',\n            Context: EncodingContext.Streaming,\n            Protocol: 'http',\n            Type: DlnaProfileType.Video,\n            VideoCodec: webmVideoCodecs.join(',')\n        });\n    }\n\n    return transcodingProfiles;\n}\n\n/**\n * Get subtitle profiles\n * @returns Subtitle profiles.\n */\nfunction getSubtitleProfiles(): SubtitleProfile[] {\n    const subProfiles: SubtitleProfile[] = [];\n\n    if (hasTextTrackSupport()) {\n        subProfiles.push({\n            Format: 'vtt',\n            Method: SubtitleDeliveryMethod.External\n        });\n\n        subProfiles.push({\n            Format: 'vtt',\n            Method: SubtitleDeliveryMethod.Hls\n        });\n    }\n\n    return subProfiles;\n}\n\n/**\n * Creates a device profile containing supported 
codecs for the active Cast device.\n * @param maxBitrate - maximum bitrate to be used by the server when streaming data\n * @returns Device profile.\n */\nexport function getDeviceProfile(maxBitrate: number): DeviceProfile {\n    // MaxStaticBitrate seems to be for offline sync only\n    const profile: DeviceProfile = {\n        MaxStaticBitrate: maxBitrate,\n        MaxStreamingBitrate: maxBitrate,\n        MusicStreamingTranscodingBitrate: Math.min(maxBitrate, 192000)\n    };\n\n    profile.DirectPlayProfiles = getDirectPlayProfiles();\n    profile.TranscodingProfiles = getTranscodingProfiles();\n    profile.ContainerProfiles = getContainerProfiles();\n    profile.CodecProfiles = getCodecProfiles();\n    profile.SubtitleProfiles = getSubtitleProfiles();\n\n    return profile;\n}\n"
  },
  {
    "path": "src/components/documentManager.ts",
    "content": "import type { BaseItemDto } from '@jellyfin/sdk/lib/generated-client';\nimport { getItemsApi, getUserLibraryApi } from '@jellyfin/sdk/lib/utils/api';\nimport { AppStatus } from '../types/appStatus';\nimport { parseISO8601Date, TicksPerSecond, ticksToSeconds } from '../helpers';\nimport { JellyfinApi } from './jellyfinApi';\nimport { hasVideoSupport } from './codecSupportHelper';\n\n// eslint-disable-next-line @typescript-eslint/no-extraneous-class\nexport abstract class DocumentManager {\n    // Duration between each backdrop switch in ms\n    private static backdropPeriodMs = 30000;\n    // Timer state - so that we don't start the interval more than necessary\n    private static backdropTimer: number | null = null;\n\n    private static status = AppStatus.Unset;\n\n    /**\n     * Hide the document body on chromecast audio to save resources\n     */\n    public static initialize(): void {\n        if (!hasVideoSupport()) {\n            document.body.style.display = 'none';\n        }\n    }\n\n    /**\n     * Set the background image for a html element, without preload.\n     * You should do the preloading first with preloadImage.\n     * @param element - HTML Element\n     * @param src - URL to the image or null to remove the active one\n     */\n    private static setBackgroundImage(\n        element: HTMLElement,\n        src: string | null\n    ): void {\n        if (src) {\n            element.style.backgroundImage = `url(${src})`;\n        } else {\n            element.style.backgroundImage = '';\n        }\n    }\n\n    /**\n     * Preload an image\n     * @param src - URL to the image or null\n     * @returns wait for the preload and return the url to use. 
Might be nulled after loading error.\n     */\n    private static preloadImage(src: string | null): Promise<string | null> {\n        if (src) {\n            return new Promise((resolve, reject) => {\n                const preload = new Image();\n\n                preload.src = src;\n                preload.addEventListener('load', () => {\n                    resolve(src);\n                });\n                preload.addEventListener('error', () => {\n                    // might also resolve and return null here, to have the caller take away the background.\n                    reject();\n                });\n            });\n        } else {\n            return Promise.resolve(null);\n        }\n    }\n\n    /**\n     * Get url for primary image for a given item\n     * @param item - to look up\n     * @returns url to image after preload\n     */\n    private static getPrimaryImageUrl(\n        item: BaseItemDto\n    ): Promise<string | null> {\n        let src: string | null = null;\n\n        if (item.AlbumPrimaryImageTag && item.AlbumId) {\n            src = JellyfinApi.createImageUrl(\n                item.AlbumId,\n                'Primary',\n                item.AlbumPrimaryImageTag\n            );\n        } else if (item.ImageTags?.Primary && item.Id) {\n            src = JellyfinApi.createImageUrl(\n                item.Id,\n                'Primary',\n                item.ImageTags.Primary\n            );\n        }\n\n        if (\n            item?.UserData?.PlayedPercentage &&\n            item?.UserData?.PlayedPercentage < 100 &&\n            !item.IsFolder &&\n            src != null\n        ) {\n            src += `&PercentPlayed=${item.UserData.PlayedPercentage}`;\n        }\n\n        return this.preloadImage(src);\n    }\n\n    /**\n     * Get url for logo image for a given item\n     * @param item - to look up\n     * @returns url to logo image after preload\n     */\n    private static getLogoUrl(item: BaseItemDto): Promise<string | null> 
{\n        let src: string | null = null;\n\n        if (item.ImageTags?.Logo && item.Id) {\n            src = JellyfinApi.createImageUrl(\n                item.Id,\n                'Logo',\n                item.ImageTags.Logo\n            );\n        } else if (item.ParentLogoItemId && item.ParentLogoImageTag) {\n            src = JellyfinApi.createImageUrl(\n                item.ParentLogoItemId,\n                'Logo',\n                item.ParentLogoImageTag\n            );\n        }\n\n        return this.preloadImage(src);\n    }\n\n    /**\n     * This fucntion takes an item and shows details about it\n     * on the details page. This happens when no media is playing,\n     * and the connected client is browsing the library.\n     * @param item - to show information about\n     * @returns for the page to load\n     */\n    public static async showItem(item: BaseItemDto): Promise<void> {\n        // no showItem for cc audio\n        if (!hasVideoSupport()) {\n            return;\n        }\n\n        // stop cycling backdrops\n        this.clearBackdropInterval();\n\n        const promises = [\n            this.getWaitingBackdropUrl(item),\n            this.getPrimaryImageUrl(item),\n            this.getLogoUrl(item)\n        ];\n\n        const urls = await Promise.all(promises);\n\n        requestAnimationFrame(() => {\n            this.setWaitingBackdrop(urls[0], item);\n            this.setDetailImage(urls[1]);\n            this.setLogo(urls[2]);\n\n            this.setOverview(item.Overview ?? null);\n            this.setGenres(item?.Genres?.join(' / ') ?? 
null);\n            this.setDisplayName(item);\n            this.setMiscInfo(item);\n\n            this.setRating(item);\n\n            if (item?.UserData?.Played) {\n                this.setPlayedIndicator(true);\n            } else if (item?.UserData?.UnplayedItemCount) {\n                this.setPlayedIndicator(item?.UserData?.UnplayedItemCount);\n            } else {\n                this.setPlayedIndicator(false);\n            }\n\n            if (\n                item?.UserData?.PlayedPercentage &&\n                item?.UserData?.PlayedPercentage < 100 &&\n                !item.IsFolder\n            ) {\n                this.setHasPlayedPercentage(false);\n                this.setPlayedPercentage(item.UserData.PlayedPercentage);\n            } else {\n                this.setHasPlayedPercentage(false);\n                this.setPlayedPercentage(0);\n            }\n\n            // Switch visible view!\n            this.setAppStatus(AppStatus.Details);\n        });\n    }\n\n    /**\n     * Set value of played indicator\n     * @param value - True = played, false = not visible, number = number of unplayed items\n     */\n    private static setPlayedIndicator(value: boolean | number): void {\n        const playedIndicatorOk = this.getElementById('played-indicator-ok');\n        const playedIndicatorValue = this.getElementById(\n            'played-indicator-value'\n        );\n\n        if (value === true) {\n            // All items played\n            this.setVisibility(playedIndicatorValue, false);\n            this.setVisibility(playedIndicatorOk, true);\n        } else if (value === false) {\n            // No indicator\n            this.setVisibility(playedIndicatorValue, false);\n            this.setVisibility(playedIndicatorOk, false);\n        } else {\n            // number\n            playedIndicatorValue.innerHTML = value.toString();\n            this.setVisibility(playedIndicatorValue, true);\n            this.setVisibility(playedIndicatorOk, 
false);\n        }\n    }\n\n    /**\n     * Show item, but from just the id number, not an actual item.\n     * Looks up the item and then calls showItem\n     * @param itemId - id of item to look up\n     * @returns promise that resolves when the item is shown\n     */\n    public static async showItemId(itemId: string): Promise<void> {\n        // no showItemId for cc audio\n        if (!hasVideoSupport()) {\n            return;\n        }\n\n        const response = await getUserLibraryApi(\n            JellyfinApi.jellyfinApi\n        ).getItem({\n            itemId\n        });\n\n        DocumentManager.showItem(response.data);\n    }\n\n    /**\n     * Update item rating elements\n     * @param item - to look up\n     */\n    private static setRating(item: BaseItemDto): void {\n        const starRating = this.getElementById('star-rating');\n        const starRatingValue = this.getElementById('star-rating-value');\n\n        if (item.CommunityRating != null) {\n            starRatingValue.innerHTML = item.CommunityRating.toFixed(1);\n            this.setVisibility(starRating, true);\n            this.setVisibility(starRatingValue, true);\n        } else {\n            this.setVisibility(starRating, false);\n            this.setVisibility(starRatingValue, false);\n        }\n\n        const criticRating = this.getElementById('critic-rating');\n        const criticRatingValue = this.getElementById('critic-rating-value');\n\n        if (item.CriticRating != null) {\n            const verdict = item.CriticRating >= 60 ? 'fresh' : 'rotten';\n\n            criticRating.classList.add(verdict);\n            criticRating.classList.remove(\n                verdict == 'fresh' ? 
'rotten' : 'fresh'\n            );\n\n            criticRatingValue.innerHTML = item.CriticRating.toString();\n\n            this.setVisibility(criticRating, true);\n            this.setVisibility(criticRatingValue, true);\n        } else {\n            this.setVisibility(criticRating, false);\n            this.setVisibility(criticRatingValue, false);\n        }\n    }\n\n    /**\n     * Set the status of the app, and switch the visible view\n     * to the corresponding one.\n     * @param status - to set\n     */\n    public static setAppStatus(status: AppStatus): void {\n        this.status = status;\n        document.body.className = status;\n    }\n\n    /**\n     * Get the status of the app\n     * @returns app status\n     */\n    public static getAppStatus(): AppStatus {\n        return this.status;\n    }\n\n    // BACKDROP LOGIC\n\n    /**\n     * Get url to the backdrop image, and return a preload promise.\n     * @param item - Item to use for waiting backdrop, null to remove it.\n     * @returns promise for the preload to complete\n     */\n    public static getWaitingBackdropUrl(\n        item: BaseItemDto | null\n    ): Promise<string | null> {\n        // no backdrop as a fallback\n        let src: string | null = null;\n\n        if (item != null) {\n            if (item.BackdropImageTags?.length && item.Id) {\n                // get first backdrop of image if applicable\n                src = JellyfinApi.createImageUrl(\n                    item.Id,\n                    'Backdrop',\n                    item.BackdropImageTags[0]\n                );\n            } else if (\n                item.ParentBackdropItemId &&\n                item.ParentBackdropImageTags?.length\n            ) {\n                // otherwise get first backdrop from parent\n                src = JellyfinApi.createImageUrl(\n                    item.ParentBackdropItemId,\n                    'Backdrop',\n                    item.ParentBackdropImageTags[0]\n                );\n 
           }\n        }\n\n        return this.preloadImage(src);\n    }\n\n    /**\n     * Backdrops are set on the waiting container.\n     * They are switched around every 30 seconds by default\n     * (governed by startBackdropInterval)\n     * @param src - Url to image\n     * @param item - Item to use for waiting backdrop, null to remove it.\n     */\n    public static async setWaitingBackdrop(\n        src: string | null,\n        item: BaseItemDto | null\n    ): Promise<void> {\n        let element: HTMLElement = this.querySelector(\n            '#waiting-container-backdrop'\n        );\n\n        this.setBackgroundImage(element, src);\n\n        element = this.getElementById('waiting-description');\n        element.innerHTML = item?.Name ?? '';\n    }\n\n    /**\n     * Set a random backdrop on the waiting container\n     * @returns promise waiting for the backdrop to be set\n     */\n    private static async setRandomUserBackdrop(): Promise<void> {\n        const response = await getItemsApi(JellyfinApi.jellyfinApi).getItems({\n            imageTypes: ['Backdrop'],\n            includeItemTypes: ['Movie', 'Series'],\n            limit: 1,\n            // Although we're limiting to what the user has access to,\n            // not everyone will want to see adult backdrops rotating on their TV.\n            maxOfficialRating: 'PG-13',\n            recursive: true,\n            sortBy: ['Random']\n        });\n\n        const result = response.data;\n\n        let src: string | null = null;\n        let item: BaseItemDto | null = null;\n\n        if (result.Items?.[0]) {\n            item = result.Items[0];\n            src = await DocumentManager.getWaitingBackdropUrl(item);\n        }\n\n        requestAnimationFrame(() => {\n            DocumentManager.setWaitingBackdrop(src, item);\n        });\n    }\n\n    /**\n     * Stop the backdrop rotation\n     */\n    public static clearBackdropInterval(): void {\n        if (this.backdropTimer !== null) {\n      
      clearInterval(this.backdropTimer);\n            this.backdropTimer = null;\n        }\n    }\n\n    /**\n     * Start the backdrop rotation, restart if running, stop if disabled\n     * @returns promise for the first backdrop to be set\n     */\n    public static async startBackdropInterval(): Promise<void> {\n        // no backdrop rotation for cc audio\n        if (!hasVideoSupport()) {\n            return;\n        }\n\n        // avoid running it multiple times\n        this.clearBackdropInterval();\n\n        this.backdropTimer = window.setInterval(\n            () => DocumentManager.setRandomUserBackdrop(),\n            this.backdropPeriodMs\n        );\n\n        await this.setRandomUserBackdrop();\n    }\n\n    /**\n     * Set background behind the media player,\n     * this is shown while the media is loading.\n     * @param item - to get backdrop from\n     */\n    public static setPlayerBackdrop(item: BaseItemDto): void {\n        // no backdrop rotation for cc audio\n        if (!hasVideoSupport()) {\n            return;\n        }\n\n        let backdropUrl: string | null = null;\n\n        if (item.BackdropImageTags?.length && item.Id) {\n            backdropUrl = JellyfinApi.createImageUrl(\n                item.Id,\n                'Backdrop',\n                item.BackdropImageTags[0]\n            );\n        } else if (\n            item.ParentBackdropItemId &&\n            item.ParentBackdropImageTags?.length\n        ) {\n            backdropUrl = JellyfinApi.createImageUrl(\n                item.ParentBackdropItemId,\n                'Backdrop',\n                item.ParentBackdropImageTags[0]\n            );\n        }\n\n        if (backdropUrl != null) {\n            window.mediaElement?.style.setProperty(\n                '--background-image',\n                `url(\"${backdropUrl}\")`\n            );\n        } else {\n            window.mediaElement?.style.removeProperty('--background-image');\n        }\n    }\n    /* /BACKDROP 
LOGIC */\n\n    /**\n     * Set the URL to the item logo, or null to remove it\n     * @param src - Source url or null\n     */\n    public static setLogo(src: string | null): void {\n        const element: HTMLElement = this.querySelector('.detailLogo');\n\n        this.setBackgroundImage(element, src);\n    }\n\n    /**\n     * Set the URL to the item banner image (I think?),\n     * or null to remove it\n     * @param src - Source url or null\n     */\n    public static setDetailImage(src: string | null): void {\n        const element: HTMLElement = this.querySelector('.detailImage');\n\n        this.setBackgroundImage(element, src);\n    }\n\n    /**\n     * Set the human readable name for an item\n     *\n     * This combines the old statement setDisplayName(getDisplayName(item))\n     * into setDisplayName(item).\n     * @param item - source for the displayed name\n     */\n    private static setDisplayName(item: BaseItemDto): void {\n        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n        const name: string = item.EpisodeTitle ?? 
item.Name!;\n\n        let displayName: string = name;\n\n        if (item.Type == 'TvChannel') {\n            if (item.Number) {\n                displayName = `${item.Number} ${name}`;\n            }\n        } else if (\n            item.Type == 'Episode' &&\n            item.IndexNumber != null &&\n            item.ParentIndexNumber != null\n        ) {\n            let episode = `S${item.ParentIndexNumber}, E${item.IndexNumber}`;\n\n            if (item.IndexNumberEnd) {\n                episode += `-${item.IndexNumberEnd}`;\n            }\n\n            displayName = `${episode} - ${name}`;\n        }\n\n        const element = this.querySelector('.displayName');\n\n        element.innerHTML = displayName || '';\n    }\n\n    /**\n     * Set the html of the genres container\n     * @param name - String/html for genres box, null to empty\n     */\n    private static setGenres(name: string | null): void {\n        const element = this.querySelector('.genres');\n\n        element.innerHTML = name ?? '';\n    }\n\n    /**\n     * Set the html of the overview container\n     * @param name - string or html to insert\n     */\n    private static setOverview(name: string | null): void {\n        const element = this.querySelector('.overview');\n\n        element.innerHTML = name ?? '';\n    }\n\n    /**\n     * Set the progress of the progress bar in the\n     * item details page. 
(Not the same as the playback ui)\n     * @param value - Percentage to set\n     */\n    private static setPlayedPercentage(value = 0): void {\n        const element = this.querySelector(\n            '.itemProgressBar'\n        ) as HTMLProgressElement;\n\n        element.value = value;\n    }\n\n    /**\n     * Set the visibility of the item progress bar in the\n     * item details page\n     * @param value - If true, show progress on details page\n     */\n    private static setHasPlayedPercentage(value: boolean): void {\n        const element = this.querySelector('.detailImageProgressContainer');\n\n        if (value) {\n            element.classList.remove('d-none');\n        } else {\n            element.classList.add('d-none');\n        }\n    }\n\n    /**\n     * Get a human readable representation of the current position\n     * in ticks\n     * @param ticks - tick position\n     * @returns human readable position\n     */\n    private static formatRunningTime(ticks: number): string {\n        const ticksPerMinute = TicksPerSecond * 60;\n        const ticksPerHour = ticksPerMinute * 60;\n\n        const parts: string[] = [];\n\n        const hours: number = Math.floor(ticks / ticksPerHour);\n\n        if (hours) {\n            parts.push(hours.toString());\n        }\n\n        ticks -= hours * ticksPerHour;\n\n        const minutes: number = Math.floor(ticks / ticksPerMinute);\n\n        ticks -= minutes * ticksPerMinute;\n\n        if (minutes < 10 && hours) {\n            parts.push(`0${minutes.toString()}`);\n        } else {\n            parts.push(minutes.toString());\n        }\n\n        const seconds: number = Math.floor(ticksToSeconds(ticks));\n\n        if (seconds < 10) {\n            parts.push(`0${seconds.toString()}`);\n        } else {\n            parts.push(seconds.toString());\n        }\n\n        return parts.join(':');\n    }\n\n    /**\n     * Set information about mostly episodes or series\n     * on the item details page\n     * 
@param item - to look up\n     */\n    private static setMiscInfo(item: BaseItemDto): void {\n        const info: string[] = [];\n\n        if (item.Type == 'Episode') {\n            if (item.PremiereDate) {\n                try {\n                    info.push(\n                        parseISO8601Date(item.PremiereDate).toLocaleDateString()\n                    );\n                } catch {\n                    console.log(`Error parsing date: ${item.PremiereDate}`);\n                }\n            }\n        }\n\n        if (item.StartDate) {\n            try {\n                info.push(\n                    parseISO8601Date(item.StartDate).toLocaleDateString()\n                );\n            } catch {\n                console.log(`Error parsing date: ${item.PremiereDate}`);\n            }\n        }\n\n        if (item.ProductionYear && item.Type == 'Series') {\n            if (item.Status == 'Continuing') {\n                info.push(`${item.ProductionYear}-Present`);\n            } else if (item.ProductionYear) {\n                let text: string = item.ProductionYear.toString();\n\n                if (item.EndDate) {\n                    try {\n                        const endYear = parseISO8601Date(\n                            item.EndDate\n                        ).getFullYear();\n\n                        if (endYear != item.ProductionYear) {\n                            text += `-${parseISO8601Date(\n                                item.EndDate\n                            ).getFullYear()}`;\n                        }\n                    } catch {\n                        console.log(`Error parsing date: ${item.EndDate}`);\n                    }\n                }\n\n                info.push(text);\n            }\n        }\n\n        if (item.Type != 'Series' && item.Type != 'Episode') {\n            if (item.ProductionYear) {\n                info.push(item.ProductionYear.toString());\n            } else if (item.PremiereDate) {\n                
try {\n                    info.push(\n                        parseISO8601Date(item.PremiereDate)\n                            .getFullYear()\n                            .toString()\n                    );\n                } catch {\n                    console.log(`Error parsing date: ${item.PremiereDate}`);\n                }\n            }\n        }\n\n        let minutes;\n\n        if (item.RunTimeTicks && item.Type != 'Series') {\n            if (item.Type == 'Audio') {\n                info.push(this.formatRunningTime(item.RunTimeTicks));\n            } else {\n                minutes = item.RunTimeTicks / 600000000;\n                minutes = minutes || 1;\n                info.push(`${Math.round(minutes)}min`);\n            }\n        }\n\n        if (\n            item.OfficialRating &&\n            item.Type !== 'Season' &&\n            item.Type !== 'Episode'\n        ) {\n            info.push(item.OfficialRating);\n        }\n\n        if (item.Video3DFormat) {\n            info.push('3D');\n        }\n\n        const element = this.getElementById('miscInfo');\n\n        element.innerHTML = info.join('&nbsp;&nbsp;&nbsp;&nbsp;');\n    }\n\n    // Generic / Helper functions\n    /**\n     * Set the visibility of an element\n     * @param element - Element to set visibility on\n     * @param visible - True if the element should be visible.\n     */\n    private static setVisibility(element: HTMLElement, visible: boolean): void {\n        if (visible) {\n            element.classList.remove('d-none');\n        } else {\n            element.classList.add('d-none');\n        }\n    }\n\n    /**\n     * Get a HTMLElement from id or throw an error\n     * @param id - ID to look up\n     * @returns HTML Element\n     */\n    private static getElementById(id: string): HTMLElement {\n        const element = document.getElementById(id);\n\n        if (!element) {\n            throw new ReferenceError(`Cannot find element ${id} by id`);\n        }\n\n        
return element;\n    }\n\n    /**\n     * Get a HTMLElement by class\n     * @param cls - Class to look up\n     * @returns HTML Element\n     */\n    private static querySelector(cls: string): HTMLElement {\n        const element: HTMLElement | null = document.querySelector(cls);\n\n        if (!element) {\n            throw new ReferenceError(`Cannot find element ${cls} by class`);\n        }\n\n        return element;\n    }\n}\n\ndocument.addEventListener('load', () => DocumentManager.initialize());\n"
  },
  {
    "path": "src/components/jellyfinActions.ts",
    "content": "import type {\n    BaseItemDto,\n    DeviceProfile,\n    LiveStreamResponse,\n    MediaSourceInfo,\n    PlaybackInfoDto,\n    PlaybackInfoResponse,\n    PlaybackProgressInfo\n} from '@jellyfin/sdk/lib/generated-client';\nimport {\n    getHlsSegmentApi,\n    getMediaInfoApi,\n    getPlaystateApi\n} from '@jellyfin/sdk/lib/utils/api';\nimport { getSenderReportingData, broadcastToMessageBus } from '../helpers';\nimport { AppStatus } from '../types/appStatus';\nimport { JellyfinApi } from './jellyfinApi';\nimport { DocumentManager } from './documentManager';\nimport { PlaybackManager, type PlaybackState } from './playbackManager';\nimport type {\n    BusMessageType,\n    JellyfinMediaInformationCustomData\n} from '~/types/global';\n\nlet pingInterval: number;\nlet lastTranscoderPing = 0;\n\n/**\n * Start the transcoder pinging.\n *\n * This is used to keep the transcode available during pauses\n * @param reportingParams - parameters to report to the server\n */\nfunction restartPingInterval(reportingParams: PlaybackProgressInfo): void {\n    stopPingInterval();\n\n    if (reportingParams.PlayMethod == 'Transcode') {\n        pingInterval = window.setInterval(() => {\n            if (reportingParams.PlaySessionId) {\n                pingTranscoder(reportingParams.PlaySessionId);\n            }\n        }, 1000);\n    }\n}\n\n/**\n * Stop the transcoder ping\n *\n * Needed to stop the pinging when it's not needed anymore\n */\nexport function stopPingInterval(): void {\n    if (pingInterval !== 0) {\n        clearInterval(pingInterval);\n        pingInterval = 0;\n    }\n}\n\n/**\n * Report to the server that playback has started.\n * @param state - playback state.\n * @param reportingParams - parameters to send to the server\n * @returns promise to wait for the request\n */\nexport async function reportPlaybackStart(\n    state: PlaybackState,\n    reportingParams: PlaybackProgressInfo\n): Promise<void> {\n    // it's just \"reporting\" that the playback 
is starting\n    // but it's also disabling the rotating backdrops\n    // in the line below.\n    // TODO move the responsibility to the caller.\n    DocumentManager.clearBackdropInterval();\n\n    broadcastToMessageBus({\n        //TODO: convert these to use a defined type in the type field\n        data: getSenderReportingData(state, reportingParams),\n        type: 'playbackstart'\n    });\n\n    restartPingInterval(reportingParams);\n\n    await getPlaystateApi(JellyfinApi.jellyfinApi).reportPlaybackStart({\n        playbackStartInfo: reportingParams\n    });\n}\n\n/**\n * Report to the server the progress of the playback.\n * @param state - playback state.\n * @param reportingParams - parameters for jellyfin\n * @param reportToServer - if jellyfin should be informed\n * @param broadcastEventName - name of event to send to the cast sender\n * @returns Promise for the http request\n */\nexport async function reportPlaybackProgress(\n    state: PlaybackState,\n    reportingParams: PlaybackProgressInfo,\n    reportToServer = true,\n    broadcastEventName: BusMessageType = 'playbackprogress'\n): Promise<void> {\n    broadcastToMessageBus({\n        data: getSenderReportingData(state, reportingParams),\n        type: broadcastEventName\n    });\n\n    if (reportToServer === false) {\n        return Promise.resolve();\n    }\n\n    restartPingInterval(reportingParams);\n    lastTranscoderPing = new Date().getTime();\n\n    await getPlaystateApi(JellyfinApi.jellyfinApi).reportPlaybackProgress({\n        playbackProgressInfo: reportingParams\n    });\n}\n\n/**\n * Report to the server that playback has stopped.\n * @param state - playback state.\n * @param reportingParams - parameters to send to the server\n * @returns promise for waiting for the request\n */\nexport async function reportPlaybackStopped(\n    state: PlaybackState,\n    reportingParams: PlaybackProgressInfo\n): Promise<void> {\n    stopPingInterval();\n\n    broadcastToMessageBus({\n        data: 
getSenderReportingData(state, reportingParams),\n        type: 'playbackstop'\n    });\n\n    await getPlaystateApi(JellyfinApi.jellyfinApi).reportPlaybackStopped({\n        playbackStopInfo: reportingParams\n    });\n}\n\n/**\n * This keeps the session alive when playback is paused by refreshing the server.\n * /Sessions/Playing/Progress does work but may not be called during pause.\n * The web client calls that during pause, but this endpoint gets the job done\n * as well.\n * @param playSessionId - the playback session ID to ping\n * @returns promise for waiting for the request\n */\nexport async function pingTranscoder(playSessionId: string): Promise<void> {\n    const now = new Date().getTime();\n\n    // 10s is the timeout value, so use half that to report often enough\n    if (now - lastTranscoderPing < 5000) {\n        console.debug('Skipping ping due to recent progress check-in');\n\n        return new Promise((resolve) => {\n            resolve(undefined);\n        });\n    }\n\n    lastTranscoderPing = new Date().getTime();\n\n    await getPlaystateApi(JellyfinApi.jellyfinApi).pingPlaybackSession({\n        playSessionId: playSessionId\n    });\n}\n\n/**\n * Update the context about the item we are playing.\n * @param customData - data to set on playback state.\n * @param serverItem - item that is playing\n */\nexport function load(\n    customData: JellyfinMediaInformationCustomData,\n    serverItem: BaseItemDto\n): void {\n    PlaybackManager.resetPlaybackScope();\n\n    const state = PlaybackManager.playbackState;\n\n    // These are set up in maincontroller.createMediaInformation\n    state.playSessionId = customData.playSessionId;\n    state.audioStreamIndex = customData.audioStreamIndex;\n    state.subtitleStreamIndex = customData.subtitleStreamIndex;\n    state.startPositionTicks = customData.startPositionTicks;\n    state.canSeek = customData.canSeek;\n    state.itemId = customData.itemId;\n    state.liveStreamId = customData.liveStreamId;\n    
state.mediaSourceId = customData.mediaSourceId;\n    state.playMethod = customData.playMethod;\n    state.runtimeTicks = customData.runtimeTicks;\n\n    state.item = serverItem;\n\n    DocumentManager.setAppStatus(AppStatus.Backdrop);\n    state.mediaType = serverItem?.MediaType;\n}\n\n/**\n * Tell the media manager to play and switch back into the correct view for Audio at least\n * It's really weird and I don't get the 20ms delay.\n *\n * I also don't get doing nothing based on the currently visible app status\n *\n * TODO: rename these\n * @param state - playback state.\n */\nexport function play(state: PlaybackState): void {\n    if (\n        DocumentManager.getAppStatus() == AppStatus.Backdrop ||\n        DocumentManager.getAppStatus() == AppStatus.PlayingWithControls ||\n        DocumentManager.getAppStatus() == AppStatus.Audio\n    ) {\n        setTimeout(() => {\n            window.playerManager.play();\n\n            if (state.mediaType == 'Audio') {\n                DocumentManager.setAppStatus(AppStatus.Audio);\n            } else {\n                DocumentManager.setAppStatus(AppStatus.PlayingWithControls);\n            }\n        }, 20);\n    }\n}\n\n/**\n * get PlaybackInfo\n * @param item - item\n * @param maxBitrate - maxBitrate\n * @param deviceProfile - deviceProfile\n * @param startPosition - startPosition\n * @param mediaSourceId - mediaSourceId\n * @param audioStreamIndex - audioStreamIndex\n * @param subtitleStreamIndex - subtitleStreamIndex\n * @param liveStreamId - liveStreamId\n * @returns promise\n */\nexport async function getPlaybackInfo(\n    item: BaseItemDto,\n    maxBitrate: number,\n    deviceProfile: DeviceProfile,\n    startPosition: number | null,\n    mediaSourceId: string | null,\n    audioStreamIndex: number | null,\n    subtitleStreamIndex: number | null,\n    liveStreamId: string | null = null\n): Promise<PlaybackInfoResponse> {\n    if (!item.Id) {\n        console.error('getPlaybackInfo: Item ID not provided');\n\n       
 return Promise.reject('Item ID not available.');\n    }\n\n    const query: PlaybackInfoDto = {\n        DeviceProfile: deviceProfile,\n        MaxStreamingBitrate: maxBitrate,\n        StartTimeTicks: startPosition ?? 0\n    };\n\n    if (audioStreamIndex != null) {\n        query.AudioStreamIndex = audioStreamIndex;\n    }\n\n    if (subtitleStreamIndex != null) {\n        query.SubtitleStreamIndex = subtitleStreamIndex;\n    }\n\n    if (mediaSourceId) {\n        query.MediaSourceId = mediaSourceId;\n    }\n\n    if (liveStreamId) {\n        query.LiveStreamId = liveStreamId;\n    }\n\n    const response = await getMediaInfoApi(\n        JellyfinApi.jellyfinApi\n    ).getPostedPlaybackInfo({\n        itemId: item.Id,\n        playbackInfoDto: query\n    });\n\n    return response.data;\n}\n\n/**\n * get LiveStream\n * @param item - item\n * @param playSessionId - playSessionId\n * @param maxBitrate - maxBitrate\n * @param deviceProfile - deviceProfile\n * @param startPosition - startPosition\n * @param mediaSource - mediaSource\n * @param audioStreamIndex - audioStreamIndex\n * @param subtitleStreamIndex - subtitleStreamIndex\n * @returns promise\n */\nexport async function getLiveStream(\n    item: BaseItemDto,\n    playSessionId: string,\n    maxBitrate: number,\n    deviceProfile: DeviceProfile,\n    startPosition: number | null,\n    mediaSource: MediaSourceInfo,\n    audioStreamIndex: number | null,\n    subtitleStreamIndex: number | null\n): Promise<LiveStreamResponse> {\n    const liveStreamResponse = await getMediaInfoApi(\n        JellyfinApi.jellyfinApi\n    ).openLiveStream({\n        openLiveStreamDto: {\n            AudioStreamIndex: audioStreamIndex,\n            DeviceProfile: deviceProfile,\n            ItemId: item.Id,\n            MaxStreamingBitrate: maxBitrate,\n            OpenToken: mediaSource.OpenToken,\n            PlaySessionId: playSessionId,\n            StartTimeTicks: startPosition ?? 
0,\n            SubtitleStreamIndex: subtitleStreamIndex\n        }\n    });\n\n    return liveStreamResponse.data;\n}\n\n/**\n * Get download speed based on the jellyfin bitratetest api.\n * The API has a 10MB limit.\n * @param byteSize - number of bytes to request\n * @returns the bitrate in bits/s\n */\nexport async function getDownloadSpeed(byteSize: number): Promise<number> {\n    const now = new Date().getTime();\n\n    const response = await getMediaInfoApi(\n        JellyfinApi.jellyfinApi\n    ).getBitrateTestBytes(\n        {\n            size: byteSize\n        },\n        {\n            timeout: 5000\n        }\n    );\n\n    // Force javascript to download the whole response before calculating bitrate\n    await response.data;\n\n    const responseTimeSeconds = (new Date().getTime() - now) / 1000;\n    const bytesPerSecond = byteSize / responseTimeSeconds;\n    const bitrate = Math.round(bytesPerSecond * 8);\n\n    return bitrate;\n}\n\n/**\n * Function to detect the bitrate.\n * It starts at 500kB and doubles it every time it takes under 2s, for max 10MB.\n * This should get an accurate bitrate relatively fast on any connection\n * @param numBytes - Number of bytes to start with, default 500k\n * @returns bitrate in bits/s\n */\nexport async function detectBitrate(numBytes = 500000): Promise<number> {\n    // Jellyfin has a 10MB limit on the test size\n    const byteLimit = 10000000;\n\n    if (numBytes > byteLimit) {\n        numBytes = byteLimit;\n    }\n\n    const bitrate = await getDownloadSpeed(numBytes);\n\n    if (bitrate * (2 / 8.0) < numBytes || numBytes >= byteLimit) {\n        // took > 2s, or numBytes hit the limit\n        return Math.round(bitrate * 0.8);\n    } else {\n        // If that produced a fairly high speed, try again with a larger size to get a more accurate result\n        return await detectBitrate(numBytes * 2);\n    }\n}\n\n/**\n * Tell Jellyfin to kill off our active transcoding session\n * @param playSessionId - the 
play session ID to stop encoding\n * @returns Promise for the http request to go through\n */\nexport async function stopActiveEncodings(\n    playSessionId: string\n): Promise<void> {\n    await getHlsSegmentApi(JellyfinApi.jellyfinApi).stopEncodingProcess({\n        deviceId: JellyfinApi.deviceId,\n        playSessionId: playSessionId\n    });\n}\n"
  },
  {
    "path": "src/components/jellyfinApi.ts",
    "content": "import { Api, Jellyfin } from '@jellyfin/sdk';\nimport { version as packageVersion } from '../../package.json';\n\n// eslint-disable-next-line @typescript-eslint/no-extraneous-class\nexport abstract class JellyfinApi {\n    // Security token to prove authentication\n    public static accessToken: string | undefined;\n\n    // Address of server\n    public static serverAddress: string | undefined;\n\n    // device name\n    public static deviceName = 'Google Cast';\n\n    // unique id\n    public static deviceId = '';\n\n    // Jellyfin SDK\n    private static jellyfinSdk: Jellyfin | undefined;\n\n    // Jellyfin API\n    public static jellyfinApi: Api;\n\n    public static setServerInfo(\n        accessToken?: string,\n        serverAddress?: string,\n        receiverName = ''\n    ): void {\n        const regenApi =\n            this.accessToken !== accessToken ||\n            this.serverAddress !== serverAddress;\n\n        console.debug(\n            `JellyfinApi.setServerInfo: token:${accessToken}, server:${serverAddress}, name:${receiverName}`\n        );\n        this.accessToken = accessToken;\n        this.serverAddress = serverAddress;\n\n        if (receiverName) {\n            // remove special characters from the receiver name\n            receiverName = receiverName.replace(/[^\\w\\s]/gi, '');\n\n            this.deviceName = receiverName;\n            // deviceId just needs to be unique-ish\n            this.deviceId = btoa(receiverName);\n        } else {\n            const senders =\n                cast.framework.CastReceiverContext.getInstance().getSenders();\n\n            this.deviceName = 'Google Cast';\n            this.deviceId =\n                senders.length !== 0 && senders[0].id\n                    ? 
senders[0].id\n                    : new Date().getTime().toString();\n        }\n\n        this.jellyfinSdk ??= new Jellyfin({\n            clientInfo: {\n                name: 'Chromecast',\n                version: packageVersion\n            },\n            deviceInfo: {\n                id: this.deviceId,\n                name: this.deviceName\n            }\n        });\n\n        if (!this.jellyfinApi || regenApi) {\n            if (serverAddress && accessToken) {\n                this.jellyfinApi = this.jellyfinSdk.createApi(\n                    serverAddress,\n                    accessToken\n                );\n            } else {\n                console.error(\n                    'Server address or access token not provided - could not create instance of Jellyfin API'\n                );\n            }\n        }\n    }\n\n    // Create a basic url.\n    // Cannot start with /.\n    public static createUrl(path: string): string {\n        if (this.serverAddress === undefined) {\n            console.error('JellyfinApi.createUrl: no server address present');\n\n            return '';\n        }\n\n        // Remove leading slashes\n        while (path.startsWith('/')) {\n            path = path.substring(1);\n        }\n\n        return `${this.serverAddress}/${path}`;\n    }\n\n    /**\n     * Create url to image\n     * @param itemId - Item id\n     * @param imgType - Image type: Primary, Logo, Backdrop\n     * @param imgTag - Image tag\n     * @param imgIdx - Image index, default 0\n     * @returns URL\n     */\n    public static createImageUrl(\n        itemId: string,\n        imgType: string,\n        imgTag: string,\n        imgIdx = 0\n    ): string {\n        return this.createUrl(\n            `Items/${itemId}/Images/${imgType}/${imgIdx.toString()}?tag=${imgTag}`\n        );\n    }\n}\n"
  },
  {
    "path": "src/components/maincontroller.ts",
    "content": "import type {\n    BaseItemDto,\n    MediaStream,\n    MediaSourceInfo\n} from '@jellyfin/sdk/lib/generated-client';\nimport { getSessionApi, getUserLibraryApi } from '@jellyfin/sdk/lib/utils/api';\nimport {\n    getCurrentPositionTicks,\n    getReportingParams,\n    getMetadata,\n    getStreamByIndex,\n    getShuffleItems,\n    getInstantMixItems,\n    translateRequestedItems,\n    broadcastToMessageBus,\n    ticksToSeconds,\n    TicksPerSecond\n} from '../helpers';\nimport {\n    reportPlaybackStart,\n    reportPlaybackProgress,\n    reportPlaybackStopped,\n    play,\n    detectBitrate\n} from './jellyfinActions';\nimport { getDeviceProfile } from './deviceprofileBuilder';\nimport { JellyfinApi } from './jellyfinApi';\nimport { PlaybackManager, type PlaybackState } from './playbackManager';\nimport { CommandHandler } from './commandHandler';\nimport { getMaxBitrateSupport } from './codecSupportHelper';\nimport type { BusMessageType, PlayRequest, StreamInfo } from '~/types/global';\n\nwindow.castReceiverContext = cast.framework.CastReceiverContext.getInstance();\nwindow.playerManager = window.castReceiverContext.getPlayerManager();\n\nPlaybackManager.setPlayerManager(window.playerManager);\n\nCommandHandler.configure(window.playerManager);\n\nPlaybackManager.resetPlaybackScope();\n\nlet broadcastToServer = new Date();\n\nlet hasReportedCapabilities = false;\n\n/**\n * onMediaElementTimeUpdate\n */\nexport function onMediaElementTimeUpdate(): void {\n    if (PlaybackManager.playbackState.isChangingStream) {\n        return;\n    }\n\n    const now = new Date();\n\n    const elapsed = now.valueOf() - broadcastToServer.valueOf();\n    const playbackState = PlaybackManager.playbackState;\n\n    if (elapsed > 5000) {\n        // TODO use status as input\n        reportPlaybackProgress(\n            playbackState,\n            getReportingParams(playbackState)\n        );\n        broadcastToServer = now;\n    } else if (elapsed > 1500) {\n        // 
TODO use status as input\n        reportPlaybackProgress(\n            playbackState,\n            getReportingParams(playbackState),\n            false\n        );\n    }\n}\n\n/**\n * onMediaElementPause\n */\nexport function onMediaElementPause(): void {\n    if (PlaybackManager.playbackState.isChangingStream) {\n        return;\n    }\n\n    reportEvent('playstatechange', true);\n}\n\n/**\n * onMediaElementPlaying\n */\nexport function onMediaElementPlaying(): void {\n    if (PlaybackManager.playbackState.isChangingStream) {\n        return;\n    }\n\n    reportEvent('playstatechange', true);\n}\n\n/**\n * onMediaElementVolumeChange\n * @param event - event\n */\nfunction onMediaElementVolumeChange(event: framework.system.Event): void {\n    window.volume = (event as framework.system.SystemVolumeChangedEvent).data;\n    console.log(`Received volume update: ${window.volume.level}`);\n\n    if (JellyfinApi.serverAddress !== null) {\n        reportEvent('volumechange', true);\n    }\n}\n\n/**\n * enableTimeUpdateListener\n */\nexport function enableTimeUpdateListener(): void {\n    window.playerManager.addEventListener(\n        cast.framework.events.EventType.TIME_UPDATE,\n        onMediaElementTimeUpdate\n    );\n    window.castReceiverContext.addEventListener(\n        cast.framework.system.EventType.SYSTEM_VOLUME_CHANGED,\n        onMediaElementVolumeChange\n    );\n    window.playerManager.addEventListener(\n        cast.framework.events.EventType.PAUSE,\n        onMediaElementPause\n    );\n    window.playerManager.addEventListener(\n        cast.framework.events.EventType.PLAYING,\n        onMediaElementPlaying\n    );\n}\n\n/**\n * disableTimeUpdateListener\n */\nexport function disableTimeUpdateListener(): void {\n    window.playerManager.removeEventListener(\n        cast.framework.events.EventType.TIME_UPDATE,\n        onMediaElementTimeUpdate\n    );\n    window.castReceiverContext.removeEventListener(\n        
cast.framework.system.EventType.SYSTEM_VOLUME_CHANGED,\n        onMediaElementVolumeChange\n    );\n    window.playerManager.removeEventListener(\n        cast.framework.events.EventType.PAUSE,\n        onMediaElementPause\n    );\n    window.playerManager.removeEventListener(\n        cast.framework.events.EventType.PLAYING,\n        onMediaElementPlaying\n    );\n}\n\nenableTimeUpdateListener();\n\nwindow.addEventListener('beforeunload', () => {\n    disableTimeUpdateListener();\n});\n\nwindow.playerManager.addEventListener(\n    cast.framework.events.EventType.PLAY,\n    (): void => {\n        const playbackState = PlaybackManager.playbackState;\n\n        play(playbackState);\n        reportPlaybackProgress(\n            playbackState,\n            getReportingParams(playbackState)\n        );\n    }\n);\n\nwindow.playerManager.addEventListener(\n    cast.framework.events.EventType.PAUSE,\n    (): void => {\n        const playbackState = PlaybackManager.playbackState;\n\n        reportPlaybackProgress(\n            playbackState,\n            getReportingParams(playbackState)\n        );\n    }\n);\n\n/**\n * defaultOnStop\n */\nfunction defaultOnStop(): void {\n    PlaybackManager.onStop();\n}\n\nwindow.playerManager.addEventListener(\n    cast.framework.events.EventType.MEDIA_FINISHED,\n    async (mediaFinishedEvent): Promise<void> => {\n        const playbackState = PlaybackManager.playbackState;\n\n        // Don't notify server or client if changing streams, but notify next time.\n        if (!playbackState.isChangingStream) {\n            await reportPlaybackStopped(playbackState, {\n                ...getReportingParams(playbackState),\n                PositionTicks:\n                    (mediaFinishedEvent.currentMediaTime ??\n                        getCurrentPositionTicks(playbackState)) * TicksPerSecond\n            });\n\n            defaultOnStop();\n        } else {\n            playbackState.isChangingStream = false;\n        }\n    
}\n);\n\nwindow.playerManager.addEventListener(\n    cast.framework.events.EventType.ABORT,\n    defaultOnStop\n);\n\nwindow.playerManager.addEventListener(\n    cast.framework.events.EventType.ENDED,\n    (): void => {\n        const playbackState = PlaybackManager.playbackState;\n\n        // If we're changing streams, do not report playback ended.\n        if (playbackState.isChangingStream) {\n            return;\n        }\n\n        PlaybackManager.resetPlaybackScope();\n\n        if (!PlaybackManager.playNextItem()) {\n            PlaybackManager.resetPlaylist();\n            PlaybackManager.onStop();\n        }\n    }\n);\n\n// Notify of playback start as soon as the media is playing. Only then is the tick position good.\nwindow.playerManager.addEventListener(\n    cast.framework.events.EventType.PLAYING,\n    (): void => {\n        reportPlaybackStart(\n            PlaybackManager.playbackState,\n            getReportingParams(PlaybackManager.playbackState)\n        );\n    }\n);\n\n// Set the active subtitle track once the player has loaded\nwindow.playerManager.addEventListener(\n    cast.framework.events.EventType.PLAYER_LOAD_COMPLETE,\n    () => {\n        setTextTrack(\n            window.playerManager.getMediaInformation()?.customData\n                ?.subtitleStreamIndex ?? 
null\n        );\n    }\n);\n\n/**\n * reportDeviceCapabilities\n * @returns Promise\n */\nexport async function reportDeviceCapabilities(): Promise<void> {\n    const maxBitrate = await getMaxBitrate();\n    const deviceProfile = getDeviceProfile(maxBitrate);\n\n    hasReportedCapabilities = true;\n\n    await getSessionApi(JellyfinApi.jellyfinApi).postFullCapabilities({\n        clientCapabilitiesDto: {\n            DeviceProfile: deviceProfile,\n            PlayableMediaTypes: ['Audio', 'Video'],\n            SupportsMediaControl: true,\n            SupportsPersistentIdentifier: false\n        }\n    });\n}\n\n/**\n * processMessage\n * @param data - data\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function processMessage(data: any): void {\n    if (!data.command || !data.serverAddress || !data.accessToken) {\n        console.log('Invalid message sent from sender. Sending error response');\n\n        broadcastToMessageBus({\n            message:\n                'Missing one or more required params - command,options,userId,accessToken,serverAddress',\n            type: 'error'\n        });\n\n        return;\n    }\n\n    data.options = data.options ?? 
{};\n\n    // Items will have properties - Id, Name, Type, MediaType, IsFolder\n\n    JellyfinApi.setServerInfo(\n        data.accessToken,\n        data.serverAddress,\n        data.receiverName\n    );\n\n    if (data.subtitleAppearance) {\n        window.subtitleAppearance = data.subtitleAppearance;\n    }\n\n    if (data.maxBitrate) {\n        window.MaxBitrate = data.maxBitrate;\n    }\n\n    // Report device capabilities\n    if (!hasReportedCapabilities) {\n        reportDeviceCapabilities();\n    }\n\n    CommandHandler.processMessage(data, data.command);\n\n    if (window.reportEventType) {\n        const playbackState = PlaybackManager.playbackState;\n\n        const report = (): void => {\n            reportPlaybackProgress(\n                playbackState,\n                getReportingParams(playbackState)\n            );\n        };\n\n        reportPlaybackProgress(\n            playbackState,\n            getReportingParams(playbackState),\n            true,\n            window.reportEventType\n        );\n\n        setTimeout(report, 100);\n        setTimeout(report, 500);\n    }\n}\n\n/**\n * reportEvent\n * @param name - name\n * @param reportToServer - reportToServer\n * @returns Promise\n */\nexport function reportEvent(\n    name: BusMessageType,\n    reportToServer: boolean\n): Promise<void> {\n    const playbackState = PlaybackManager.playbackState;\n\n    return reportPlaybackProgress(\n        playbackState,\n        getReportingParams(playbackState),\n        reportToServer,\n        name\n    );\n}\n\n/**\n * setSubtitleStreamIndex\n * @param state - playback state.\n * @param index - index\n */\nexport function setSubtitleStreamIndex(\n    state: PlaybackState,\n    index: number\n): void {\n    console.log(`setSubtitleStreamIndex. 
index: ${index}`);\n\n    let positionTicks;\n\n    // FIXME: Possible index error when MediaStreams is undefined.\n    const currentSubtitleStream = state.mediaSource?.MediaStreams?.find(\n        (m: MediaStream) => {\n            return m.Index == state.subtitleStreamIndex && m.Type == 'Subtitle';\n        }\n    );\n\n    const currentDeliveryMethod = currentSubtitleStream\n        ? currentSubtitleStream.DeliveryMethod\n        : null;\n\n    if (index == -1 || index == null) {\n        // Need to change the stream to turn off the subs\n        if (currentDeliveryMethod == 'Encode') {\n            console.log('setSubtitleStreamIndex video url change required');\n            positionTicks = getCurrentPositionTicks(state);\n            changeStream(state, positionTicks, {\n                SubtitleStreamIndex: -1\n            });\n        } else {\n            state.subtitleStreamIndex = -1;\n            setTextTrack(null);\n        }\n\n        return;\n    }\n\n    const mediaStreams = state.PlaybackMediaSource?.MediaStreams ?? [];\n\n    const subtitleStream = getStreamByIndex(mediaStreams, 'Subtitle', index);\n\n    if (!subtitleStream) {\n        console.log(\n            'setSubtitleStreamIndex error condition - subtitle stream not found.'\n        );\n\n        return;\n    }\n\n    console.log(\n        `setSubtitleStreamIndex DeliveryMethod:${subtitleStream.DeliveryMethod}`\n    );\n\n    if (\n        subtitleStream.DeliveryMethod == 'External' ||\n        currentDeliveryMethod == 'Encode'\n    ) {\n        let textStreamUrl;\n\n        if (subtitleStream.IsExternal && subtitleStream.DeliveryUrl) {\n            textStreamUrl = subtitleStream.DeliveryUrl;\n        } else if (subtitleStream.DeliveryUrl) {\n            textStreamUrl = JellyfinApi.createUrl(subtitleStream.DeliveryUrl);\n        }\n\n        console.log(`Subtitle url: ${textStreamUrl}`);\n        setTextTrack(index);\n        state.subtitleStreamIndex = subtitleStream.Index ?? 
null;\n\n        return;\n    } else {\n        console.log('setSubtitleStreamIndex video url change required');\n        positionTicks = getCurrentPositionTicks(state);\n        changeStream(state, positionTicks, {\n            SubtitleStreamIndex: index\n        });\n    }\n}\n\n/**\n * setAudioStreamIndex\n * @param state - playback state.\n * @param index - index\n * @returns promise\n */\nexport function setAudioStreamIndex(\n    state: PlaybackState,\n    index: number\n): Promise<void> {\n    const positionTicks = getCurrentPositionTicks(state);\n\n    return changeStream(state, positionTicks, {\n        AudioStreamIndex: index\n    });\n}\n\n/**\n * seek\n * @param state - playback state.\n * @param ticks - ticks\n * @returns promise\n */\nexport function seek(state: PlaybackState, ticks: number): Promise<void> {\n    return changeStream(state, ticks);\n}\n\n/**\n * changeStream\n * @param state - playback state.\n * @param ticks - ticks\n * @param params - params\n * @returns promise\n */\nexport async function changeStream(\n    state: PlaybackState,\n    ticks: number,\n    params: any = undefined // eslint-disable-line @typescript-eslint/no-explicit-any\n): Promise<void> {\n    if (\n        window.playerManager.getMediaInformation()?.customData?.canClientSeek &&\n        params == null\n    ) {\n        window.playerManager.seek(ticksToSeconds(ticks));\n        reportPlaybackProgress(state, getReportingParams(state));\n\n        return Promise.resolve();\n    }\n\n    params = params ?? {};\n\n    // TODO Could be useful for garbage collection.\n    //      It needs to predict if the server side transcode needs\n    //      to restart.\n    //      Possibility: Always assume it will. 
Downside: VTT subs switching doesn't\n    //      need to restart the transcode.\n    //const requiresStoppingTranscoding = false;\n    //\n    //if (requiresStoppingTranscoding) {\n    //    window.playerManager.pause();\n    //    await stopActiveEncodings($scope.playSessionId);\n    //}\n\n    state.isChangingStream = true;\n\n    // @ts-expect-error is possible here\n    return await PlaybackManager.playItemInternal(state.item, {\n        audioStreamIndex: params.AudioStreamIndex ?? state.audioStreamIndex,\n        liveStreamId: state.liveStreamId,\n        mediaSourceId: state.mediaSourceId,\n        startPositionTicks: ticks,\n        subtitleStreamIndex:\n            params.SubtitleStreamIndex ?? state.subtitleStreamIndex\n    });\n}\n\n// Create a message handler for the custome namespace channel\n// TODO save namespace somewhere global?\nwindow.castReceiverContext.addCustomMessageListener(\n    'urn:x-cast:com.connectsdk',\n    // eslint-disable-next-line @typescript-eslint/no-explicit-any\n    (evt: any) => {\n        let data = evt.data;\n\n        // Apparently chromium likes to pass it as json, not as object.\n        // chrome on android works fine\n        if (typeof data === 'string') {\n            console.log('Event data is a string.. Chromium detected..');\n            data = JSON.parse(data);\n        }\n\n        data.options = data.options ?? 
{};\n        data.options.senderId = evt.senderId;\n        // TODO set it somewhere better perhaps\n        window.senderId = evt.senderId;\n\n        console.log(`Received message: ${JSON.stringify(data)}`);\n        processMessage(data);\n    }\n);\n\n/**\n * translateItems\n * @param data - data\n * @param options - options\n * @param method - method\n * @returns promise\n */\nexport async function translateItems(\n    data: any, // eslint-disable-line @typescript-eslint/no-explicit-any\n    options: PlayRequest,\n    method: string\n): Promise<void> {\n    const playNow = method != 'PlayNext' && method != 'PlayLast';\n\n    const result = await translateRequestedItems(options.items, playNow);\n\n    if (result.Items) {\n        options.items = result.Items;\n    }\n\n    if (method == 'PlayNext' || method == 'PlayLast') {\n        for (let i = 0, length = options.items.length; i < length; i++) {\n            PlaybackManager.enqueue(options.items[i]);\n        }\n    } else {\n        PlaybackManager.playFromOptions(data.options);\n    }\n}\n\n/**\n * instantMix\n * @param data - data\n * @param options - options\n * @param item - item\n * @returns promise\n */\nexport async function instantMix(\n    // eslint-disable-next-line @typescript-eslint/no-explicit-any\n    data: any,\n    options: PlayRequest,\n    item: BaseItemDto\n): Promise<void> {\n    const result = await getInstantMixItems(item);\n\n    options.items = result.Items ?? [];\n    PlaybackManager.playFromOptions(data.options);\n}\n\n/**\n * shuffle\n * @param data - data\n * @param options - options\n * @param item - item\n * @returns promise\n */\nexport async function shuffle(\n    // eslint-disable-next-line @typescript-eslint/no-explicit-any\n    data: any,\n    options: PlayRequest,\n    item: BaseItemDto\n): Promise<void> {\n    const result = await getShuffleItems(item);\n\n    options.items = result.Items ?? 
[];\n    PlaybackManager.playFromOptions(data.options);\n}\n\n/**\n * onStopPlayerBeforePlaybackDone\n * This function fetches the full information of an item before playing it.\n * Only item.Id needs to be set.\n * @param item - Item to look up\n * @param options - Extra information about how it should be played back.\n * @returns Promise waiting for the item to be loaded for playback\n */\nexport async function onStopPlayerBeforePlaybackDone(\n    item: BaseItemDto,\n    options: PlayRequest\n): Promise<void> {\n    if (item.Id) {\n        const response = await getUserLibraryApi(\n            JellyfinApi.jellyfinApi\n        ).getItem({\n            itemId: item.Id\n        });\n\n        PlaybackManager.playItemInternal(response.data, options);\n    }\n}\n\nlet lastBitrateDetect = 0;\nlet detectedBitrate = 0;\n/**\n * getMaxBitrate\n * @returns promise\n */\nexport async function getMaxBitrate(): Promise<number> {\n    console.log('getMaxBitrate');\n\n    if (window.MaxBitrate) {\n        console.log(`bitrate is set to ${window.MaxBitrate}`);\n\n        return window.MaxBitrate;\n    }\n\n    if (detectedBitrate && new Date().getTime() - lastBitrateDetect < 600000) {\n        console.log(\n            `returning previous detected bitrate of ${detectedBitrate}`\n        );\n\n        return detectedBitrate;\n    }\n\n    console.log('detecting bitrate');\n\n    const bitrate = await detectBitrate();\n\n    try {\n        console.log(`Max bitrate auto detected to ${bitrate}`);\n        lastBitrateDetect = new Date().getTime();\n        detectedBitrate = bitrate;\n\n        return Math.min(detectedBitrate, getMaxBitrateSupport());\n    } catch {\n        // The client can set this number\n        console.log('Error detecting bitrate, will return device maximum.');\n\n        return getMaxBitrateSupport();\n    }\n}\n\n/**\n * showPlaybackInfoErrorMessage\n * @param error - error\n */\nexport function showPlaybackInfoErrorMessage(error: string): void {\n    
broadcastToMessageBus({ message: error, type: 'playbackerror' });\n}\n\n/**\n * getOptimalMediaSource\n * @param versions - versions\n * @returns stream\n */\nexport function getOptimalMediaSource(\n    versions: MediaSourceInfo[]\n): MediaSourceInfo | null {\n    let optimalVersion = versions.find((v) => {\n        checkDirectPlay(v);\n\n        return v.SupportsDirectPlay;\n    });\n\n    optimalVersion ??= versions.find((v) => {\n        return v.SupportsDirectStream;\n    });\n\n    return (\n        optimalVersion ??\n        versions.find((s) => {\n            return s.SupportsTranscoding;\n        }) ??\n        null\n    );\n}\n\n// Disable direct play on non-http sources\n/**\n * checkDirectPlay\n * @param mediaSource - mediaSource\n */\nexport function checkDirectPlay(mediaSource: MediaSourceInfo): void {\n    if (\n        mediaSource.SupportsDirectPlay &&\n        mediaSource.Protocol == 'Http' &&\n        !mediaSource.RequiredHttpHeaders?.length\n    ) {\n        return;\n    }\n\n    mediaSource.SupportsDirectPlay = false;\n}\n\n/**\n * setTextTrack\n * @param index - index\n */\nexport function setTextTrack(index: number | null): void {\n    try {\n        const textTracksManager = window.playerManager.getTextTracksManager();\n\n        if (index == null) {\n            textTracksManager.setActiveByIds(null);\n\n            return;\n        }\n\n        const subtitleTrack = textTracksManager.getTrackById(index);\n\n        if (subtitleTrack?.trackId !== undefined) {\n            textTracksManager.setActiveByIds([subtitleTrack.trackId]);\n\n            const subtitleAppearance = window.subtitleAppearance;\n\n            if (subtitleAppearance) {\n                const textTrackStyle =\n                    new cast.framework.messages.TextTrackStyle();\n\n                if (subtitleAppearance.dropShadow != null) {\n                    // Empty string is DROP_SHADOW\n                    textTrackStyle.edgeType =\n                        
subtitleAppearance.dropShadow ||\n                        cast.framework.messages.TextTrackEdgeType.DROP_SHADOW;\n                    textTrackStyle.edgeColor = '#000000FF';\n                }\n\n                if (subtitleAppearance.font) {\n                    textTrackStyle.fontFamily = subtitleAppearance.font;\n                }\n\n                if (subtitleAppearance.textColor) {\n                    // Append the transparency, hardcoded to 100%\n                    textTrackStyle.foregroundColor = `${subtitleAppearance.textColor}FF`;\n                }\n\n                if (subtitleAppearance.textBackground === 'transparent') {\n                    textTrackStyle.backgroundColor = '#00000000'; // RGBA\n                }\n\n                switch (subtitleAppearance.textSize) {\n                    case 'smaller':\n                        textTrackStyle.fontScale = 0.6;\n                        break;\n                    case 'small':\n                        textTrackStyle.fontScale = 0.8;\n                        break;\n                    case 'large':\n                        textTrackStyle.fontScale = 1.15;\n                        break;\n                    case 'larger':\n                        textTrackStyle.fontScale = 1.3;\n                        break;\n                    case 'extralarge':\n                        textTrackStyle.fontScale = 1.45;\n                        break;\n                    default:\n                        textTrackStyle.fontScale = 1.0;\n                        break;\n                }\n\n                textTracksManager.setTextTrackStyle(textTrackStyle);\n            }\n        }\n    } catch (e) {\n        console.log(`Setting subtitle track failed: ${e}`);\n    }\n}\n\n/**\n * createMediaInformation\n * @param playSessionId - playSessionId\n * @param item - item\n * @param streamInfo - streamInfo\n * @returns media information\n */\nexport function createMediaInformation(\n    playSessionId: string,\n    
item: BaseItemDto,\n    streamInfo: StreamInfo\n): framework.messages.MediaInformation {\n    const mediaInfo = new cast.framework.messages.MediaInformation();\n\n    mediaInfo.contentId = streamInfo.url;\n    mediaInfo.contentType = streamInfo.contentType;\n    mediaInfo.customData = {\n        audioStreamIndex: streamInfo.audioStreamIndex,\n        canClientSeek: streamInfo.canClientSeek,\n        canSeek: streamInfo.canSeek,\n        itemId: item.Id,\n        liveStreamId: streamInfo.mediaSource?.LiveStreamId ?? null,\n        mediaSourceId: streamInfo.mediaSource?.Id ?? null,\n        playMethod: streamInfo.isStatic ? 'DirectStream' : 'Transcode',\n        playSessionId: playSessionId,\n        runtimeTicks: streamInfo.mediaSource?.RunTimeTicks ?? null,\n        startPositionTicks: streamInfo.startPositionTicks ?? 0,\n        subtitleStreamIndex: streamInfo.subtitleStreamIndex\n    };\n\n    mediaInfo.metadata = getMetadata(item);\n\n    mediaInfo.streamType = cast.framework.messages.StreamType.BUFFERED;\n    mediaInfo.tracks = streamInfo.tracks;\n\n    if (streamInfo.mediaSource?.RunTimeTicks) {\n        mediaInfo.duration = Math.floor(\n            ticksToSeconds(streamInfo.mediaSource.RunTimeTicks)\n        );\n    }\n\n    // If the client actually sets startPosition:\n    // if(streamInfo.startPosition)\n    //     mediaInfo.customData.startPositionTicks = streamInfo.startPosition\n\n    return mediaInfo;\n}\n\n// Set the available buttons in the UI controls.\nconst controls = cast.framework.ui.Controls.getInstance();\n\ncontrols.clearDefaultSlotAssignments();\n\n/* Disabled for now, dynamically set controls for each media type in the future.\n// Assign buttons to control slots.\ncontrols.assignButton(\n    cast.framework.ui.ControlsSlot.SLOT_SECONDARY_1,\n    cast.framework.ui.ControlsButton.CAPTIONS\n);*/\n\ncontrols.assignButton(\n    cast.framework.ui.ControlsSlot.SLOT_PRIMARY_1,\n    
cast.framework.ui.ControlsButton.SEEK_BACKWARD_15\n);\ncontrols.assignButton(\n    cast.framework.ui.ControlsSlot.SLOT_PRIMARY_2,\n    cast.framework.ui.ControlsButton.SEEK_FORWARD_15\n);\n\nconst options = new cast.framework.CastReceiverOptions();\n\n// Global variable set by Vite\nif (!import.meta.env.PROD) {\n    window.castReceiverContext.setLoggerLevel(cast.framework.LoggerLevel.DEBUG);\n    // Don't time out on me :(\n    // This is only normally allowed for non media apps, but in this case\n    // it's for debugging purposes.\n    options.disableIdleTimeout = true;\n    // This alternative seems to close sooner; I think it\n    // quits once the client closes the connection.\n    // options.maxInactivity = 3600;\n\n    options.shakaVariant = cast.framework.ShakaVariant.DEBUG;\n\n    window.playerManager.addEventListener(\n        cast.framework.events.category.CORE,\n        (event: framework.events.Event) => {\n            console.log(`Core event: ${event.type}`);\n            console.log(event);\n        }\n    );\n} else {\n    window.castReceiverContext.setLoggerLevel(cast.framework.LoggerLevel.NONE);\n}\n\noptions.useShakaForHls = true;\noptions.playbackConfig = new cast.framework.PlaybackConfig();\n// Set the player to start playback as soon as there are five seconds of\n// media content buffered. Default is 10.\noptions.playbackConfig.autoResumeDuration = 5;\noptions.supportedCommands = cast.framework.messages.Command.ALL_BASIC_MEDIA;\n\nconsole.log('Application is ready, starting system');\nwindow.castReceiverContext.start(options);\n"
  },
  {
    "path": "src/components/playbackManager.ts",
    "content": "import type {\n    BaseItemDto,\n    MediaSourceInfo,\n    PlaybackInfoResponse,\n    PlayMethod\n} from '@jellyfin/sdk/lib/generated-client';\nimport { RepeatMode } from '@jellyfin/sdk/lib/generated-client';\nimport { AppStatus } from '../types/appStatus';\nimport {\n    broadcastConnectionErrorMessage,\n    createStreamInfo,\n    ticksToSeconds\n} from '../helpers';\nimport { DocumentManager } from './documentManager';\nimport { getDeviceProfile } from './deviceprofileBuilder';\nimport {\n    getPlaybackInfo,\n    getLiveStream,\n    load,\n    stopPingInterval\n} from './jellyfinActions';\nimport {\n    onStopPlayerBeforePlaybackDone,\n    getMaxBitrate,\n    getOptimalMediaSource,\n    showPlaybackInfoErrorMessage,\n    checkDirectPlay,\n    createMediaInformation\n} from './maincontroller';\nimport type { ItemIndex, PlayRequest } from '~/types/global';\n\nexport interface PlaybackState {\n    startPositionTicks: number;\n    mediaType: string | null | undefined;\n    itemId: string | undefined;\n\n    audioStreamIndex: number | null;\n    subtitleStreamIndex: number | null;\n    mediaSource: MediaSourceInfo | null;\n    mediaSourceId: string | null;\n    PlaybackMediaSource: MediaSourceInfo | null;\n\n    playMethod: PlayMethod | undefined;\n    canSeek: boolean;\n    isChangingStream: boolean;\n    playNextItemBool: boolean;\n\n    item: BaseItemDto | null;\n    liveStreamId: string | null;\n    playSessionId: string;\n\n    runtimeTicks: number | null;\n}\n\n// eslint-disable-next-line @typescript-eslint/no-extraneous-class\nexport abstract class PlaybackManager {\n    private static playerManager: framework.PlayerManager;\n    private static activePlaylist: BaseItemDto[];\n    private static activePlaylistIndex: number;\n\n    static playbackState: PlaybackState = {\n        audioStreamIndex: null,\n        canSeek: false,\n        isChangingStream: false,\n        item: null,\n        itemId: '',\n        liveStreamId: '',\n        
mediaSource: null,\n        mediaSourceId: '',\n        mediaType: '',\n        PlaybackMediaSource: null,\n        playMethod: undefined,\n        playNextItemBool: true,\n        playSessionId: '',\n        runtimeTicks: 0,\n        startPositionTicks: 0,\n        subtitleStreamIndex: null\n    };\n\n    static setPlayerManager(playerManager: framework.PlayerManager): void {\n        // Parameters\n        this.playerManager = playerManager;\n        this.resetPlaylist();\n    }\n\n    /* This is used to check if we can switch to\n     * some other info overlay.\n     *\n     * Returns true when playing or paused.\n     * (before: true only when playing)\n     */\n    static isPlaying(): boolean {\n        return (\n            this.playerManager.getPlayerState() ===\n                cast.framework.messages.PlayerState.PLAYING ||\n            this.playerManager.getPlayerState() ===\n                cast.framework.messages.PlayerState.PAUSED\n        );\n    }\n\n    static isBuffering(): boolean {\n        return (\n            this.playerManager.getPlayerState() ===\n            cast.framework.messages.PlayerState.BUFFERING\n        );\n    }\n\n    static isIdle(): boolean {\n        return (\n            this.playerManager.getPlayerState() ===\n            cast.framework.messages.PlayerState.IDLE\n        );\n    }\n\n    static async playFromOptions(options: PlayRequest): Promise<void> {\n        const firstItem = options.items[0];\n\n        if (options.startPositionTicks || firstItem.MediaType !== 'Video') {\n            return this.playFromOptionsInternal(options);\n        }\n\n        return this.playFromOptionsInternal(options);\n    }\n\n    private static playFromOptionsInternal(\n        options: PlayRequest\n    ): Promise<void> {\n        const stopPlayer =\n            this.activePlaylist && this.activePlaylist.length > 0;\n\n        this.activePlaylist = options.items;\n        this.activePlaylistIndex = options.startIndex ?? 
0;\n\n        console.log('Loaded new playlist:', this.activePlaylist);\n\n        // When starting playback initially, don't use\n        // the next item facility.\n        return this.playItem(options, stopPlayer);\n    }\n\n    // add item to playlist\n    static enqueue(item: BaseItemDto): void {\n        this.activePlaylist.push(item);\n    }\n\n    static resetPlaylist(): void {\n        this.activePlaylistIndex = -1;\n        this.activePlaylist = [];\n    }\n\n    // If there are items in the queue after the current one\n    static hasNextItem(): boolean {\n        return this.activePlaylistIndex < this.activePlaylist.length - 1;\n    }\n\n    // If there are items in the queue before the current one\n    static hasPrevItem(): boolean {\n        return this.activePlaylistIndex > 0;\n    }\n\n    static playNextItem(stopPlayer = false): boolean {\n        const nextItemInfo = this.getNextPlaybackItemInfo();\n\n        if (nextItemInfo) {\n            this.activePlaylistIndex = nextItemInfo.index;\n            this.playItem({ items: [] }, stopPlayer);\n\n            return true;\n        }\n\n        return false;\n    }\n\n    static playPreviousItem(): boolean {\n        if (this.activePlaylist && this.activePlaylistIndex > 0) {\n            this.activePlaylistIndex--;\n            this.playItem({ items: [] }, true);\n\n            return true;\n        }\n\n        return false;\n    }\n\n    // play item from playlist\n    private static async playItem(\n        options: PlayRequest,\n        stopPlayer = false\n    ): Promise<void> {\n        if (stopPlayer) {\n            this.stop();\n        }\n\n        const item = this.activePlaylist[this.activePlaylistIndex];\n\n        console.log(`Playing index ${this.activePlaylistIndex}`, item);\n\n        return await onStopPlayerBeforePlaybackDone(item, options);\n    }\n\n    // Would set private, but some refactorings need to happen first.\n    static async playItemInternal(\n        item: BaseItemDto,\n  
      options: PlayRequest\n    ): Promise<void> {\n        DocumentManager.setAppStatus(AppStatus.Loading);\n\n        const maxBitrate = await getMaxBitrate();\n        const deviceProfile = getDeviceProfile(maxBitrate);\n        let playbackInfo: PlaybackInfoResponse = {};\n\n        try {\n            playbackInfo = await getPlaybackInfo(\n                item,\n                maxBitrate,\n                deviceProfile,\n                options.startPositionTicks ?? null,\n                options.mediaSourceId ?? null,\n                options.audioStreamIndex ?? null,\n                options.subtitleStreamIndex ?? null,\n                options.liveStreamId\n            );\n        } catch {\n            broadcastConnectionErrorMessage();\n        }\n\n        if (playbackInfo.ErrorCode) {\n            return showPlaybackInfoErrorMessage(playbackInfo.ErrorCode);\n        }\n\n        const mediaSource = getOptimalMediaSource(\n            playbackInfo.MediaSources ?? []\n        );\n\n        if (!mediaSource) {\n            return showPlaybackInfoErrorMessage('NoCompatibleStream');\n        }\n\n        let itemToPlay = mediaSource;\n\n        if (mediaSource.RequiresOpening && playbackInfo.PlaySessionId) {\n            const openLiveStreamResult = await getLiveStream(\n                item,\n                playbackInfo.PlaySessionId,\n                maxBitrate,\n                deviceProfile,\n                options.startPositionTicks ?? 
null,\n                mediaSource,\n                null,\n                null\n            );\n\n            if (openLiveStreamResult.MediaSource) {\n                checkDirectPlay(openLiveStreamResult.MediaSource);\n                itemToPlay = openLiveStreamResult.MediaSource;\n            }\n        }\n\n        if (playbackInfo.PlaySessionId) {\n            this.playMediaSource(\n                playbackInfo.PlaySessionId,\n                item,\n                itemToPlay,\n                options\n            );\n        }\n    }\n\n    private static playMediaSource(\n        playSessionId: string,\n        item: BaseItemDto,\n        mediaSource: MediaSourceInfo,\n        options: PlayRequest\n    ): void {\n        DocumentManager.setAppStatus(AppStatus.Loading);\n\n        const streamInfo = createStreamInfo(\n            item,\n            mediaSource,\n            options.startPositionTicks ?? null\n        );\n\n        const mediaInfo = createMediaInformation(\n            playSessionId,\n            item,\n            streamInfo\n        );\n        const loadRequestData = new cast.framework.messages.LoadRequestData();\n\n        loadRequestData.media = mediaInfo;\n        loadRequestData.autoplay = true;\n\n        const startPositionTicks =\n            mediaInfo.customData?.startPositionTicks ?? 
-1;\n\n        // If we should seek at the start, translate it\n        // to seconds and give it to loadRequestData :)\n        if (startPositionTicks > 0) {\n            loadRequestData.currentTime = ticksToSeconds(startPositionTicks);\n        }\n\n        const isChangingStream = this.playbackState.isChangingStream;\n\n        if (mediaInfo.customData) {\n            load(mediaInfo.customData, item);\n        }\n\n        this.playbackState.isChangingStream = isChangingStream;\n        this.playerManager.load(loadRequestData);\n\n        this.playbackState.PlaybackMediaSource = mediaSource;\n\n        console.log(`setting src to ${streamInfo.url}`);\n        this.playbackState.mediaSource = mediaSource;\n\n        DocumentManager.setPlayerBackdrop(item);\n\n        this.playbackState.audioStreamIndex = streamInfo.audioStreamIndex;\n        this.playbackState.subtitleStreamIndex = streamInfo.subtitleStreamIndex;\n\n        // We use false as we do not want to broadcast the new status yet\n        // we will broadcast manually when the media has been loaded, this\n        // is to be sure the duration has been updated in the media element\n        this.playerManager.setMediaInformation(mediaInfo, false);\n    }\n\n    /**\n     * stop playback, as requested by the client\n     */\n    static stop(): void {\n        this.playerManager.stop();\n        // onStop will be called when playback comes to a halt.\n    }\n\n    /**\n     * Called when media stops playing.\n     * TODO avoid doing this between tracks in a playlist\n     */\n    static onStop(): void {\n        if (this.getNextPlaybackItemInfo()) {\n            this.playbackState.playNextItemBool = true;\n        } else {\n            this.playbackState.playNextItemBool = false;\n\n            DocumentManager.setAppStatus(AppStatus.Waiting);\n\n            stopPingInterval();\n\n            DocumentManager.startBackdropInterval();\n        }\n    }\n\n    /**\n     * Get information about the next item to 
play from window.playlist\n     * @returns item and index, or null to end playback\n     */\n    static getNextPlaybackItemInfo(): ItemIndex | null {\n        if (this.activePlaylist.length < 1) {\n            return null;\n        }\n\n        let newIndex: number;\n\n        if (this.activePlaylistIndex < 0) {\n            // negative = play the first item\n            newIndex = 0;\n        } else {\n            switch (window.repeatMode) {\n                case RepeatMode.RepeatOne:\n                    newIndex = this.activePlaylistIndex;\n                    break;\n                case RepeatMode.RepeatAll:\n                    newIndex = this.activePlaylistIndex + 1;\n\n                    if (newIndex >= this.activePlaylist.length) {\n                        newIndex = 0;\n                    }\n\n                    break;\n                default:\n                    newIndex = this.activePlaylistIndex + 1;\n                    break;\n            }\n        }\n\n        if (newIndex < this.activePlaylist.length) {\n            return {\n                index: newIndex,\n                item: this.activePlaylist[newIndex]\n            };\n        }\n\n        return null;\n    }\n\n    /**\n     * Attempt to clean the receiver state.\n     */\n    static resetPlaybackScope(): void {\n        DocumentManager.setAppStatus(AppStatus.Waiting);\n\n        this.playbackState.startPositionTicks = 0;\n        DocumentManager.setWaitingBackdrop(null, null);\n        this.playbackState.mediaType = '';\n        this.playbackState.itemId = '';\n\n        this.playbackState.audioStreamIndex = null;\n        this.playbackState.subtitleStreamIndex = null;\n        this.playbackState.mediaSource = null;\n        this.playbackState.mediaSourceId = '';\n        this.playbackState.PlaybackMediaSource = null;\n\n        this.playbackState.playMethod = undefined;\n        this.playbackState.canSeek = false;\n        this.playbackState.isChangingStream = false;\n        
this.playbackState.playNextItemBool = true;\n\n        this.playbackState.item = null;\n        this.playbackState.liveStreamId = '';\n        this.playbackState.playSessionId = '';\n\n        // Detail content\n        DocumentManager.setLogo(null);\n        DocumentManager.setDetailImage(null);\n    }\n}\n"
  },
  {
    "path": "src/css/jellyfin.css",
    "content": "html,\nbody {\n    height: 100%;\n    width: 100%;\n}\n\nbody {\n    font-family: Quicksand, sans-serif;\n    font-weight: 300;\n    color: #ddd;\n    background-color: #000;\n    margin: 0;\n    padding: 0;\n}\n\n#waiting-container,\n#waiting-container-backdrop,\n.waiting > #video-player,\n.details > #video-player,\n.detailContent,\n.detailLogo {\n    /* There is an open bug on the chromecast, transitions are buggy and sometimes are not triggered.\n    opacity: 0;\n    -webkit-transition: opacity .25s ease-in-out;\n    transition: opacity .25s ease-in-out;\n    */\n    display: none;\n}\n\n.d-none {\n    display: none !important;\n}\n\n#waiting-container-backdrop {\n    position: absolute;\n    inset: 0;\n    background-color: #000;\n    background-position: center;\n    background-size: cover;\n    background-repeat: no-repeat;\n}\n\n#waiting-container {\n    background-position: center;\n    background-size: cover;\n    background-repeat: no-repeat;\n\n    /* Layer on top of the backdrop image: */\n    background-color: rgb(15 15 15 / 60%);\n    position: absolute;\n    inset: 0;\n    padding: 18px 32px;\n}\n\n.detailContent {\n    background-position: center;\n    background-size: cover;\n    background-repeat: no-repeat;\n    position: absolute;\n    inset: 0;\n    background-color: rgb(15 15 15 / 82%);\n}\n\n.detailLogo {\n    height: 50px;\n    width: 300px;\n    background-position: left top;\n    background-size: contain;\n    background-repeat: no-repeat;\n    position: absolute;\n    top: 35px;\n    left: 50px;\n}\n\n.detailImage {\n    background-position: left top;\n    background-size: contain;\n    background-repeat: no-repeat;\n    position: absolute;\n    top: 22%;\n    height: 63%;\n    left: 8%;\n    width: 20%;\n}\n\n.playedIndicator {\n    display: block;\n    position: absolute;\n    top: 5px;\n    right: 5px;\n    text-align: center;\n    width: 1.8vw;\n    height: 1.6vw;\n    padding-top: 0.1vw;\n    border-radius: 50%;\n    
color: #fff;\n    background: rgb(0 128 0 / 80%);\n    font-size: 1.1vw;\n}\n\n.playedIndicator img {\n    display: block;\n    width: 100%;\n    height: 100%;\n}\n\n.detailImageProgressContainer {\n    position: absolute;\n    bottom: 10px;\n    right: 0;\n    left: 0;\n    text-align: center;\n}\n\n.detailImageProgressContainer progress {\n    width: 100%;\n    margin: 0 auto;\n    height: 6px;\n}\n\n/* Chrome */\n.itemProgressBar::-webkit-progress-value {\n    border-radius: 0;\n    background-image: none;\n    background-color: #52b54b;\n}\n\n/* Polyfill */\n.itemProgressBar[aria-valuenow]::before {\n    border-radius: 0;\n    background-image: none;\n    background-color: #52b54b;\n}\n\n.itemProgressBar {\n    background: #000 !important;\n    appearance: none;\n    border: 0;\n    border: 0 solid #222;\n    border-radius: 0;\n}\n\n.detailInfo {\n    position: absolute;\n    top: 22%;\n    height: 63%;\n    left: 30.5%;\n    font-size: 1.2vw;\n    width: 60%;\n}\n\n.detailInfo p {\n    margin: 10px 0;\n}\n\n.detailRating {\n    margin: -4px 0 0;\n}\n\n.displayNameContainer {\n    margin-top: -6px !important;\n}\n\n.displayName {\n    font-size: 3vw;\n}\n\n#miscInfo {\n    font-size: 1.5vw;\n    margin-left: 2vw;\n}\n\n.starRating {\n    background-image: url('../img/stars.svg');\n    background-position: left center;\n    background-repeat: no-repeat;\n    background-size: cover;\n    width: 1.6vw;\n    height: 1.4vw;\n    display: inline-block;\n    vertical-align: text-bottom;\n    top: 6px;\n}\n\n.starRatingValue {\n    display: inline-block;\n    margin-left: 1px;\n}\n\n.rottentomatoesicon {\n    display: inline-block;\n    width: 1.4vw;\n    height: 1.4vw;\n    background-size: cover;\n    background-position: left center;\n    background-repeat: no-repeat;\n    vertical-align: text-bottom;\n    top: 6px;\n}\n\n.starRatingValue + .rottentomatoesicon {\n    margin-left: 1em;\n}\n\n.fresh {\n    background-image: url('../img/fresh.svg');\n}\n\n.rotten {\n   
 background-image: url('../img/rotten.svg');\n}\n\n.metascorehigh {\n    background-color: rgb(102 204 51 / 70%);\n}\n\n.metascoremid {\n    background-color: rgb(255 204 51 / 70%);\n}\n\n.metascorelow {\n    background-color: rgb(240 0 0 / 70%);\n}\n\n.criticRating + .metascore,\n.starRatingValue + .metascore {\n    margin-left: 1em;\n}\n\n.criticRating {\n    display: inline-block;\n    margin-left: 1px;\n}\n\n.overview {\n    max-height: 350px;\n    overflow: hidden;\n    text-overflow: ellipsis;\n}\n\n/* Container for \"ready to cast\" and the logo */\n.waitingContent {\n    position: fixed;\n    bottom: 0;\n    left: 0;\n    text-align: center;\n    font-size: 3vw;\n    margin-bottom: 3%;\n    margin-left: 5%;\n}\n\n/* Container for backdrop description */\n.waitingDescription {\n    position: fixed;\n    bottom: 0;\n    right: 0;\n    margin-right: 5%;\n    margin-bottom: 3%;\n    font-size: 1.5vw;\n}\n\n#waiting-container h1,\n#waiting-container h2 {\n    margin: 25px 0;\n}\n\n#waiting-container h1 {\n    font-size: 45px;\n    font-weight: 300;\n}\n\n/* stylelint-disable no-descending-specificity */\n.error-container h2,\n#waiting-container h2 {\n    font-size: 30px;\n    font-weight: 300;\n}\n/* stylelint-enable no-descending-specificity */\n\n/* jellyfin logo in the waiting container */\n#waiting-container .logo {\n    width: 4vw;\n    display: inline-block;\n    vertical-align: text-bottom;\n}\n\n.waiting > #waiting-container-backdrop,\n.waiting > #waiting-container,\n.details .detailContent,\n.details .detailLogo,\n.details #waiting-container-backdrop {\n    /* opacity: 1; */\n    display: initial;\n}\n\n/* stylelint-disable selector-type-no-unknown */\ncast-media-player {\n    --spinner-image: url('../img/spinner.png');\n    --playback-logo-image: url('../img/banner.svg');\n    --watermark-image: url('../img/banner.svg');\n    --watermark-size: 225px;\n    --watermark-position: top right;\n    --theme-hue: 195.3; /* Jellyfin blue */\n    
--progress-color: #00a4dc;\n}\n/* stylelint-enable selector-type-no-unknown */\n"
  },
  {
    "path": "src/helpers.ts",
    "content": "import type {\n    BaseItemDtoQueryResult,\n    PlaybackProgressInfo,\n    MediaSourceInfo,\n    MediaStream,\n    BaseItemDto,\n    BaseItemPerson,\n    TvShowsApiGetEpisodesRequest,\n    UserDto,\n    InstantMixApiGetInstantMixFromAlbumRequest,\n    InstantMixApiGetInstantMixFromPlaylistRequest,\n    InstantMixApiGetInstantMixFromArtistsRequest,\n    InstantMixApiGetInstantMixFromSongRequest,\n    ItemFields,\n    ItemsApiGetItemsRequest\n} from '@jellyfin/sdk/lib/generated-client';\nimport {\n    getInstantMixApi,\n    getItemsApi,\n    getTvShowsApi,\n    getUserApi\n} from '@jellyfin/sdk/lib/utils/api';\nimport type {\n    GenericMediaMetadata,\n    MovieMediaMetadata,\n    MusicTrackMediaMetadata,\n    PhotoMediaMetadata,\n    TvShowMediaMetadata\n} from 'chromecast-caf-receiver/cast.framework.messages';\nimport { JellyfinApi } from './components/jellyfinApi';\nimport {\n    PlaybackManager,\n    type PlaybackState\n} from './components/playbackManager';\nimport type { BusMessage, StreamInfo } from './types/global';\n\ntype InstantMixApiRequest =\n    | InstantMixApiGetInstantMixFromAlbumRequest\n    | InstantMixApiGetInstantMixFromArtistsRequest\n    | InstantMixApiGetInstantMixFromSongRequest\n    | InstantMixApiGetInstantMixFromPlaylistRequest;\n\nexport const TicksPerSecond = 10000000;\n\n/**\n * Get current playback position in ticks, adjusted for server seeking\n * @param state - playback state.\n * @returns position in ticks\n */\nexport function getCurrentPositionTicks(state: PlaybackState): number {\n    let positionTicks =\n        window.playerManager.getCurrentTimeSec() * TicksPerSecond;\n    const mediaInformation = window.playerManager.getMediaInformation();\n\n    if (mediaInformation && !mediaInformation.customData?.canClientSeek) {\n        positionTicks += state.startPositionTicks || 0;\n    }\n\n    return positionTicks;\n}\n\n/**\n * Get parameters used for playback reporting\n * @param state - playback state.\n * @returns 
progress information for use with the reporting APIs\n */\nexport function getReportingParams(state: PlaybackState): PlaybackProgressInfo {\n    /* Math.round() calls:\n     * on 10.7, any floating point will give an API error,\n     * so it's actually really important to make sure that\n     * those fields are always rounded.\n     */\n    return {\n        AudioStreamIndex: state.audioStreamIndex,\n        CanSeek: state.canSeek,\n        IsMuted: window.volume?.muted ?? false,\n        IsPaused:\n            window.playerManager.getPlayerState() ===\n            cast.framework.messages.PlayerState.PAUSED,\n        ItemId: state.itemId,\n        LiveStreamId: state.liveStreamId,\n        MediaSourceId: state.mediaSourceId,\n        PlayMethod: state.playMethod,\n        PlaySessionId: state.playSessionId,\n        PositionTicks: Math.round(getCurrentPositionTicks(state)),\n        RepeatMode: window.repeatMode,\n        SubtitleStreamIndex: state.subtitleStreamIndex,\n        VolumeLevel: Math.round((window.volume?.level ?? 0) * 100)\n    };\n}\n\n/**\n * getSenderReportingData\n * This is used in playback reporting to find out information\n * about the item that is currently playing. 
This is sent over the cast protocol over to\n * the connected client (or clients?).\n * @param playbackState - playback state.\n * @param reportingData - object full of random information\n * @returns lots of data for the connected client\n */\nexport function getSenderReportingData(\n    playbackState: PlaybackState,\n    reportingData: PlaybackProgressInfo\n    // eslint-disable-next-line @typescript-eslint/no-explicit-any\n): any {\n    // eslint-disable-next-line @typescript-eslint/no-explicit-any\n    const state: any = {\n        ItemId: reportingData.ItemId,\n        PlayState: reportingData\n    };\n\n    state.NowPlayingItem = {\n        Id: reportingData.ItemId,\n        RunTimeTicks: playbackState.runtimeTicks\n    };\n\n    const item = playbackState.item;\n\n    if (item) {\n        const nowPlayingItem = state.NowPlayingItem;\n\n        nowPlayingItem.ServerId = item.ServerId;\n        nowPlayingItem.Chapters = item.Chapters ?? [];\n\n        const mediaSource = item.MediaSources?.find((m: MediaSourceInfo) => {\n            return m.Id == reportingData.MediaSourceId;\n        });\n\n        nowPlayingItem.MediaStreams = mediaSource\n            ? mediaSource.MediaStreams\n            : [];\n\n        nowPlayingItem.MediaType = item.MediaType;\n        nowPlayingItem.Type = item.Type;\n        nowPlayingItem.Name = item.Name;\n\n        nowPlayingItem.IndexNumber = item.IndexNumber;\n        nowPlayingItem.IndexNumberEnd = item.IndexNumberEnd;\n        nowPlayingItem.ParentIndexNumber = item.ParentIndexNumber;\n        nowPlayingItem.ProductionYear = item.ProductionYear;\n        nowPlayingItem.PremiereDate = item.PremiereDate;\n        nowPlayingItem.SeriesName = item.SeriesName;\n        nowPlayingItem.Album = item.Album;\n        nowPlayingItem.Artists = item.Artists;\n\n        const imageTags = item.ImageTags ?? 
{};\n\n        if (item.SeriesPrimaryImageTag) {\n            nowPlayingItem.PrimaryImageItemId = item.SeriesId;\n            nowPlayingItem.PrimaryImageTag = item.SeriesPrimaryImageTag;\n        } else if (imageTags.Primary) {\n            nowPlayingItem.PrimaryImageItemId = item.Id;\n            nowPlayingItem.PrimaryImageTag = imageTags.Primary;\n        } else if (item.AlbumPrimaryImageTag) {\n            nowPlayingItem.PrimaryImageItemId = item.AlbumId;\n            nowPlayingItem.PrimaryImageTag = item.AlbumPrimaryImageTag;\n        }\n\n        if (item.BackdropImageTags?.length) {\n            nowPlayingItem.BackdropItemId = item.Id;\n            nowPlayingItem.BackdropImageTag = item.BackdropImageTags[0];\n        } else if (item.ParentBackdropImageTags?.length) {\n            nowPlayingItem.BackdropItemId = item.ParentBackdropItemId;\n            nowPlayingItem.BackdropImageTag = item.ParentBackdropImageTags[0];\n        }\n\n        if (imageTags.Thumb) {\n            nowPlayingItem.ThumbItemId = item.Id;\n            nowPlayingItem.ThumbImageTag = imageTags.Thumb;\n        }\n\n        if (imageTags.Logo) {\n            nowPlayingItem.LogoItemId = item.Id;\n            nowPlayingItem.LogoImageTag = imageTags.Logo;\n        } else if (item.ParentLogoImageTag) {\n            nowPlayingItem.LogoItemId = item.ParentLogoItemId;\n            nowPlayingItem.LogoImageTag = item.ParentLogoImageTag;\n        }\n\n        if (playbackState.playNextItemBool) {\n            const nextItemInfo = PlaybackManager.getNextPlaybackItemInfo();\n\n            if (nextItemInfo) {\n                state.NextMediaType = nextItemInfo.item.MediaType;\n            }\n        }\n    }\n\n    return state;\n}\n\n/**\n * Create CAF-native metadata for a given item\n * @param item - item to look up\n * @returns one of the metadata classes in cast.framework.messages.*Metadata\n */\nexport function getMetadata(\n    item: BaseItemDto\n):\n    | GenericMediaMetadata\n    | 
MovieMediaMetadata\n    | MusicTrackMediaMetadata\n    | PhotoMediaMetadata\n    | TvShowMediaMetadata {\n    let metadata:\n        | GenericMediaMetadata\n        | MovieMediaMetadata\n        | MusicTrackMediaMetadata\n        | PhotoMediaMetadata\n        | TvShowMediaMetadata;\n    let posterUrl = '';\n\n    if (item.SeriesPrimaryImageTag) {\n        posterUrl = JellyfinApi.createUrl(\n            `Items/${item.SeriesId}/Images/Primary?tag=${item.SeriesPrimaryImageTag}`\n        );\n    } else if (item.AlbumPrimaryImageTag) {\n        posterUrl = JellyfinApi.createUrl(\n            `Items/${item.AlbumId}/Images/Primary?tag=${item.AlbumPrimaryImageTag}`\n        );\n    } else if (item.ImageTags?.Primary) {\n        posterUrl = JellyfinApi.createUrl(\n            `Items/${item.Id}/Images/Primary?tag=${item.ImageTags.Primary}`\n        );\n    }\n\n    if (item.Type == 'Episode') {\n        const tvShowMedata = new cast.framework.messages.TvShowMediaMetadata();\n\n        tvShowMedata.seriesTitle = item.SeriesName ?? 
undefined;\n\n        if (item.PremiereDate) {\n            tvShowMedata.originalAirdate = parseISO8601Date(\n                item.PremiereDate\n            ).toISOString();\n        }\n\n        if (item.IndexNumber != null) {\n            tvShowMedata.episode = item.IndexNumber;\n        }\n\n        if (item.ParentIndexNumber != null) {\n            tvShowMedata.season = item.ParentIndexNumber;\n        }\n\n        metadata = tvShowMedata;\n    } else if (item.Type == 'Photo') {\n        const photoMetadata = new cast.framework.messages.PhotoMediaMetadata();\n\n        if (item.PremiereDate) {\n            photoMetadata.creationDateTime = parseISO8601Date(\n                item.PremiereDate\n            ).toISOString();\n        }\n        // TODO more metadata?\n\n        metadata = photoMetadata;\n    } else if (item.Type == 'Audio') {\n        const musicTrackMetadata =\n            new cast.framework.messages.MusicTrackMediaMetadata();\n\n        musicTrackMetadata.songName = item.Name ?? undefined;\n        musicTrackMetadata.artist = item.Artists?.length\n            ? item.Artists.join(', ')\n            : '';\n        musicTrackMetadata.albumArtist = item.AlbumArtist ?? undefined;\n        musicTrackMetadata.albumName = item.Album ?? undefined;\n\n        if (item.PremiereDate) {\n            musicTrackMetadata.releaseDate = parseISO8601Date(\n                item.PremiereDate\n            ).toISOString();\n        }\n\n        if (item.IndexNumber != null) {\n            musicTrackMetadata.trackNumber = item.IndexNumber;\n        }\n\n        if (item.ParentIndexNumber != null) {\n            musicTrackMetadata.discNumber = item.ParentIndexNumber;\n        }\n\n        // previously: p.PersonType == 'Type'.. wtf?\n        const composer = (item.People ?? 
[]).find(\n            (p: BaseItemPerson) => p.Type == 'Composer'\n        );\n\n        if (composer?.Name) {\n            musicTrackMetadata.composer = composer.Name;\n        }\n\n        metadata = musicTrackMetadata;\n    } else if (item.Type == 'Movie') {\n        const movieMetadata = new cast.framework.messages.MovieMediaMetadata();\n\n        if (item.PremiereDate) {\n            movieMetadata.releaseDate = parseISO8601Date(\n                item.PremiereDate\n            ).toISOString();\n        }\n\n        if (item.Studios?.length && item.Studios[0].Name) {\n            movieMetadata.studio = item.Studios[0].Name;\n        }\n\n        metadata = movieMetadata;\n    } else {\n        const genericMetadata =\n            new cast.framework.messages.GenericMediaMetadata();\n\n        if (item.PremiereDate) {\n            genericMetadata.releaseDate = parseISO8601Date(\n                item.PremiereDate\n            ).toISOString();\n        }\n\n        metadata = genericMetadata;\n    }\n\n    metadata.title = item.Name ?? '????';\n    metadata.images = [new cast.framework.messages.Image(posterUrl)];\n\n    return metadata;\n}\n\n/**\n * Check if a media source is an HLS stream\n * @param mediaSource - mediaSource\n * @returns boolean\n */\nexport function isHlsStream(mediaSource: MediaSourceInfo): boolean {\n    return mediaSource.TranscodingSubProtocol == 'hls';\n}\n\n/**\n * Create the necessary information about an item\n * needed for playback\n * @param item - Item to play\n * @param mediaSource - MediaSourceInfo for the item\n * @param startPosition - Where to seek to (possibly server seeking)\n * @returns object with enough information to start playback\n */\nexport function createStreamInfo(\n    item: BaseItemDto,\n    mediaSource: MediaSourceInfo,\n    startPosition: number | null\n): StreamInfo {\n    let mediaUrl;\n    let contentType;\n\n    // server seeking\n    const startPositionInSeekParam = startPosition\n        ? 
ticksToSeconds(startPosition)\n        : 0;\n    const seekParam = startPositionInSeekParam\n        ? `#t=${startPositionInSeekParam}`\n        : '';\n\n    let isStatic = false;\n    let streamContainer = mediaSource.Container;\n\n    let playerStartPositionTicks = 0;\n\n    const type = item.MediaType?.toLowerCase();\n\n    if (type == 'video') {\n        contentType = `video/${mediaSource.Container}`;\n\n        if (mediaSource.SupportsDirectPlay && mediaSource.Path) {\n            mediaUrl = mediaSource.Path;\n            isStatic = true;\n        } else if (mediaSource.SupportsDirectStream) {\n            mediaUrl = JellyfinApi.createUrl(\n                `videos/${item.Id}/stream.${mediaSource.Container}?mediaSourceId=${mediaSource.Id}&api_key=${JellyfinApi.accessToken}&static=true${seekParam}`\n            );\n            isStatic = true;\n            playerStartPositionTicks = startPosition ?? 0;\n        } else {\n            // TODO deal with !TranscodingUrl\n            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n            mediaUrl = JellyfinApi.createUrl(mediaSource.TranscodingUrl!);\n\n            if (isHlsStream(mediaSource)) {\n                mediaUrl += seekParam;\n                playerStartPositionTicks = startPosition ?? 0;\n                contentType = 'application/x-mpegURL';\n                streamContainer = 'm3u8';\n            } else {\n                contentType = `video/${mediaSource.TranscodingContainer}`;\n                streamContainer = mediaSource.TranscodingContainer;\n\n                if (mediaUrl.toLowerCase().includes('copytimestamps=true')) {\n                    startPosition = 0;\n                }\n            }\n        }\n    } else {\n        contentType = `audio/${mediaSource.Container}`;\n\n        if (mediaSource.SupportsDirectPlay && mediaSource.Path) {\n            mediaUrl = mediaSource.Path;\n            isStatic = true;\n            playerStartPositionTicks = startPosition ?? 
0;\n        } else {\n            const isDirectStream = mediaSource.SupportsDirectStream;\n\n            if (isDirectStream) {\n                const outputContainer = (\n                    mediaSource.Container ?? ''\n                ).toLowerCase();\n\n                mediaUrl = JellyfinApi.createUrl(\n                    `Audio/${item.Id}/stream.${outputContainer}?mediaSourceId=${mediaSource.Id}&api_key=${JellyfinApi.accessToken}&static=true${seekParam}`\n                );\n                isStatic = true;\n            } else {\n                streamContainer = mediaSource.TranscodingContainer;\n                contentType = `audio/${mediaSource.TranscodingContainer}`;\n\n                // TODO deal with !TranscodingUrl\n                // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n                mediaUrl = JellyfinApi.createUrl(mediaSource.TranscodingUrl!);\n            }\n        }\n    }\n\n    // TODO: Remove the second half of the expression by supporting changing the mediaElement src dynamically.\n    // It is a pain and will require unbinding all event handlers during the operation\n    const canSeek = (mediaSource.RunTimeTicks ?? 0) > 0;\n\n    const info: StreamInfo = {\n        audioStreamIndex: mediaSource.DefaultAudioStreamIndex ?? null,\n        canClientSeek: isStatic || (canSeek && streamContainer == 'm3u8'),\n        canSeek: canSeek,\n        contentType: contentType,\n        isStatic: isStatic,\n        mediaSource: mediaSource,\n        playerStartPositionTicks: playerStartPositionTicks,\n        startPositionTicks: startPosition,\n        streamContainer: streamContainer,\n        subtitleStreamIndex: mediaSource.DefaultSubtitleStreamIndex ?? null,\n        url: mediaUrl\n    };\n\n    const subtitleStreams =\n        mediaSource.MediaStreams?.filter((stream: MediaStream) => {\n            return stream.Type === 'Subtitle';\n        }) ?? 
[];\n    const subtitleTracks: framework.messages.Track[] = [];\n\n    subtitleStreams.forEach((subtitleStream) => {\n        if (subtitleStream.DeliveryUrl === undefined) {\n            /* The CAF v3 player only supports vtt currently,\n             * SRT subs can be \"transcoded\" to vtt by jellyfin.\n             * The server will do that in accordance with the device profiles and\n             * give us a DeliveryUrl if that is the case.\n             * Support for more could be added with a custom implementation\n             */\n            return;\n        }\n\n        if (!info.subtitleStreamIndex) {\n            return;\n        }\n\n        const track = new cast.framework.messages.Track(\n            info.subtitleStreamIndex,\n            cast.framework.messages.TrackType.TEXT\n        );\n\n        if (subtitleStream.IsExternal && subtitleStream.DeliveryUrl) {\n            track.trackContentId = subtitleStream.DeliveryUrl;\n        } else if (subtitleStream.DeliveryUrl) {\n            track.trackContentId = JellyfinApi.createUrl(\n                subtitleStream.DeliveryUrl\n            );\n        }\n\n        if (subtitleStream.Index) {\n            track.trackId = subtitleStream.Index;\n        }\n\n        if (subtitleStream.Language) {\n            track.language = subtitleStream.Language;\n        }\n\n        if (subtitleStream.DisplayTitle) {\n            track.name = subtitleStream.DisplayTitle;\n        }\n\n        // TODO this should not be hardcoded but we only support VTT currently\n        track.trackContentType = 'text/vtt';\n        track.subtype = cast.framework.messages.TextTrackType.SUBTITLES;\n        subtitleTracks.push(track);\n        console.log(`Subtitle url: ${info.subtitleStreamUrl}`);\n    });\n\n    info.tracks = subtitleTracks;\n\n    return info;\n}\n\n/**\n * Get stream by its index while making a type assertion\n * @param streams - array streams to consider\n * @param type - type of stream\n * @param index - index of 
stream\n * @returns first matching stream\n */\nexport function getStreamByIndex(\n    streams: MediaStream[],\n    type: string,\n    index: number\n): MediaStream {\n    return (\n        streams.find((s) => {\n            return s.Type == type && s.Index == index;\n        }) ?? {}\n    );\n}\n\n// defined for use in the 3 next functions\nconst requiredItemFields: ItemFields[] = ['MediaSources', 'Chapters'];\n\n/**\n * Get a random selection of items given one item,\n * this item can be a music artist item, or a music genre item,\n * or something else. If something else it searches for child items\n * of the provided one.\n *\n * It's used only in maincomponents.shuffle.\n * @param item - Parent item of shuffle search\n * @returns items for the queue\n */\nexport function getShuffleItems(\n    item: BaseItemDto\n): Promise<BaseItemDtoQueryResult> {\n    let query: ItemsApiGetItemsRequest = {\n        fields: requiredItemFields,\n        filters: ['IsNotFolder'],\n        limit: 50,\n        recursive: true,\n        sortBy: ['Random']\n    };\n\n    if (item.Type == 'MusicArtist') {\n        query = {\n            ...query,\n            artistIds: item.Id ? [item.Id] : undefined,\n            mediaTypes: ['Audio']\n        };\n    } else if (item.Type == 'MusicGenre') {\n        query = {\n            ...query,\n            genres: item.Name ? 
[item.Name] : undefined,\n            mediaTypes: ['Audio']\n        };\n    } else {\n        query = {\n            ...query,\n            parentId: item.Id\n        };\n    }\n\n    return getItemsForPlayback(query);\n}\n\n/**\n * Get an \"Instant Mix\" given an item, which can be a\n * music artist, genre, album, playlist\n * @param item - Parent item of the search\n * @returns items for the queue\n */\nexport async function getInstantMixItems(\n    item: BaseItemDto\n): Promise<BaseItemDtoQueryResult> {\n    if (item.Id === undefined) {\n        throw new Error('Item ID not provided');\n    }\n\n    const query: InstantMixApiRequest = {\n        fields: ['MediaSources', 'Chapters'],\n        itemId: item.Id,\n        limit: 50\n    };\n\n    const instantMixApi = getInstantMixApi(JellyfinApi.jellyfinApi);\n\n    if (item.Type == 'MusicArtist') {\n        return (await instantMixApi.getInstantMixFromArtists(query)).data;\n    } else if (item.Type == 'MusicGenre') {\n        return (\n            await instantMixApi.getInstantMixFromMusicGenreById({\n                ...query,\n                id: item.Id\n            })\n        ).data;\n    } else if (item.Type == 'MusicAlbum') {\n        return (await instantMixApi.getInstantMixFromAlbum(query)).data;\n    } else if (item.Type == 'Audio') {\n        return (await instantMixApi.getInstantMixFromSong(query)).data;\n    } else if (item.Type == 'Playlist') {\n        return (await instantMixApi.getInstantMixFromPlaylist(query)).data;\n    }\n\n    throw new Error(`InstantMix: Unknown item type: ${item.Type}`);\n}\n\n/**\n * Get items to be played back\n * @param query - specification on what to search for\n * @returns items to be played back\n */\nexport async function getItemsForPlayback(\n    query: ItemsApiGetItemsRequest\n): Promise<BaseItemDtoQueryResult> {\n    const response = await getItemsApi(JellyfinApi.jellyfinApi).getItems({\n        ...query,\n        excludeLocationTypes: ['Virtual'],\n        
fields: requiredItemFields,\n        limit: query.limit ?? 100\n    });\n\n    return response.data;\n}\n\n/**\n * Get episodes for a show given by seriesId\n * @param query - query parameters to build on\n * @returns episode items\n */\nexport async function getEpisodesForPlayback(\n    query: TvShowsApiGetEpisodesRequest\n): Promise<BaseItemDtoQueryResult> {\n    const response = await getTvShowsApi(JellyfinApi.jellyfinApi).getEpisodes(\n        query\n    );\n\n    return response.data;\n}\n\n/**\n * Get user object for the current user\n * @returns user object\n */\nexport async function getUser(): Promise<UserDto> {\n    const response = await getUserApi(JellyfinApi.jellyfinApi).getCurrentUser();\n\n    return response.data;\n}\n\n/**\n * Process a list of items for playback\n * by resolving things like folders to playable items.\n * @param items - items to resolve\n * @param smart - If enabled it will try to find the next episode given the current one,\n * if the connected user has enabled that in their settings\n * @returns Promise for search result containing items to play\n */\nexport async function translateRequestedItems(\n    items: BaseItemDto[],\n    smart = false\n): Promise<BaseItemDtoQueryResult> {\n    const firstItem = items[0];\n\n    if (firstItem.Type == 'Playlist') {\n        return await getItemsForPlayback({\n            parentId: firstItem.Id\n        });\n    } else if (firstItem.Type == 'MusicArtist') {\n        return await getItemsForPlayback({\n            artistIds: firstItem.Id ? [firstItem.Id] : undefined,\n            filters: ['IsNotFolder'],\n            mediaTypes: ['Audio'],\n            recursive: true,\n            sortBy: ['SortName']\n        });\n    } else if (firstItem.Type == 'MusicGenre') {\n        return await getItemsForPlayback({\n            filters: ['IsNotFolder'],\n            genres: firstItem.Name ? 
[firstItem.Name] : undefined,\n            mediaTypes: ['Audio'],\n            recursive: true,\n            sortBy: ['SortName']\n        });\n    } else if (firstItem.IsFolder) {\n        return await getItemsForPlayback({\n            filters: ['IsNotFolder'],\n            mediaTypes: ['Audio', 'Video'],\n            parentId: firstItem.Id,\n            recursive: true,\n            sortBy: ['SortName']\n        });\n    } else if (smart && firstItem.Type == 'Episode' && items.length == 1) {\n        const user = await getUser();\n\n        if (!user.Configuration?.EnableNextEpisodeAutoPlay) {\n            return {\n                Items: items\n            };\n        }\n\n        const result = await getItemsForPlayback({\n            ids: firstItem.Id ? [firstItem.Id] : undefined\n        });\n\n        if (!result.Items || result.Items.length < 1) {\n            return result;\n        }\n\n        const episode = result.Items[0];\n\n        if (!episode.SeriesId) {\n            return result;\n        }\n\n        const episodesResult = await getEpisodesForPlayback({\n            isMissing: false,\n            seriesId: episode.SeriesId\n        });\n\n        let foundItem = false;\n\n        episodesResult.Items = episodesResult.Items?.filter(\n            (e: BaseItemDto) => {\n                if (foundItem) {\n                    return true;\n                }\n\n                if (e.Id == episode.Id) {\n                    foundItem = true;\n\n                    return true;\n                }\n\n                return false;\n            }\n        );\n\n        episodesResult.TotalRecordCount = episodesResult.Items?.length ?? 0;\n\n        return episodesResult;\n    }\n\n    return {\n        Items: items\n    };\n}\n\n/**\n * Parse a date.. 
Just a wrapper around new Date,\n * but could be useful to deal with weird date strings\n * in the future.\n * @param date - string date to parse\n * @returns date object\n */\nexport function parseISO8601Date(date: string): Date {\n    return new Date(date);\n}\n\n/**\n * Convert ticks to seconds\n * @param ticks - number of ticks to convert\n * @returns number of seconds\n */\nexport function ticksToSeconds(ticks: number): number {\n    return ticks / TicksPerSecond;\n}\n\n/**\n * Send a message over the custom message transport\n * @param message - to send\n */\nexport function broadcastToMessageBus(message: BusMessage): void {\n    window.castReceiverContext.sendCustomMessage(\n        'urn:x-cast:com.connectsdk',\n        window.senderId,\n        message\n    );\n}\n\n/**\n * Inform the cast sender that we couldn't connect\n */\nexport function broadcastConnectionErrorMessage(): void {\n    broadcastToMessageBus({ message: '', type: 'connectionerror' });\n}\n"
  },
  {
    "path": "src/index.html",
    "content": "<!doctype html>\n<html>\n    <head>\n        <meta charset=\"utf-8\" />\n        <title>Jellyfin</title>\n        <style>\n            @import url('https://fonts.googleapis.com/css?family=Quicksand');\n        </style>\n        <script src=\"//www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js\"></script>\n        <script type=\"module\" src=\"./app.ts\"></script>\n    </head>\n    <body>\n        <div id=\"waiting-container-backdrop\"></div>\n        <div id=\"waiting-container\">\n            <div class=\"waitingContent\">\n                <img class=\"logo\" src=\"img/logo.svg\" />\n                Ready to cast\n            </div>\n            <div id=\"waiting-description\" class=\"waitingDescription\"></div>\n        </div>\n        <cast-media-player\n            id=\"video-player\"\n            crossorigin=\"anonymous\"\n            preload=\"auto\"\n        ></cast-media-player>\n        <div class=\"detailContent\">\n            <div class=\"detailImage\">\n                <div id=\"played-indicator-ok\" class=\"playedIndicator\">\n                    <img src=\"./img/check.svg\" />\n                </div>\n                <div\n                    id=\"played-indicator-value\"\n                    class=\"playedIndicator d-none\"\n                ></div>\n\n                <div class=\"detailImageProgressContainer\">\n                    <progress class=\"itemProgressBar\" max=\"100\"></progress>\n                </div>\n            </div>\n\n            <div class=\"detailInfo\">\n                <p class=\"displayNameContainer\">\n                    <span class=\"displayName\"></span><span id=\"miscInfo\"></span>\n                </p>\n                <div class=\"detailRating\">\n                    <div id=\"star-rating\" class=\"starRating\"></div>\n                    <div id=\"star-rating-value\" class=\"starRatingValue\"></div>\n                    <div id=\"critic-rating\" 
class=\"rottentomatoesicon\"></div>\n                    <div id=\"critic-rating-value\" class=\"criticRating\"></div>\n                </div>\n                <p class=\"genres\"></p>\n                <p class=\"overview\"></p>\n            </div>\n        </div>\n        <div class=\"detailLogo\"></div>\n    </body>\n</html>\n"
  },
  {
    "path": "src/types/appStatus.ts",
    "content": "export enum AppStatus {\n    Audio = 'audio',\n    Backdrop = 'backdrop',\n    Details = 'details',\n    Loading = 'loading',\n    PlayingWithControls = 'playing-with-controls',\n    Unset = '',\n    Waiting = 'waiting'\n}\n"
  },
  {
    "path": "src/types/global.d.ts",
    "content": "import {\n    CastReceiverContext,\n    PlayerManager\n} from 'chromecast-caf-receiver/cast.framework';\nimport { SystemVolumeData } from 'chromecast-caf-receiver/cast.framework.system';\nimport type {\n    BaseItemDto,\n    MediaSourceInfo,\n    PlayMethod,\n    RepeatMode\n} from '@jellyfin/sdk/lib/generated-client';\nimport type {\n    TextTrackEdgeType,\n    Track\n} from 'chromecast-caf-receiver/cast.framework.messages';\n\ntype BusMessageType =\n    | 'connectionerror'\n    | 'error'\n    | 'playbackerror'\n    | 'playbackprogress'\n    | 'playbackstart'\n    | 'playbackstop'\n    | 'playstatechange'\n    | 'repeatmodechange'\n    | 'volumechange';\n\n// Messagebus message\nexport interface BusMessage {\n    type: BusMessageType;\n    message?: string;\n    data?: string;\n}\n\n//\n// For the old queue stuff\n//\nexport interface ItemIndex {\n    item: BaseItemDto;\n    index: number;\n}\n\n// From commandHandler\nexport interface PlayRequest {\n    startIndex?: number;\n    items: BaseItemDto[];\n    startPositionTicks?: number;\n    mediaSourceId?: string;\n    audioStreamIndex?: number;\n    subtitleStreamIndex?: number;\n    liveStreamId?: string;\n}\n\nexport interface DisplayRequest {\n    ItemId: string;\n}\n\nexport interface SetIndexRequest {\n    index: number;\n}\n\nexport interface SetRepeatModeRequest {\n    RepeatMode: RepeatMode;\n}\nexport interface SeekRequest {\n    position: number; // seconds\n}\n\nexport interface DataMessage {\n    options:\n        | PlayRequest\n        | DisplayRequest\n        | SetIndexRequest\n        | SetRepeatModeRequest\n        | SeekRequest;\n    command: string;\n}\n\ntype SupportedCommands = Record<string, (data: DataMessage) => void>;\n// /From commandHandler\n\ninterface SubtitleAppearance {\n    dropShadow: TextTrackEdgeType;\n    font: string;\n    textColor: string;\n    textBackground: string;\n    textSize: 'smaller' | 'small' | 'large' | 'larger' | 'extralarge';\n}\n\ninterface 
StreamInfo {\n    tracks?: Track[];\n    audioStreamIndex: number | null;\n    canClientSeek: boolean;\n    canSeek: boolean;\n    contentType: string;\n    isStatic: boolean;\n    mediaSource?: MediaSourceInfo;\n    playerStartPositionTicks?: number;\n    startPositionTicks: number | null;\n    streamContainer?: string | null;\n    subtitleStreamIndex: number | null;\n    subtitleStreamUrl?: string;\n    url: string;\n}\n\ndeclare global {\n    export interface Window {\n        mediaElement: HTMLElement | null;\n        playerManager: PlayerManager;\n        castReceiverContext: CastReceiverContext;\n        repeatMode: RepeatMode;\n        reportEventType: 'repeatmodechange';\n        subtitleAppearance: SubtitleAppearance;\n        MaxBitrate: number | undefined;\n        senderId: string | undefined;\n        volume: SystemVolumeData;\n    }\n}\n\ndeclare module 'chromecast-caf-receiver/cast.framework.messages' {\n    // eslint-disable-next-line @typescript-eslint/no-empty-object-type\n    interface MediaInformationCustomData\n        extends JellyfinMediaInformationCustomData {}\n}\n\ninterface JellyfinMediaInformationCustomData {\n    audioStreamIndex: number | null;\n    canClientSeek: boolean;\n    canSeek: boolean;\n    itemId: string | undefined;\n    liveStreamId: string | null;\n    mediaSourceId: string | null;\n    playMethod: PlayMethod;\n    playSessionId: string;\n    runtimeTicks: number | null;\n    startPositionTicks: number;\n    subtitleStreamIndex: number | null;\n}\n"
  },
  {
    "path": "stylelint.config.js",
    "content": "module.exports = {\n    extends: ['stylelint-config-standard'],\n    rules: {\n        'at-rule-no-unknown': null,\n        'selector-class-pattern': null,\n        'selector-id-pattern': null\n    },\n    syntax: 'css'\n};\n"
  },
  {
    "path": "tsconfig.json",
    "content": "{\n    \"compilerOptions\": {\n        \"target\": \"ES2015\",\n        \"module\": \"ESNext\",\n        \"moduleResolution\": \"bundler\",\n        \"lib\": [\"dom\", \"ES2015\"],\n        \"skipLibCheck\": true,\n        \"resolveJsonModule\": true,\n        \"allowSyntheticDefaultImports\": true,\n        \"allowJs\": true,\n        \"sourceMap\": true,\n        \"outDir\": \"./dist/\",\n        \"strict\": true,\n        \"paths\": {\n            \"~/*\": [\"./src/*\"]\n        },\n        \"types\": [\"@types/chromecast-caf-receiver\", \"vite/client\"]\n    }\n}\n"
  },
  {
    "path": "vite.config.ts",
    "content": "/* eslint-disable sort-keys */\n\nimport { defineConfig } from 'vite';\n\nexport default defineConfig({\n    root: 'src',\n    base: './',\n    build: {\n        outDir: '../dist',\n        emptyOutDir: true,\n        target: 'es2015',\n        assetsInlineLimit: 0\n    },\n    server: {\n        port: 9000\n    }\n});\n"
  }
]