Repository: jellyfin/jellyfin-chromecast
Branch: master
Commit: 85a4f5ab3cac
Files: 34
Total size: 207.5 KB
Directory structure:
gitextract_y7ozec1y/
├── .editorconfig
├── .gitattributes
├── .github/
│ ├── ISSUE_TEMPLATE/
│ │ └── bug_report.md
│ └── workflows/
│ ├── lint.yaml
│ ├── publish.yaml
│ └── test.yaml
├── .gitignore
├── .npmrc
├── .prettierrc.yaml
├── .stylelintrc.json
├── CONTRIBUTING.md
├── LICENSE.md
├── README.md
├── eslint.config.mjs
├── package.json
├── renovate.json
├── src/
│ ├── app.ts
│ ├── components/
│ │ ├── __tests__/
│ │ │ └── jellyfinApi.test.ts
│ │ ├── codecSupportHelper.ts
│ │ ├── commandHandler.ts
│ │ ├── deviceprofileBuilder.ts
│ │ ├── documentManager.ts
│ │ ├── jellyfinActions.ts
│ │ ├── jellyfinApi.ts
│ │ ├── maincontroller.ts
│ │ └── playbackManager.ts
│ ├── css/
│ │ └── jellyfin.css
│ ├── helpers.ts
│ ├── index.html
│ └── types/
│ ├── appStatus.ts
│ └── global.d.ts
├── stylelint.config.js
├── tsconfig.json
└── vite.config.ts
================================================
FILE CONTENTS
================================================
================================================
FILE: .editorconfig
================================================
root = true
[*]
charset = utf-8
end_of_line = lf
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
================================================
FILE: .gitattributes
================================================
* text=auto eol=lf
*.{cmd,[cC][mM][dD]} text eol=crlf
*.{bat,[bB][aA][tT]} text eol=crlf
CONTRIBUTORS.md merge=union
README.md text
LICENSE text
*.css text
*.eot binary
*.gif binary
*.html text diff=html
*.ico binary
*.*ignore text
*.jpg binary
*.js text
*.json text
*.lock text -diff
*.map text -diff
*.md text
*.otf binary
*.png binary
*.py text diff=python
*.svg binary
*.ts text
*.ttf binary
*.sass text
*.webp binary
*.woff binary
*.woff2 binary
.editorconfig text
.gitattributes export-ignore
.gitignore export-ignore
*.gitattributes linguist-language=gitattributes
locales/*.json merge=union
================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.md
================================================
---
name: Bug report
about: Create a bug report
title: ''
labels: bug
assignees: ''
---
**Describe the bug**
**To Reproduce**
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
**Logs**
**Screenshots**
**System (please complete the following information):**
- OS: [e.g. Docker, Debian, Windows]
- Browser: [e.g. Firefox, Chrome, Safari]
- Jellyfin version: [e.g. 10.0.1]
- Cast Receiver version: [e.g. Stable, Unstable]
- Cast client: [e.g Ultra]
**Additional context**
================================================
FILE: .github/workflows/lint.yaml
================================================
name: Lint
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
lint:
name: Lint TS and CSS
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Setup node env
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: 20
- name: Install dependencies
run: npm ci --no-audit
- name: Build for production
run: npm run build
- name: Run ESLint
run: npm run lint
================================================
FILE: .github/workflows/publish.yaml
================================================
name: Publish
on:
push:
branches:
- master
tags:
- '*'
pull_request:
branches:
- master
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Setup node env
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: 20
- name: Install dependencies
run: npm ci --no-audit
- name: Update version in package.json
run: |
PACKAGE_JSON=$(jq --indent 4 ".version += \"-$GITHUB_SHA\"" package.json)
echo $PACKAGE_JSON > package.json
- name: Build
run: npm run build
- name: Prepare artifacts
run: |
test -d dist
mv dist jellyfin-chromecast
zip -r "jellyfin-chromecast.zip" "jellyfin-chromecast"
- name: Upload artifacts
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: jellyfin-chromecast
path: jellyfin-chromecast.zip
if-no-files-found: error
publish:
name: Publish
runs-on: ubuntu-latest
if: ${{ contains(github.repository_owner, 'jellyfin') && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags')) }}
needs: [build]
steps:
- name: Set JELLYFIN_VERSION to git tag
if: ${{ startsWith(github.ref, 'refs/tags') }}
run: echo "JELLYFIN_VERSION=$(echo ${GITHUB_REF#refs/tags/v} | tr / -)" >> $GITHUB_ENV
- name: Set JELLYFIN_VERSION to unstable
if: ${{ github.ref == 'refs/heads/master' }}
run: echo "JELLYFIN_VERSION=unstable" >> $GITHUB_ENV
- name: Download artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
name: jellyfin-chromecast
- name: Upload release archive to GitHub release
if: ${{ startsWith(github.ref, 'refs/tags') }}
uses: alexellis/upload-assets@13926a61cdb2cb35f5fdef1c06b8b591523236d3 # 0.4.1
env:
GITHUB_TOKEN: ${{ secrets.JF_BOT_TOKEN }}
with:
asset_paths: '["jellyfin-chromecast.zip"]'
- name: Upload release archive to repo.jellyfin.org
uses: burnett01/rsync-deployments@33214bd98ba4ac2be90f5976672b3f030fce9ce4 # 7.1.0
with:
switches: -vrptz
path: jellyfin-chromecast.zip
remote_path: /srv/incoming/chromecast/${{ env.JELLYFIN_VERSION }}/
remote_host: ${{ secrets.REPO_HOST }}
remote_user: ${{ secrets.REPO_USER }}
remote_key: ${{ secrets.REPO_KEY }}
- name: Update repo.jellyfin.org symlinks
uses: appleboy/ssh-action@0ff4204d59e8e51228ff73bce53f80d53301dee2 # v1.2.5
with:
host: ${{ secrets.REPO_HOST }}
username: ${{ secrets.REPO_USER }}
key: ${{ secrets.REPO_KEY }}
envs: JELLYFIN_VERSION
script_stop: true
script: |
if [ -d "/srv/repository/main/client/chromecast/versions/${{ env.JELLYFIN_VERSION }}" ] && [ -n "${{ env.JELLYFIN_VERSION }}" ]; then
sudo rm -r /srv/repository/main/client/chromecast/versions/${{ env.JELLYFIN_VERSION }};
fi
sudo mv /srv/incoming/chromecast/${{ env.JELLYFIN_VERSION }} /srv/repository/main/client/chromecast/versions/${{ env.JELLYFIN_VERSION }};
cd /srv/repository/main/client/chromecast;
sudo rm -rf *.zip;
sudo ln -s versions/${JELLYFIN_VERSION}/jellyfin-chromecast-${JELLYFIN_VERSION}.zip .;
deploy:
name: Deploy
runs-on: ubuntu-latest
if: ${{ contains(github.repository_owner, 'jellyfin') && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags')) }}
needs: [build]
steps:
- name: Download Artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
name: jellyfin-chromecast
- name: Unzip artifact
run: unzip jellyfin-chromecast.zip -d .
- name: Deploy to apps.jellyfin.org/chromecast/unstable
uses: burnett01/rsync-deployments@33214bd98ba4ac2be90f5976672b3f030fce9ce4 # 7.1.0
with:
switches: -vrptz
path: jellyfin-chromecast/
remote_path: /srv/chromecast/unstable/
remote_host: ${{ secrets.DEPLOY_APPS_HOST }}
remote_user: ${{ secrets.DEPLOY_APPS_USER }}
remote_key: ${{ secrets.DEPLOY_APPS_KEY }}
- name: Deploy to apps.jellyfin.org/chromecast/stable
if: ${{ startsWith(github.ref, 'refs/tags') }}
uses: burnett01/rsync-deployments@33214bd98ba4ac2be90f5976672b3f030fce9ce4 # 7.1.0
with:
switches: -vrptz
path: jellyfin-chromecast/
remote_path: /srv/chromecast/stable/
remote_host: ${{ secrets.DEPLOY_APPS_HOST }}
remote_user: ${{ secrets.DEPLOY_APPS_USER }}
remote_key: ${{ secrets.DEPLOY_APPS_KEY }}
================================================
FILE: .github/workflows/test.yaml
================================================
name: Test
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
jest:
name: Jest
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Setup node env
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: 20
- name: Install dependencies
run: npm ci --no-audit
- name: Run tests
run: npm run test
================================================
FILE: .gitignore
================================================
# ide
.idea
tags
# npm/yarn
node_modules
dist
yarn-error.log
================================================
FILE: .npmrc
================================================
fund=false
================================================
FILE: .prettierrc.yaml
================================================
semi: true
singleQuote: true
tabWidth: 4
trailingComma: none
================================================
FILE: .stylelintrc.json
================================================
{
"extends": ["stylelint-config-standard"],
"rules": {
"selector-class-pattern": null,
"selector-id-pattern": null
}
}
================================================
FILE: CONTRIBUTING.md
================================================
# Contributing
## Development
### Development Environment
The development environment is set up with EditorConfig. Code style is enforced by Prettier, and ESLint is used for JavaScript/TypeScript linting
- [editorconfig](https://editorconfig.org/)
- [prettier](https://prettier.io/)
- [eslint](https://eslint.org/)
### Environment variables
| name | required | description | default if not set |
| ------------- | -------- | --------------------------------------------------------- | ------------------ |
| RECEIVER_PORT | No | The port used for the dev server when `npm start` is used | 9000 |
### Building/Using
`npm start` - Build a development version and start a dev server
`npm run build` - Build a production version
`npm run test` - Run tests
`npm run lint` - Run linting and prettier
1. Register a new [application](https://developers.google.com/cast/docs/registration). It is important that you choose a "Custom application", the rest of the details are up to you (name, description, etc). You will need a web server to host the files on.
2. Ensure that you can use this app:
#### For versions 10.8.x and earlier:
- Set up a local copy of [jellyfin-web](https://github.com/jellyfin/jellyfin-web).
- Change `applicationStable` and `applicationUnstable` in `jellyfin-web/src/plugins/chromecastPlayer/plugin.js` to your own application ID.
- Run the local copy of jellyfin-web using the provided instructions in the repo.
#### For versions 10.9.x and beyond:
- Add your `CastReceiverApplication` `ID` and `Name` to the jellyfin `system.xml` in the `configuration` folder.
- Your custom hosted application is now available to select next to `stable` and `unstable`. From the client of your choice.
3. Clone this repo and run `npm install`. This will install all dependencies, run tests and build a production build by default.
4. Make changes and build with `npm run build`.
5. Before pushing your changes, make sure to run `npm run test` and `npm run lint`.
> NOTE: It is recommended to symlink the `dist` folder pointing to a location on your web server hosting the files. That way you can refresh the cast receiver via the Chrome Remote Debugger and see your changes without having to manually copy after each build.
## Pull Requests
This project uses the standard Github Fork and PR flow
================================================
FILE: LICENSE.md
================================================
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
{{description}}
Copyright (C) {{year}} {{fullname}}
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
{signature of Ty Coon}, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.
================================================
FILE: README.md
================================================
Jellyfin Cast Web Receiver
The Jellyfin Cast Web Receiver is the frontend used when casting to a Google Cast capable device. It is used by default when casting from the Jellyfin Android app or Jellyfin web client.
### How do I use it?
A `stable` and `unstable` version of this app are already included in the Jellyfin server. There is no need to separately install this project. To host your own version (for developing) see `CONTRIBUTING.md`.
The `stable` version is the latest released version. `unstable` is updated automatically from the `master` branch.
### What does it do?
This is a `web receiver` as defined in the [Google Cast architecture](https://developers.google.com/cast/docs/overview).
As soon as you press the "cast" button on your client this application will start on your cast-capable device and handle playback functionality.
### What doesn't it do?
Anything related to your non-cast device (e.g. your phone, browser, other device) or anything about the inclusion of casting for a specific client (e.g. casting from the iOS app).
Any issues/features related to that: check the respective repository.
### Something not working right?
First check if the issue is actually Google Cast related. So answer the question:
`"Can I reproduce the issue in any other way than when casting to a Google Cast capable device?"`
If yes: The issue probably lies somewhere else.
If no: [Open an issue on GitHub](https://github.com/jellyfin/jellyfin-chromecast/issues/new/choose).
### Testing
Jellyfin allows switching between a `stable` and `unstable` version of the client. Go to the client of your choice and: `user` -> `settings` -> `playback` -> `Google Cast version`.
Note that this setting is set per-user.
================================================
FILE: eslint.config.mjs
================================================
import jsdoc from 'eslint-plugin-jsdoc';
import promise from 'eslint-plugin-promise';
import importPlugin from 'eslint-plugin-import';
import globals from 'globals';
import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';
import eslint from '@eslint/js';
import tseslint from 'typescript-eslint';
import json from 'eslint-plugin-json';
// ESLint flat configuration. Entries are applied in array order; later
// entries override matching options from earlier ones, so ordering here
// is significant.
export default [
// Baseline recommended rule sets: core ESLint, JSDoc, and Prettier
// (Prettier runs as an ESLint rule via eslint-plugin-prettier).
eslint.configs.recommended,
jsdoc.configs['flat/recommended'],
eslintPluginPrettierRecommended,
// typescript-eslint presets; 'stylisticTypeChecked' needs type
// information, supplied by the projectService option further below.
...tseslint.configs.strict,
...tseslint.configs.stylisticTypeChecked,
promise.configs['flat/recommended'],
importPlugin.flatConfigs.errors,
importPlugin.flatConfigs.warnings,
// Never lint build output.
{
ignores: ['dist/*']
},
// Let eslint-plugin-import resolve TypeScript paths, including
// @types packages (alwaysTryTypes).
{
settings: {
'import/resolver': {
typescript: {
alwaysTryTypes: true
}
}
}
},
// JSON files: lint with the JSON plugin and disable type-aware rules,
// which only make sense for TS/JS sources.
{
files: ['**/*.json'],
...json.configs['recommended'],
...tseslint.configs.disableTypeChecked
},
// TypeScript files: import-plugin TS support plus the TS parser.
{
files: ['**/*.ts'],
...importPlugin.flatConfigs.typescript,
languageOptions: {
parser: tseslint.parser
}
},
// This config file itself is outside the TS project, so type-aware
// linting is disabled for it.
{
files: ['eslint.config.mjs'],
...tseslint.configs.disableTypeChecked
},
// Project-wide rules for all TS/JS sources. Globals reflect the cast
// receiver's browser runtime environment.
{
files: ['**/*.ts', '**/*.js'],
languageOptions: {
globals: {
...globals.browser,
...globals.es2015
},
parserOptions: {
// Resolve the nearest tsconfig automatically for type-aware rules.
projectService: true,
tsconfigRootDir: import.meta.dirname
}
},
rules: {
'@typescript-eslint/explicit-function-return-type': 'error',
'@typescript-eslint/no-explicit-any': 'warn',
'@typescript-eslint/no-unnecessary-type-assertion': 'error',
'@typescript-eslint/no-unused-expressions': 'warn',
'@typescript-eslint/no-unused-vars': 'error',
'@typescript-eslint/prefer-ts-expect-error': 'error',
curly: 'error',
'import/newline-after-import': 'error',
'import/order': 'error',
'jsdoc/check-indentation': 'error',
'jsdoc/check-param-names': 'error',
'jsdoc/check-property-names': 'error',
'jsdoc/check-syntax': 'error',
'jsdoc/check-tag-names': 'error',
'jsdoc/no-types': 'error',
'jsdoc/require-description': 'warn',
'jsdoc/require-hyphen-before-param-description': 'error',
'jsdoc/require-jsdoc': 'error',
'jsdoc/require-param-description': 'warn',
//TypeScript and IntelliSense already provides us information about the function typings while hovering and
// eslint-jsdoc doesn't detect a mismatch between what's declared in the function and what's declared in
// JSDOC.
'jsdoc/require-param-type': 'off',
'jsdoc/require-returns-type': 'off',
'jsdoc/valid-types': 'off',
// Enforce consistent blank-line structure between statement kinds.
'padding-line-between-statements': [
'error',
// Always require blank lines after directives (like 'use-strict'), except between directives
{ blankLine: 'always', next: '*', prev: 'directive' },
{ blankLine: 'any', next: 'directive', prev: 'directive' },
// Always require blank lines after import, except between imports
{ blankLine: 'always', next: '*', prev: 'import' },
{ blankLine: 'any', next: 'import', prev: 'import' },
// Always require blank lines before and after every sequence of variable declarations and export
{
blankLine: 'always',
next: ['const', 'let', 'var', 'export'],
prev: '*'
},
{
blankLine: 'always',
next: '*',
prev: ['const', 'let', 'var', 'export']
},
{
blankLine: 'any',
next: ['const', 'let', 'var', 'export'],
prev: ['const', 'let', 'var', 'export']
},
// Always require blank lines before and after class declaration, if, do/while, switch, try
{
blankLine: 'always',
next: [
'if',
'class',
'for',
'do',
'while',
'switch',
'try'
],
prev: '*'
},
{
blankLine: 'always',
next: '*',
prev: ['if', 'class', 'for', 'do', 'while', 'switch', 'try']
},
// Always require blank lines before return statements
{ blankLine: 'always', next: 'return', prev: '*' }
],
'prefer-arrow-callback': 'error',
'prefer-template': 'error',
'promise/no-nesting': 'error',
'promise/no-return-in-finally': 'error',
'promise/prefer-await-to-callbacks': 'error',
'promise/prefer-await-to-then': 'error',
// Keep object keys alphabetized (case-insensitive, natural ordering).
'sort-keys': [
'error',
'asc',
{ caseSensitive: false, minKeys: 2, natural: true }
],
'sort-vars': 'error'
}
},
// Plain .js files (e.g. build/config scripts) run under Node, so give
// them the Node globals in addition to the rules above.
{
files: ['*.js'],
languageOptions: {
globals: {
...globals.node
}
}
}
];
================================================
FILE: package.json
================================================
{
"name": "jellyfin-chromecast",
"description": "Cast receiver for Jellyfin",
"version": "3.0.0",
"type": "module",
"bugs": {
"url": "https://github.com/jellyfin/jellyfin-chromecast/issues"
},
"dependencies": {
"@jellyfin/sdk": "0.12.0"
},
"devDependencies": {
"@types/chromecast-caf-receiver": "6.0.26",
"@types/node": "24.12.2",
"eslint": "9.39.4",
"eslint-config-prettier": "10.1.8",
"eslint-import-resolver-typescript": "4.4.4",
"eslint-plugin-import": "2.32.0",
"eslint-plugin-jsdoc": "61.7.1",
"eslint-plugin-json": "4.0.1",
"eslint-plugin-prettier": "5.5.5",
"eslint-plugin-promise": "7.3.0",
"prettier": "3.6.2",
"stylelint": "16.26.1",
"stylelint-config-standard": "39.0.1",
"typescript": "6.0.3",
"typescript-eslint": "8.59.1",
"vite": "8.0.9",
"vitest": "4.1.5"
},
"homepage": "https://jellyfin.org/",
"license": "GPL-2.0-or-later",
"repository": {
"type": "git",
"url": "git+https://github.com/jellyfin/jellyfin-chromecast.git"
},
"scripts": {
"start": "vite",
"build": "vite build",
"test": "vitest",
"lint": "npm run lint:code && npm run lint:ts && npm run lint:css",
"lint:code": "eslint",
"lint:ts": "tsc --noEmit",
"lint:css": "stylelint src/**/*.css"
}
}
================================================
FILE: renovate.json
================================================
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"github>jellyfin/.github//renovate-presets/nodejs",
":dependencyDashboard"
]
}
================================================
FILE: src/app.ts
================================================
import { RepeatMode } from '@jellyfin/sdk/lib/generated-client/models/repeat-mode';
import './components/maincontroller';
import './css/jellyfin.css';
// Cache the <video> element for the player components to use.
window.mediaElement = document.getElementById('video-player');

// Start with repeat disabled; senders can change this via SetRepeatMode.
window.repeatMode = RepeatMode.RepeatNone;
================================================
FILE: src/components/__tests__/jellyfinApi.test.ts
================================================
import { describe, beforeAll, beforeEach, test, expect } from 'vitest';
import { JellyfinApi } from '../jellyfinApi';
/**
 * Installs a minimal mock of the Cast framework on the global object so
 * JellyfinApi can query the connected senders without a real receiver.
 */
const setupMockCastSenders = (): void => {
    // Fake sender list returned by the mocked receiver context.
    const getSenders = (): any[] => [{ id: 'thisIsSenderId' }]; // eslint-disable-line @typescript-eslint/no-explicit-any

    // Every call produces a fresh context object exposing only getSenders.
    const getInstance = (): any => ({ getSenders }); // eslint-disable-line @typescript-eslint/no-explicit-any

    // @ts-expect-error cast is already defined globally, however since we're mocking it we need to redefine it.
    global.cast = { framework: { CastReceiverContext: { getInstance } } };
};
// Tests for JellyfinApi.createUrl (plain server-relative URL building).
describe('creating basic urls', () => {
    beforeAll(() => {
        setupMockCastSenders();
    });

    beforeEach(() => {
        // Reset server credentials before every test.
        JellyfinApi.setServerInfo('thisIsAccessToken', 'thisIsServerAddress');
    });

    test('should return correct url', () => {
        const result = JellyfinApi.createUrl('somePath');
        const correct = 'thisIsServerAddress/somePath';

        expect(result).toEqual(correct);
    });

    test('should remove leading slashes', () => {
        const result = JellyfinApi.createUrl('///////somePath');
        const correct = 'thisIsServerAddress/somePath';

        expect(result).toEqual(correct);
    });

    test('should return empty string on undefined serverAddress', () => {
        // Clearing server info leaves no base address to build from.
        JellyfinApi.setServerInfo();

        const result = JellyfinApi.createUrl('somePath');
        const correct = '';

        expect(result).toEqual(correct);
    });
});
// Tests for JellyfinApi.createImageUrl (item image URL building).
describe('creating image urls', () => {
    beforeAll(() => {
        setupMockCastSenders();
    });

    beforeEach(() => {
        // Reset server credentials before every test.
        JellyfinApi.setServerInfo('thisIsAccessToken', 'thisIsServerAddress');
    });

    test('should return correct url with all parameters provided', () => {
        const itemId = '1';
        const imageType = 'Primary';
        const imageTag = 'sampleTag';
        const imdIdx = 0;

        const result = JellyfinApi.createImageUrl(
            itemId,
            imageType,
            imageTag,
            imdIdx
        );
        const correct = `thisIsServerAddress/Items/${itemId}/Images/${imageType}/${imdIdx.toString()}?tag=${imageTag}`;

        expect(result).toEqual(correct);
    });

    test('should return correct url with minimal parameters provided', () => {
        const itemId = '1';
        const imageType = 'Primary';
        const imageTag = 'sampleTag';
        // Expected index when the argument is omitted (default image index).
        const imdIdx = 0;

        const result = JellyfinApi.createImageUrl(itemId, imageType, imageTag);
        const correct = `thisIsServerAddress/Items/${itemId}/Images/${imageType}/${imdIdx.toString()}?tag=${imageTag}`;

        expect(result).toEqual(correct);
    });

    test('should return empty string on undefined serverAddress', () => {
        JellyfinApi.setServerInfo();

        const result = JellyfinApi.createImageUrl('', '', '');
        const correct = '';

        expect(result).toEqual(correct);
    });
});
================================================
FILE: src/components/codecSupportHelper.ts
================================================
import { VideoRangeType } from '@jellyfin/sdk/lib/generated-client';
// Shared Cast receiver context used by all capability probes (canDisplayType).
const castContext = cast.framework.CastReceiverContext.getInstance();
/**
 * Converts a codec string to the appropriate MIME type to use for testing support.
 * @param codec - The codec in question.
 * @returns The MIME type to use for testing support.
 */
function videoCodecToMimeType(codec: VideoCodec): string {
    // The VPx/AV1 family is probed against a WebM container; the H.26x
    // family is probed against MP4.
    const webmCodecs = [VideoCodec.VP8, VideoCodec.VP9, VideoCodec.AV1];

    return webmCodecs.includes(codec) ? 'video/webm' : 'video/mp4';
}
/**
 * Get the string to use for testing support of a codec.
 * @param codec - The codec in question.
 * @param profile - The profile for the codec.
 * @param level - The level for the codec.
 * @param bitDepth - The bit depth of the video.
 * @returns The string to use for testing support of the codec.
 */
function getCodecString(
    codec: VideoCodec,
    profile?: string,
    level?: number,
    bitDepth?: number
): string {
    switch (codec) {
        case VideoCodec.H264: {
            // Default to the oldest baseline profile.
            profile = profile ?? 'baseline';

            // profile_idc + constraint flags encoded as 4 hex digits.
            let profileFlag: string;

            switch (profile) {
                case 'high 10':
                    profileFlag = '6e00';
                    break;
                case 'high':
                    profileFlag = '6400';
                    break;
                case 'main':
                    profileFlag = '4d00';
                    break;
                case 'constrained baseline':
                    profileFlag = '4240';
                    break;
                case 'baseline':
                default:
                    profileFlag = '4200';
                    break;
            }

            // Levels are bound by max frame size (macroblocks) and decoding
            // speed (macroblocks/s).
            // A macroblock is 16x16 pixels.
            //
            // See:
            // * https://en.wikipedia.org/wiki/Advanced_Video_Coding#Levels
            level = level ?? 10;

            // The avc1 string carries the level as two hex digits.
            const levelFlag = level.toString(16).padStart(2, '0');

            return `avc1.${profileFlag}${levelFlag}`;
        }
        case VideoCodec.H265: {
            let profileFlag: string;
            let constraintFlag: number;

            switch (profile) {
                case 'main 10':
                    profileFlag = 'L';
                    constraintFlag = 4;
                    break;
                case 'high':
                    profileFlag = 'H';
                    constraintFlag = 4;
                    break;
                case 'high 10':
                    profileFlag = 'H';
                    constraintFlag = 4;
                    break;
                case 'main':
                default:
                    profileFlag = 'L';
                    constraintFlag = 0;
                    break;
            }

            // Levels are bound by the luma picture size (total pixels) and
            // luma sample rate (samples/s).
            level = level ?? 30;

            return `hev1.1.${constraintFlag}.${profileFlag}${level}.B0`;
        }
        case VideoCodec.VP8:
            // VP8 has no profile/level variants in its codec string.
            return 'vp8';
        case VideoCodec.VP9: {
            let profileFlag: string;

            switch (profile?.toLowerCase()) {
                case 'profile 1':
                    profileFlag = '01';
                    break;
                case 'profile 2':
                    profileFlag = '02';
                    break;
                case 'profile 3':
                    profileFlag = '03';
                    break;
                case 'profile 0':
                default:
                    profileFlag = '00';
                    break;
            }

            level = level ?? 1.0;
            bitDepth = bitDepth ?? 8;

            const bitDepthFlag = bitDepth.toString().padStart(2, '0');

            // vp09.<profile>.<level*10>.<bitDepth>
            return `vp09.${profileFlag}.${level * 10}.${bitDepthFlag}`;
        }
        case VideoCodec.AV1: {
            let profileFlag: string;

            switch (profile?.toLowerCase()) {
                case 'high':
                    profileFlag = '1';
                    break;
                case 'professional':
                    profileFlag = '2';
                    break;
                case 'main':
                default:
                    profileFlag = '0';
                    break;
            }

            // This level should correspond to the `seq_level_idx`.
            level = level ?? 0;
            bitDepth = bitDepth ?? 8;

            const levelFlag = level.toString().padStart(2, '0');
            const bitDepthFlag = bitDepth.toString().padStart(2, '0');

            // Assume main tier, since the condition language has no way to
            // express that.
            return `av01.${profileFlag}.${levelFlag}M.${bitDepthFlag}`;
        }
    }
}
/**
 * Utility class representing a video resolution.
 */
export class Resolution {
    constructor(
        public width: number,
        public height: number
    ) {}

    /**
     * Compares this resolution with another one.
     * @param other - The resolution to compare against.
     * @returns `true` when both dimensions match exactly.
     */
    public equals(other: Resolution): boolean {
        return other.width === this.width && other.height === this.height;
    }
}
/**
 * Known video codecs
 */
export enum VideoCodec {
    H264 = 'h264',
    // Note: the string value for H.265 is 'hevc', not 'h265'.
    H265 = 'hevc',
    VP8 = 'vp8',
    VP9 = 'vp9',
    AV1 = 'av1'
}
/**
 * Checks if there is E-AC-3 support.
 * This check returns in line with the cast settings made in Google Home.
 * If the device is in auto, EDID information will be used, otherwise it
 * depends on the manual setting.
 *
 * Currently it's disabled because of problems getting it to work with HLS.
 * @returns true if E-AC-3 can be played
 */
export function hasEAC3Support(): boolean {
    // Disabled pending HLS fixes; the real capability probe is kept below
    // for reference.
    //return castContext.canDisplayType('audio/mp4', 'ec-3');
    return false;
}
/**
 * Checks if there is AC-3 support.
 * This check returns in line with the cast settings made in Google Home.
 * If the device is in auto, EDID information will be used, otherwise it
 * depends on the manual setting.
 *
 * Currently it's disabled because of problems getting it to work with HLS.
 * @returns true if AC-3 can be played
 */
export function hasAC3Support(): boolean {
    // Disabled pending HLS fixes; the real capability probe is kept below
    // for reference.
    //return castContext.canDisplayType('audio/mp4', 'ac-3');
    return false;
}
/**
 * Checks for every supported video codec.
 * @returns An array of supported video codecs
 */
export function getSupportedVideoCodecs(): VideoCodec[] {
    // Probe every known codec and keep the ones the device can display.
    return Object.values(VideoCodec).filter((videoCodec) =>
        hasVideoCodecSupport(videoCodec)
    );
}
/**
 * Check if the device has any video support.
 * @returns `true` if the device can display video.
 */
export function hasVideoSupport(): boolean {
    const deviceCaps = castContext.getDeviceCapabilities();

    // Optional chaining guards against a null/undefined capabilities object.
    return deviceCaps?.[
        cast.framework.system.DeviceCapabilities.DISPLAY_SUPPORTED
    ];
}
/**
* Gets whether the particular codec is supported.
* @param codec - The codec in question
* @returns `true` if the codec is supported.
*/
export function hasVideoCodecSupport(codec: VideoCodec): boolean {
const mimeType = videoCodecToMimeType(codec);
const codecString = getCodecString(codec);
return castContext.canDisplayType(mimeType, codecString);
}
/**
 * Get the supported video ranges for a given codec profile and level.
 * @param codec - The codec in question.
 * @param profile - The profile in question.
 * @param level - The level in question.
 * @returns A set of supported video ranges.
 */
export function getVideoRangeSupport(
    codec: VideoCodec,
    profile: string,
    level: number
    // FIX: bare `Set` is invalid TypeScript (TS2314) — the generic type
    // argument was missing from the return annotation.
): Set<VideoRangeType> {
    // SDR is always supported; HDR variants are added per-codec below.
    const supportedRanges = new Set([VideoRangeType.Sdr]);

    profile = profile.toLowerCase();

    const mimeType = videoCodecToMimeType(codec);
    // 10-bit codec string, since HDR formats require 10-bit video.
    const codecString = getCodecString(codec, profile, level, 10);

    switch (codec) {
        case VideoCodec.H265: {
            // Dolby Vision on HEVC requires a 10-bit profile.
            if (profile !== 'main 10' && profile !== 'high 10') {
                break;
            }

            // HEVC vs. DoVi levels and max pixel rate (luma sample rate)
            // +------------+---------------+------------+---------------+
            // | HEVC Level | HEVC Max PPS  | DoVi Level | DoVi Max PPS  |
            // +------------+---------------+------------+---------------+
            // |        3.0 |    16_588_800 |         01 |    22_118_400 |
            // |        3.1 |    33_177_600 |         03 |    49_766_400 |
            // |        4.0 |    66_846_720 |         04 |   124_416_000 |
            // |        4.1 |   133_693_440 |         06 |   199_065_600 |
            // |        5.0 |   267_386_880 |         07 |   248_832_000 |
            // |        5.1 |   534_773_760 |         10 |   995_328_000 |
            // |        6.0 | 1_069_547_520 |         11 | 1_990_656_000 |
            // |        6.1 | 2_139_095_040 |         13 | 3_981_312_000 |
            // +------------+---------------+------------+---------------+
            // Note: the incoming HEVC level is the server's representation
            // (level * 3), hence the `* 3` on the comparison bounds.
            const doviLevel = ((): string => {
                if (level <= 30 * 3) {
                    return '01';
                } else if (level <= 31 * 3) {
                    return '03';
                } else if (level <= 40 * 3) {
                    return '04';
                } else if (level <= 41 * 3) {
                    return '06';
                } else if (level <= 50 * 3) {
                    return '07';
                } else if (level <= 51 * 3) {
                    return '10';
                } else if (level <= 60 * 3) {
                    return '11';
                } else {
                    return '13';
                }
            })();

            // dvhe.05: Dolby Vision profile 5 (proprietary IPT, no base layer
            // cross-compatibility).
            if (castContext.canDisplayType(mimeType, `dvhe.05.${doviLevel}`)) {
                supportedRanges.add(VideoRangeType.Dovi);
            }

            // dvhe.08: Dolby Vision profile 8 (HEVC base layer compatible).
            if (castContext.canDisplayType(mimeType, `dvhe.08.${doviLevel}`)) {
                supportedRanges.add(VideoRangeType.DoviWithSdr);
                supportedRanges.add(VideoRangeType.DoviWithHlg);
                supportedRanges.add(VideoRangeType.DoviWithHdr10);
            }

            break;
        }
        case VideoCodec.AV1: {
            // AV1 vs. DoVi levels and max pixel rate (luma sample rate)
            // +-------------------+---------------+------------+---------------+
            // | AV1 seq_level_idx | AV1 Max PPS   | DoVi Level | DoVi Max PPS  |
            // +-------------------+---------------+------------+---------------+
            // |                 4 |    19_975_680 |         01 |    22_118_400 |
            // |                 5 |    31_950_720 |         03 |    49_766_400 |
            // |                 8 |    70_778_880 |         04 |   124_416_000 |
            // |                 9 |   141_557_760 |         06 |   199_065_600 |
            // |                12 |   267_386_880 |         07 |   248_832_000 |
            // |                13 |   534_773_760 |         10 |   995_328_000 |
            // |                16 | 1_069_547_520 |         11 | 1_990_656_000 |
            // |               6.1 | 2_139_095_040 |         13 | 3_981_312_000 |
            // +-------------------+---------------+------------+---------------+
            const doviLevel = ((): string => {
                if (level <= 4) {
                    return '01';
                } else if (level <= 5) {
                    return '03';
                } else if (level <= 8) {
                    return '04';
                } else if (level <= 9) {
                    return '06';
                } else if (level <= 12) {
                    return '07';
                } else if (level <= 13) {
                    return '10';
                } else if (level <= 16) {
                    return '11';
                } else {
                    return '13';
                }
            })();

            // 110: Chroma subsampling (4:2:0), not Monochrome
            // 09: Color Primary (BT.2020)
            // 16: Transfer Characteristics (PQ, SMPTE ST 2084)
            // 09: Matrix Coefficients (BT.2020 non-constant luminance)
            // 0: Studio swing representation
            const hasHdr10Support = castContext.canDisplayType(
                mimeType,
                `${codecString}.110.09.16.09.0`
            );

            if (hasHdr10Support) {
                supportedRanges.add(VideoRangeType.Hdr10);
                supportedRanges.add(VideoRangeType.Hdr10Plus);
            }

            // 110: Chroma subsampling (4:2:0), not Monochrome
            // 09: Color Primary (BT.2020)
            // 18: Transfer Characteristics (BT.2100 HLG, ARIB STD-B67)
            // 09: Matrix Coefficients (BT.2020 non-constant luminance)
            // 0: Studio swing representation
            const hasHlgSupport = castContext.canDisplayType(
                mimeType,
                `${codecString}.110.09.18.09.0`
            );

            if (hasHlgSupport) {
                supportedRanges.add(VideoRangeType.Hlg);
            }

            // Dolby Vision with AV1 is profile 10.
            if (castContext.canDisplayType(mimeType, `dav1.10.${doviLevel}`)) {
                supportedRanges.add(VideoRangeType.Dovi);
                supportedRanges.add(VideoRangeType.DoviWithSdr);
                supportedRanges.add(VideoRangeType.DoviWithHlg);
                supportedRanges.add(VideoRangeType.DoviWithHdr10);
            }

            break;
        }
        case VideoCodec.VP9: {
            // 01: Chroma subsampling (4:2:0)
            // 09: Color Primary (BT.2020)
            // 16: Transfer Characteristics (PQ)
            // 09: Matrix Coefficients (BT.2020 non-constant luminance)
            // 01: Enforce legal color range
            const hasHdr10Support = castContext.canDisplayType(
                mimeType,
                `${codecString}.01.09.16.09.01`
            );

            if (hasHdr10Support) {
                supportedRanges.add(VideoRangeType.Hdr10);
            }

            break;
        }
        case VideoCodec.H264: {
            // H.264 supports 8-bit Dolby Vision with BL signal cross-compatibility with SDR.
            if (profile !== 'high') {
                break;
            }

            // H.264 Max PPS values calculated assuming largest (16x16) macroblock
            //
            // +-------------+---------------+------------+---------------+
            // | H.264 Level | H.264 Max PPS | DoVi Level | DoVi Max PPS  |
            // +-------------+---------------+------------+---------------+
            // |         3.0 |    10_368_000 |         01 |    22_118_400 |
            // |         3.1 |    27_648_000 |         02 |    27_648_000 |
            // |         4.0 |    62_914_560 |         05 |   124_416_000 |
            // |         4.1 |    62_914_560 |         05 |   124_416_000 |
            // |         4.2 |   133_693_440 |         06 |   199_065_600 |
            // |         5.0 |   150_994_944 |         06 |   199_065_600 |
            // |         5.1 |   251_658_240 |         08 |   398_131_200 |
            // |         5.2 |   530_841_600 |         10 |   995_328_000 |
            // |         6.0 | 1_069_547_520 |         12 | 1_990_656_000 |
            // |         6.1 | 2_139_095_040 |         13 | 3_981_312_000 |
            // +-------------+---------------+------------+---------------+
            const doviLevel = ((): string => {
                if (level <= 30) {
                    return '01';
                } else if (level <= 31) {
                    return '02';
                } else if (level <= 41) {
                    return '05';
                } else if (level <= 50) {
                    return '06';
                } else if (level <= 51) {
                    return '08';
                } else if (level <= 52) {
                    return '10';
                } else if (level <= 60) {
                    return '12';
                } else {
                    return '13';
                }
            })();

            // dvav.09: Dolby Vision profile 9 (AVC base layer, SDR compatible).
            if (castContext.canDisplayType(mimeType, `dvav.09.${doviLevel}`)) {
                supportedRanges.add(VideoRangeType.DoviWithSdr);
            }

            break;
        }
    }

    return supportedRanges;
}
/**
 * Check if this device can play text tracks.
 * This is not supported on Chromecast Audio,
 * but otherwise is.
 * @returns `true` if text tracks are supported
 */
export function hasTextTrackSupport(): boolean {
    // Text tracks require a display, so video support implies text support.
    return hasVideoSupport();
}
/**
 * Get the max supported media bitrate for the active Cast device.
 * @returns `number` representing the max supported bitrate.
 */
export function getMaxBitrateSupport(): number {
    // FIXME: We should get this dynamically or hardcode this to values
    // we see fit for each Cast device. More testing is needed.
    // 120Mb/s ?
    return 120000000;
}
/**
 * Tests the max resolution supported by the device of a particular codec.
 * @param codec - The codec in question.
 * @param profile - The profile for the codec.
 * @param level - The level for the codec.
 * @param bitDepth - The bit depth of the video.
 * @returns `number` representing the maximum resolution supported.
 */
export function getMaxResolutionSupported(
    codec: VideoCodec,
    profile: string,
    level: number,
    bitDepth: number
): Resolution {
    // This function iteratively tests the maximum resolution assuming a 16:9
    // resolution ratio. This should be a good enough approximation for most
    // devices.
    //
    // In reality, some encoders may be limited by pixel count instead of
    // resolution, but other devices may arbitrarily limit the resolution.
    let maxRes = new Resolution(0, 0);
    let newRes = new Resolution(0, 0);

    const mimeType = videoCodecToMimeType(codec);
    const codecString = getCodecString(codec, profile, level, bitDepth);

    // Limit the upper bound to 32K, which is more than enough.
    while (newRes.width < 30720) {
        newRes = ((): Resolution => {
            // Progressively increase steps as resolution increases.
            if (newRes.height >= 2160) {
                return new Resolution(newRes.width + 1280, newRes.height + 720);
            } else if (newRes.height >= 1080) {
                return new Resolution(newRes.width + 640, newRes.height + 360);
            } else {
                return new Resolution(newRes.width + 320, newRes.height + 180);
            }
        })();

        if (
            !castContext.canDisplayType(
                mimeType,
                codecString,
                newRes.width,
                newRes.height
            )
        ) {
            continue;
        }

        maxRes = newRes;
    }

    // As a compromise, after we've found the maximum 16:9 resolution, try
    // checking other resolutions. These resolutions are ordered descending by
    // the scaling factor of the expanding dimension -- in the sense that we
    // check 2.40:1 before 1.85:1. We also prioritize wider resolutions over
    // taller resolutions.
    //
    // In these checks, we hold one resolution constant and expand the other to
    // test.
    const otherResolutions = [
        // Wider resolutions
        // 32:9 is a super ultrawide resolution typically used by monitors.
        new Resolution(Math.floor(maxRes.height * 3.555), maxRes.height),
        // 2.40:1 is used by some cinema shot on 35mm film.
        new Resolution(Math.floor(maxRes.height * 2.4), maxRes.height),
        // "21:9" is the marketing term for multiple ultrawide resolutions.
        // The real aspect ratio is somewhere between 2.37:1 and 2.38:1.
        new Resolution(Math.floor(maxRes.height * 2.37037), maxRes.height),
        // 1.90:1 is a common IMAX resolution.
        new Resolution(Math.floor(maxRes.height * 1.9), maxRes.height),
        // 1.85:1 is sometimes used in Hollywood cinema.
        new Resolution(Math.floor(maxRes.height * 1.85), maxRes.height),
        // Taller resolutions.
        // 9:19.5 is a common resolution for a horizontal modern phone.
        // FIX: floor the full product — flooring (width / 9) first produced a
        // non-integer height (x * 19.5) and truncated the 9:16 case early.
        new Resolution(maxRes.width, Math.floor((maxRes.width / 9) * 19.5)),
        // 9:16 is the vertical version of 16:9.
        new Resolution(maxRes.width, Math.floor((maxRes.width / 9) * 16)),
        // 1:1 resolution
        new Resolution(maxRes.width, maxRes.width),
        // 4:3 is an older but still common resolution found on old TVs.
        new Resolution(maxRes.width, Math.floor((maxRes.width / 4) * 3)),
        // 16:10 is a common resolution for computer displays.
        new Resolution(maxRes.width, Math.floor((maxRes.width / 16) * 10))
    ];

    for (const newRes of otherResolutions) {
        if (
            castContext.canDisplayType(
                mimeType,
                codecString,
                newRes.width,
                newRes.height
            )
        ) {
            // Return early, since it'll be the best we'll find.
            return newRes;
        }
    }

    return maxRes;
}
/**
* Gets the supported profiles for a given video codec.
* @param codec - The video codec in question.
* @returns An array of the supported profiles.
*/
export function getVideoProfileSupport(codec: VideoCodec): string[] {
const possibleProfiles = ((): string[] => {
switch (codec) {
case VideoCodec.H264:
return [
'constrained baseline',
'baseline',
'main',
'high',
'high 10'
];
case VideoCodec.H265:
return ['main', 'main 10', 'high', 'high 10'];
case VideoCodec.AV1:
return ['main', 'high', 'professional'];
case VideoCodec.VP8:
return [''];
case VideoCodec.VP9:
return ['Profile 0', 'Profile 1', 'Profile 2', 'Profile 3'];
}
})();
const mimeType = videoCodecToMimeType(codec);
const supportedProfiles = possibleProfiles.filter((profile) => {
const codecString = getCodecString(codec, profile);
return castContext.canDisplayType(mimeType, codecString);
});
return supportedProfiles;
}
/**
 * Gets the highest level supported by the given codec profile.
 * @param codec - The codec in question.
 * @param profile - The profile for the codec.
 * @param bitDepth - The bit depth of the video.
 * @returns `number` representing the highest level supported.
 */
export function getVideoCodecHighestLevelSupport(
    codec: VideoCodec,
    profile?: string,
    bitDepth?: number
): number | undefined {
    // Candidate levels in ascending order.
    const possibleLevels = ((): number[] => {
        switch (codec) {
            case VideoCodec.H264:
                return [
                    10, 11, 12, 13, 20, 21, 22, 30, 31, 32, 40, 41, 42, 50, 51,
                    52, 60, 61, 62
                ];
            case VideoCodec.H265:
                // The server expects H.265 levels to be multiplied by 3.
                return [10, 20, 21, 30, 31, 40, 41, 50, 51, 52, 60, 61, 62].map(
                    (level) => level * 3
                );
            case VideoCodec.AV1:
                // This level should correspond to the `seq_level_idx`.
                return [0, 1, 4, 5, 8, 9, 12, 13, 14, 15, 16, 17, 18, 19];
            case VideoCodec.VP8:
                // No levels defined for VP8.
                return [];
            case VideoCodec.VP9:
                return [
                    1.0, 1.1, 2.0, 2.1, 3.0, 3.1, 4.0, 4.1, 5.0, 5.1, 5.2, 6.0,
                    6.1, 6.2
                ];
        }
    })();
    const mimeType = videoCodecToMimeType(codec);
    const supportedLevels: number[] = [];

    // Probe levels ascending and stop at the first unsupported one: support
    // for a level implies support for all lower levels.
    for (const level of possibleLevels) {
        const codecString = getCodecString(codec, profile, level, bitDepth);
        const supported = castContext.canDisplayType(mimeType, codecString);

        if (!supported) {
            break;
        }

        supportedLevels.push(level);
    }

    // The last collected level is the highest supported; undefined when even
    // the lowest level (or VP8, which has none) is unsupported.
    return supportedLevels.length > 0
        ? supportedLevels[supportedLevels.length - 1]
        : undefined;
}
/**
 * Gets the highest bit depth supported by the given codec profile.
 * @param codec - The codec in question.
 * @param profile - The profile for the codec.
 * @param level - The level for the codec.
 * @returns The highest bit depth supported by the given codec profile.
 */
export function getVideoCodecHighestBitDepthSupport(
    codec: VideoCodec,
    profile?: string,
    level?: number
): number | undefined {
    // Candidate bit depths, ordered descending so `find` returns the highest
    // supported one.
    const possibleBitDepths = ((): number[] => {
        switch (codec) {
            case VideoCodec.H264:
                switch (profile?.toLowerCase()) {
                    case 'high 10':
                        return [10, 8];
                    default:
                        return [8];
                }
            case VideoCodec.H265:
                switch (profile?.toLowerCase()) {
                    case 'main 10':
                    case 'high 10':
                        return [10, 8];
                    default:
                        return [8];
                }
            case VideoCodec.AV1:
                switch (profile?.toLowerCase()) {
                    case 'professional':
                        return [10, 8];
                    default:
                        return [8];
                }
            case VideoCodec.VP8:
                // VP8's bitstream officially only supports up to 8 bits.
                return [8];
            case VideoCodec.VP9:
                switch (profile?.toLowerCase()) {
                    case 'profile 2':
                    case 'profile 3':
                        return [12, 10];
                    default:
                        return [8];
                }
        }
    })();

    // The MIME type depends only on the codec, so compute it once instead of
    // re-deriving it for every candidate bit depth inside the callback.
    const mimeType = videoCodecToMimeType(codec);

    return possibleBitDepths.find((bitDepth) => {
        const codecString = getCodecString(codec, profile, level, bitDepth);

        return castContext.canDisplayType(mimeType, codecString);
    });
}
/**
 * Gets the minimum bit depth required for a given codec and profile.
 * @param codec - The codec in question.
 * @param profile - The profile for the codec.
 * @returns The minimum bit depth required.
 */
export function getVideoCodecMinimumBitDepth(
    codec: VideoCodec,
    profile: string
): number {
    const normalizedProfile = profile.toLowerCase();

    // VP9 profiles 2 and 3 require 10 bit depth.
    const requiresTenBit =
        codec === VideoCodec.VP9 &&
        ['profile 2', 'profile 3'].includes(normalizedProfile);

    return requiresTenBit ? 10 : 8;
}
/**
 * Get VPX (VP8, VP9) codecs supported by the active Cast device.
 * @returns An array of the supported WebM codecs.
 */
export function getSupportedWebMVideoCodecs(): VideoCodec[] {
    // Probe the candidate WebM codecs directly against a WebM container.
    return [VideoCodec.VP8, VideoCodec.VP9, VideoCodec.AV1].filter((codec) =>
        castContext.canDisplayType('video/webm', getCodecString(codec))
    );
}
/**
 * Get supported video codecs suitable for use in an MP4 container.
 * @returns An array of the supported MP4 video codecs.
 */
export function getSupportedMP4VideoCodecs(): VideoCodec[] {
    // Probe the candidate MP4 codecs directly against an MP4 container.
    return [VideoCodec.H264, VideoCodec.H265, VideoCodec.AV1].filter((codec) =>
        castContext.canDisplayType('video/mp4', getCodecString(codec))
    );
}
/**
 * Get supported audio codecs suitable for use in an MP4 container.
 * @returns Supported MP4 audio codecs.
 */
export function getSupportedMP4AudioCodecs(): string[] {
    // Always-available codecs first, then the optional Dolby codecs when the
    // device reports support for them.
    return [
        'aac',
        'mp3',
        'opus',
        ...(hasEAC3Support() ? ['eac3'] : []),
        ...(hasAC3Support() ? ['ac3'] : [])
    ];
}
/**
 * Get supported video codecs suitable for use with HLS.
 * @returns Supported HLS video codecs.
 */
export function getSupportedHLSVideoCodecs(): VideoCodec[] {
    // The server now supports fmp4, so return a list of all supported mp4
    // codecs.
    return getSupportedMP4VideoCodecs();
}
/**
 * Get supported audio codecs suitable for use with HLS.
 * @returns All supported HLS audio codecs.
 */
export function getSupportedHLSAudioCodecs(): string[] {
    // HLS basically supports whatever MP4 supports.
    return getSupportedMP4AudioCodecs();
}
/**
 * Get supported audio codecs suitable for use in a WebM container.
 * @returns All supported WebM audio codecs.
 */
export function getSupportedWebMAudioCodecs(): string[] {
    // WebM audio is limited to the two Xiph codecs; no capability probe
    // is performed here.
    return ['vorbis', 'opus'];
}
/**
 * Get supported audio codecs.
 * @returns the supported audio codecs.
 */
export function getSupportedAudioCodecs(): string[] {
    // Static list; no per-device capability probe is performed here.
    return ['opus', 'vorbis', 'mp3', 'aac', 'flac', 'wav'];
}
================================================
FILE: src/components/commandHandler.ts
================================================
import { getReportingParams, TicksPerSecond } from '../helpers';
import type {
DataMessage,
DisplayRequest,
PlayRequest,
SeekRequest,
SetIndexRequest,
SetRepeatModeRequest,
SupportedCommands
} from '../types/global';
import { AppStatus } from '../types/appStatus';
import {
translateItems,
shuffle,
instantMix,
setAudioStreamIndex,
setSubtitleStreamIndex,
seek
} from './maincontroller';
import { reportPlaybackProgress } from './jellyfinActions';
import { PlaybackManager } from './playbackManager';
import { DocumentManager } from './documentManager';
// eslint-disable-next-line @typescript-eslint/no-extraneous-class
export abstract class CommandHandler {
private static playerManager: framework.PlayerManager;
private static supportedCommands: SupportedCommands = {
DisplayContent: CommandHandler.displayContentHandler,
Identify: CommandHandler.IdentifyHandler,
InstantMix: CommandHandler.instantMixHandler,
Mute: CommandHandler.MuteHandler,
NextTrack: CommandHandler.nextTrackHandler,
Pause: CommandHandler.PauseHandler,
PlayLast: CommandHandler.playLastHandler,
PlayNext: CommandHandler.playNextHandler,
PlayNow: CommandHandler.playNowHandler,
PlayPause: CommandHandler.PlayPauseHandler,
PreviousTrack: CommandHandler.previousTrackHandler,
Seek: CommandHandler.SeekHandler,
SetAudioStreamIndex: CommandHandler.setAudioStreamIndexHandler,
SetRepeatMode: CommandHandler.SetRepeatModeHandler,
SetSubtitleStreamIndex: CommandHandler.setSubtitleStreamIndexHandler,
SetVolume: CommandHandler.SetVolumeHandler,
Shuffle: CommandHandler.shuffleHandler,
Stop: CommandHandler.StopHandler,
ToggleMute: CommandHandler.ToggleMuteHandler,
Unmute: CommandHandler.MuteHandler,
Unpause: CommandHandler.UnpauseHandler,
VolumeDown: CommandHandler.VolumeDownHandler,
VolumeUp: CommandHandler.VolumeUpHandler
};
static configure(playerManager: framework.PlayerManager): void {
this.playerManager = playerManager;
}
static playNextHandler(data: DataMessage): void {
translateItems(data, data.options as PlayRequest, data.command);
}
static playNowHandler(data: DataMessage): void {
translateItems(data, data.options as PlayRequest, data.command);
}
static playLastHandler(data: DataMessage): void {
translateItems(data, data.options as PlayRequest, data.command);
}
static shuffleHandler(data: DataMessage): void {
shuffle(
data,
data.options as PlayRequest,
(data.options as PlayRequest).items[0]
);
}
static instantMixHandler(data: DataMessage): void {
instantMix(
data,
data.options as PlayRequest,
(data.options as PlayRequest).items[0]
);
}
static displayContentHandler(data: DataMessage): void {
if (PlaybackManager.isIdle()) {
DocumentManager.showItemId((data.options as DisplayRequest).ItemId);
}
}
static nextTrackHandler(): void {
if (PlaybackManager.hasNextItem()) {
PlaybackManager.playNextItem(true);
}
}
static previousTrackHandler(): void {
if (PlaybackManager.hasPrevItem()) {
PlaybackManager.playPreviousItem();
}
}
static setAudioStreamIndexHandler(data: DataMessage): void {
setAudioStreamIndex(
PlaybackManager.playbackState,
(data.options as SetIndexRequest).index
);
}
static setSubtitleStreamIndexHandler(data: DataMessage): void {
setSubtitleStreamIndex(
PlaybackManager.playbackState,
(data.options as SetIndexRequest).index
);
}
    // VolumeUp, VolumeDown and ToggleMute commands seem to be handled on the sender in the current implementation.
    // From what I can tell there's no convenient way for the receiver to get its own volume.
    // We should probably remove these commands in the future.
    /** No-op: volume is adjusted on the sender side. Logs for debugging. */
    static VolumeUpHandler(): void {
        console.log('VolumeUp handler not implemented');
    }
    /** No-op: volume is adjusted on the sender side. Logs for debugging. */
    static VolumeDownHandler(): void {
        console.log('VolumeDown handler not implemented');
    }
    /** No-op: muting is handled on the sender side. Logs for debugging. */
    static ToggleMuteHandler(): void {
        console.log('ToggleMute handler not implemented');
    }
    /** No-op: volume level is set on the sender side. Logs for debugging. */
    static SetVolumeHandler(): void {
        // This is now implemented on the sender
        console.log('SetVolume handler not implemented');
    }
static IdentifyHandler(): void {
if (!PlaybackManager.isPlaying()) {
if (!PlaybackManager.isBuffering()) {
DocumentManager.setAppStatus(AppStatus.Waiting);
}
DocumentManager.startBackdropInterval();
} else {
// When a client connects send back the initial device state (volume etc) via a playbackstop message
reportPlaybackProgress(
PlaybackManager.playbackState,
getReportingParams(PlaybackManager.playbackState),
true,
'playbackstop'
);
}
}
static SeekHandler(data: DataMessage): void {
seek(
PlaybackManager.playbackState,
(data.options as SeekRequest).position * TicksPerSecond
);
}
    /** No-op: muting is handled on the sender side. Logs for debugging. */
    static MuteHandler(): void {
        // Mute is now implemented on the sender
        console.log('Mute handler not implemented');
    }
    /** No-op: unmuting is handled on the sender side. Logs for debugging. */
    static UnmuteHandler(): void {
        // Unmute is now implemented on the sender
        console.log('Unmute handler not implemented');
    }
    /** Handle the Stop command: stop playback via the player manager. */
    static StopHandler(): void {
        this.playerManager.stop();
    }
static PlayPauseHandler(): void {
if (
this.playerManager.getPlayerState() ===
cast.framework.messages.PlayerState.PAUSED
) {
this.playerManager.play();
} else {
this.playerManager.pause();
}
}
    /** Handle the Pause command: pause playback via the player manager. */
    static PauseHandler(): void {
        this.playerManager.pause();
    }
static SetRepeatModeHandler(data: DataMessage): void {
window.repeatMode = (data.options as SetRepeatModeRequest).RepeatMode;
window.reportEventType = 'repeatmodechange';
}
    /** Handle the Unpause command: resume playback via the player manager. */
    static UnpauseHandler(): void {
        this.playerManager.play();
    }
    // We should avoid using a defaulthandler that has a purpose other than informing the dev/user
    // Currently all unhandled commands will be treated as play commands.
    /**
     * Fallback for commands without a dedicated handler: treats the
     * payload as a plain play request.
     * @param data - Message payload of the unrecognized command.
     */
    static defaultHandler(data: DataMessage): void {
        translateItems(data, data.options as PlayRequest, 'play');
    }
static processMessage(data: DataMessage, command: string): void {
const commandHandler = this.supportedCommands[command];
if (typeof commandHandler === 'function') {
console.debug(
`Command "${command}" received. Identified handler, calling identified handler.`
);
commandHandler.bind(this)(data);
} else {
console.log(
`Command "${command}" received. Could not identify handler, calling default handler.`
);
this.defaultHandler(data);
}
}
}
================================================
FILE: src/components/deviceprofileBuilder.ts
================================================
import {
VideoRangeType,
type CodecProfile,
type ContainerProfile,
type DeviceProfile,
type DirectPlayProfile,
type ProfileCondition,
type SubtitleProfile,
type TranscodingProfile
} from '@jellyfin/sdk/lib/generated-client';
import { CodecType } from '@jellyfin/sdk/lib/generated-client/models/codec-type';
import { DlnaProfileType } from '@jellyfin/sdk/lib/generated-client/models/dlna-profile-type';
import { EncodingContext } from '@jellyfin/sdk/lib/generated-client/models/encoding-context';
import { ProfileConditionType } from '@jellyfin/sdk/lib/generated-client/models/profile-condition-type';
import { ProfileConditionValue } from '@jellyfin/sdk/lib/generated-client/models/profile-condition-value';
import { SubtitleDeliveryMethod } from '@jellyfin/sdk/lib/generated-client/models/subtitle-delivery-method';
import {
hasTextTrackSupport,
getSupportedWebMVideoCodecs,
getSupportedMP4VideoCodecs,
getSupportedMP4AudioCodecs,
getSupportedHLSVideoCodecs,
getSupportedHLSAudioCodecs,
getSupportedWebMAudioCodecs,
getSupportedAudioCodecs,
hasVideoSupport,
getSupportedVideoCodecs,
getVideoProfileSupport,
getVideoCodecHighestLevelSupport,
getVideoCodecHighestBitDepthSupport,
type Resolution,
getMaxResolutionSupported,
getVideoCodecMinimumBitDepth,
getVideoRangeSupport
} from './codecSupportHelper';
/**
 * Build a ProfileCondition from its parts.
 * @param Property - Property the condition applies to.
 * @param Condition - Comparison used when testing the property.
 * @param Value - Value the property is compared against.
 * @param [IsRequired] - Don't permit unknown values
 * @returns The assembled profile condition.
 */
function createProfileCondition(
    Property: ProfileConditionValue,
    Condition: ProfileConditionType,
    Value: string,
    IsRequired = false
): ProfileCondition {
    const condition: ProfileCondition = {
        Condition,
        IsRequired,
        Property,
        Value
    };

    return condition;
}
/**
 * Get container profiles.
 *
 * Currently no container-level restrictions are reported, so this always
 * returns an empty list; it exists so getDeviceProfile can always populate
 * DeviceProfile.ContainerProfiles.
 * @todo Why does this always return an empty array?
 * @returns Container profiles.
 */
function getContainerProfiles(): ContainerProfile[] {
    return [];
}
/**
 * Get direct play profiles
 * @returns Direct play profiles.
 */
function getDirectPlayProfiles(): DirectPlayProfile[] {
    const profiles: DirectPlayProfile[] = [];

    if (hasVideoSupport()) {
        const webmAudioCodecs = getSupportedWebMAudioCodecs();
        const mp4AudioCodecs = getSupportedMP4AudioCodecs();

        // One profile per supported WebM video codec.
        for (const webmCodec of getSupportedWebMVideoCodecs()) {
            profiles.push({
                AudioCodec: webmAudioCodecs.join(','),
                Container: 'webm',
                Type: DlnaProfileType.Video,
                VideoCodec: webmCodec
            });
        }

        // A single combined profile covers the MP4 family.
        profiles.push({
            AudioCodec: mp4AudioCodecs.join(','),
            Container: 'mp4,m4v',
            Type: DlnaProfileType.Video,
            VideoCodec: getSupportedMP4VideoCodecs().join(',')
        });
    }

    // N.B. Supported audio formats and containers can be found here:
    // https://developers.google.com/cast/docs/media#mp4_audio_only
    for (const audioFormat of getSupportedAudioCodecs()) {
        let container: string;

        switch (audioFormat.toLowerCase()) {
            case 'mp3':
                container = 'mp3,mp4';
                break;
            case 'opus':
            case 'vorbis':
                container = 'ogg,webm';
                break;
            case 'aac':
                container = 'm4a';
                break;
            // 'flac', 'wav' and anything else use a container named
            // after the codec itself.
            default:
                container = audioFormat;
                break;
        }

        profiles.push({
            AudioCodec: audioFormat,
            Container: container,
            Type: DlnaProfileType.Audio
        });
    }

    return profiles;
}
/**
* Get codec profiles
* @returns Codec profiles.
*/
function getCodecProfiles(): CodecProfile[] {
const codecProfiles: CodecProfile[] = [];
const deviceHasVideo = hasVideoSupport();
const audioConditions: CodecProfile = {
Codec: 'flac',
Conditions: [
createProfileCondition(
ProfileConditionValue.AudioSampleRate,
ProfileConditionType.LessThanEqual,
'96000'
),
createProfileCondition(
ProfileConditionValue.AudioBitDepth,
ProfileConditionType.LessThanEqual,
'24'
)
],
Type: CodecType.Audio
};
codecProfiles.push(audioConditions);
// Google Cast does not support AAC 5.1, as officially stated by the Google team.
// Additionally, the Cast SDK seems to silently downmix anything that isn't Opus or Dolby codecs
// to stereo.
//
// Let the server decide how to handle the downmixing vs. transcoding trade-off instead by
// transmitting these limitations.
//
// See: https://issuetracker.google.com/issues/69112577#comment20
// See: https://issuetracker.google.com/issues/330548743
for (const audioCodec of getSupportedAudioCodecs()) {
switch (audioCodec) {
case 'opus':
case 'eac3':
case 'ac3':
continue;
}
const profileConditions: ProfileCondition[] = [
createProfileCondition(
ProfileConditionValue.AudioChannels,
ProfileConditionType.LessThanEqual,
'2'
)
];
codecProfiles.push({
Codec: audioCodec,
Conditions: profileConditions,
Type: CodecType.Audio
});
if (deviceHasVideo) {
codecProfiles.push({
Codec: audioCodec,
Conditions: profileConditions,
Type: CodecType.VideoAudio
});
}
}
// If device is audio only, don't add all the video related stuff
if (!deviceHasVideo) {
return codecProfiles;
}
for (const videoCodec of getSupportedVideoCodecs()) {
const videoProfiles = getVideoProfileSupport(videoCodec);
if (videoProfiles.length === 0) {
continue;
}
const maxLevels: number[] = [];
const minBitDepths: number[] = [];
const maxBitDepths: number[] = [];
const maxResolutions: Resolution[] = [];
const videoRangeSets: Set[] = [];
for (const videoProfile of videoProfiles) {
const maxVideoLevel =
getVideoCodecHighestLevelSupport(videoCodec, videoProfile) ?? 0;
const minBitDepth = getVideoCodecMinimumBitDepth(
videoCodec,
videoProfile
);
const maxBitDepth =
getVideoCodecHighestBitDepthSupport(
videoCodec,
videoProfile,
maxVideoLevel
) ?? 0;
const maxResolution = getMaxResolutionSupported(
videoCodec,
videoProfile,
maxVideoLevel,
maxBitDepth
);
const videoRangeSupport = getVideoRangeSupport(
videoCodec,
videoProfile,
maxVideoLevel
);
maxLevels.push(maxVideoLevel);
minBitDepths.push(minBitDepth);
maxBitDepths.push(maxBitDepth);
maxResolutions.push(maxResolution);
videoRangeSets.push(videoRangeSupport);
}
// If all other constraints are equal, merge into one condition. This
// is pretty common.
if (
maxLevels.every((l) => l === maxLevels[0]) &&
minBitDepths.every((b) => b === minBitDepths[0]) &&
maxBitDepths.every((b) => b === maxBitDepths[0]) &&
maxResolutions.every((r) => r.equals(maxResolutions[0])) &&
videoRangeSets.every(
(r) =>
r.size === videoRangeSets[0].size &&
[...r].every((v) => videoRangeSets[0].has(v))
)
) {
const maxLevel = maxLevels[0];
const minBitDepth = minBitDepths[0];
const maxBitDepth = maxBitDepths[0];
const maxResolution = maxResolutions[0];
const videoRanges = videoRangeSets[0];
const profileConditions = [
createProfileCondition(
ProfileConditionValue.IsAnamorphic,
ProfileConditionType.NotEquals,
'true'
),
createProfileCondition(
ProfileConditionValue.VideoProfile,
ProfileConditionType.EqualsAny,
videoProfiles.join('|')
),
createProfileCondition(
ProfileConditionValue.VideoLevel,
ProfileConditionType.LessThanEqual,
maxLevel.toString()
),
createProfileCondition(
ProfileConditionValue.VideoBitDepth,
ProfileConditionType.GreaterThanEqual,
minBitDepth.toString()
),
createProfileCondition(
ProfileConditionValue.VideoBitDepth,
ProfileConditionType.LessThanEqual,
maxBitDepth.toString()
),
createProfileCondition(
ProfileConditionValue.Width,
ProfileConditionType.LessThanEqual,
maxResolution.width.toString()
),
createProfileCondition(
ProfileConditionValue.Height,
ProfileConditionType.LessThanEqual,
maxResolution.height.toString()
),
createProfileCondition(
ProfileConditionValue.VideoRangeType,
ProfileConditionType.EqualsAny,
[...videoRanges].join('|')
)
];
codecProfiles.push({
Codec: videoCodec,
Conditions: profileConditions,
Type: CodecType.Video
});
} else {
// Different profiles of the same codec have different video profile
// constraints. Create a new codec profile for each.
for (let i = 0; i < videoProfiles.length; i++) {
const videoProfile = videoProfiles[i];
const maxLevel = maxLevels[i];
const minBitDepth = minBitDepths[i];
const maxBitDepth = maxBitDepths[i];
const maxResolution = maxResolutions[i];
const videoRanges = videoRangeSets[i];
const profileConditions = [
createProfileCondition(
ProfileConditionValue.IsAnamorphic,
ProfileConditionType.NotEquals,
'true'
),
createProfileCondition(
ProfileConditionValue.VideoProfile,
ProfileConditionType.Equals,
videoProfile
),
createProfileCondition(
ProfileConditionValue.VideoLevel,
ProfileConditionType.LessThanEqual,
maxLevel.toString()
),
createProfileCondition(
ProfileConditionValue.VideoBitDepth,
ProfileConditionType.GreaterThanEqual,
minBitDepth.toString()
),
createProfileCondition(
ProfileConditionValue.VideoBitDepth,
ProfileConditionType.LessThanEqual,
maxBitDepth.toString()
),
createProfileCondition(
ProfileConditionValue.Width,
ProfileConditionType.LessThanEqual,
maxResolution.width.toString()
),
createProfileCondition(
ProfileConditionValue.Height,
ProfileConditionType.LessThanEqual,
maxResolution.height.toString()
),
createProfileCondition(
ProfileConditionValue.VideoRangeType,
ProfileConditionType.EqualsAny,
[...videoRanges].join('|')
)
];
codecProfiles.push({
Codec: videoCodec,
Conditions: profileConditions,
Type: CodecType.Video
});
}
}
}
const videoAudioConditions: CodecProfile = {
Conditions: [
// Apparently something like an audiotrack from a second source, not in the current mediasource.
// Input from multiple sources is not supported, so this feature is not allowed.
createProfileCondition(
ProfileConditionValue.IsSecondaryAudio,
ProfileConditionType.Equals,
'false'
)
],
Type: CodecType.VideoAudio
};
codecProfiles.push(videoAudioConditions);
return codecProfiles;
}
/**
 * Get transcoding profiles
 * @returns Transcoding profiles.
 */
function getTranscodingProfiles(): TranscodingProfile[] {
    const transcodingProfiles: TranscodingProfile[] = [];
    const hlsAudioCodecs = getSupportedHLSAudioCodecs();

    transcodingProfiles.push({
        AudioCodec: hlsAudioCodecs.join(','),
        BreakOnNonKeyFrames: false,
        Container: 'ts',
        Context: EncodingContext.Streaming,
        MinSegments: 1,
        Protocol: 'hls',
        Type: DlnaProfileType.Audio
    });

    const supportedAudio = getSupportedAudioCodecs();

    // audio only profiles here
    for (const audioFormat of supportedAudio) {
        transcodingProfiles.push({
            AudioCodec: audioFormat,
            Container: audioFormat,
            Context: EncodingContext.Streaming,
            Protocol: 'http',
            Type: DlnaProfileType.Audio
        });
    }

    // If device is audio only, don't add all the video related stuff
    if (!hasVideoSupport()) {
        return transcodingProfiles;
    }

    const hlsVideoCodecs = getSupportedHLSVideoCodecs();

    if (hlsVideoCodecs.length > 0 && hlsAudioCodecs.length > 0) {
        transcodingProfiles.push({
            AudioCodec: hlsAudioCodecs.join(','),
            BreakOnNonKeyFrames: false,
            Container: 'mp4',
            Context: EncodingContext.Streaming,
            MinSegments: 1,
            Protocol: 'hls',
            Type: DlnaProfileType.Video,
            VideoCodec: hlsVideoCodecs.join(',')
        });

        // Currently, if there are any HLS codecs, stop early. This mimics the web client's
        // behavior and works around a bug where the server may pick other single-codec containers
        // because the audio codec needs less transcoding.
        //
        // In reality, we're only really losing out on the VPx codecs, which have middling compute
        // to efficiency ratios anyways.
        return transcodingProfiles;
    }

    const mp4VideoCodecs = getSupportedMP4VideoCodecs();
    const mp4AudioCodecs = getSupportedMP4AudioCodecs();

    if (mp4AudioCodecs.length > 0 && mp4VideoCodecs.length > 0) {
        transcodingProfiles.push({
            AudioCodec: mp4AudioCodecs.join(','),
            Container: 'mp4',
            Context: EncodingContext.Streaming,
            MinSegments: 1,
            Protocol: 'http',
            Type: DlnaProfileType.Video,
            VideoCodec: mp4VideoCodecs.join(',')
        });
    }

    const webmAudioCodecs = getSupportedWebMAudioCodecs();
    const webmVideoCodecs = getSupportedWebMVideoCodecs();

    // FIX: previously this checked hlsVideoCodecs.length instead of
    // webmVideoCodecs.length (copy-paste from the HLS branch), which could
    // push a WebM profile with an empty VideoCodec list.
    if (webmAudioCodecs.length > 0 && webmVideoCodecs.length > 0) {
        transcodingProfiles.push({
            AudioCodec: webmAudioCodecs.join(','),
            Container: 'webm',
            Context: EncodingContext.Streaming,
            Protocol: 'http',
            Type: DlnaProfileType.Video,
            VideoCodec: webmVideoCodecs.join(',')
        });
    }

    return transcodingProfiles;
}
/**
 * Get subtitle profiles
 * @returns Subtitle profiles.
 */
function getSubtitleProfiles(): SubtitleProfile[] {
    if (!hasTextTrackSupport()) {
        return [];
    }

    // VTT is the only format offered, delivered either as an external
    // file or inside the HLS playlist.
    return [SubtitleDeliveryMethod.External, SubtitleDeliveryMethod.Hls].map(
        (method) => ({
            Format: 'vtt',
            Method: method
        })
    );
}
/**
 * Creates a device profile containing supported codecs for the active Cast device.
 * @param maxBitrate - maximum bitrate to be used by the server when streaming data
 * @returns Device profile.
 */
export function getDeviceProfile(maxBitrate: number): DeviceProfile {
    // MaxStaticBitrate seems to be for offline sync only
    const profile: DeviceProfile = {
        CodecProfiles: getCodecProfiles(),
        ContainerProfiles: getContainerProfiles(),
        DirectPlayProfiles: getDirectPlayProfiles(),
        MaxStaticBitrate: maxBitrate,
        MaxStreamingBitrate: maxBitrate,
        MusicStreamingTranscodingBitrate: Math.min(maxBitrate, 192000),
        SubtitleProfiles: getSubtitleProfiles(),
        TranscodingProfiles: getTranscodingProfiles()
    };

    return profile;
}
================================================
FILE: src/components/documentManager.ts
================================================
import type { BaseItemDto } from '@jellyfin/sdk/lib/generated-client';
import { getItemsApi, getUserLibraryApi } from '@jellyfin/sdk/lib/utils/api';
import { AppStatus } from '../types/appStatus';
import { parseISO8601Date, TicksPerSecond, ticksToSeconds } from '../helpers';
import { JellyfinApi } from './jellyfinApi';
import { hasVideoSupport } from './codecSupportHelper';
// eslint-disable-next-line @typescript-eslint/no-extraneous-class
export abstract class DocumentManager {
// Duration between each backdrop switch in ms
private static backdropPeriodMs = 30000;
// Timer state - so that we don't start the interval more than necessary
private static backdropTimer: number | null = null;
private static status = AppStatus.Unset;
/**
* Hide the document body on chromecast audio to save resources
*/
public static initialize(): void {
if (!hasVideoSupport()) {
document.body.style.display = 'none';
}
}
/**
* Set the background image for a html element, without preload.
* You should do the preloading first with preloadImage.
* @param element - HTML Element
* @param src - URL to the image or null to remove the active one
*/
private static setBackgroundImage(
element: HTMLElement,
src: string | null
): void {
if (src) {
element.style.backgroundImage = `url(${src})`;
} else {
element.style.backgroundImage = '';
}
}
/**
* Preload an image
* @param src - URL to the image or null
* @returns wait for the preload and return the url to use. Might be nulled after loading error.
*/
private static preloadImage(src: string | null): Promise {
if (src) {
return new Promise((resolve, reject) => {
const preload = new Image();
preload.src = src;
preload.addEventListener('load', () => {
resolve(src);
});
preload.addEventListener('error', () => {
// might also resolve and return null here, to have the caller take away the background.
reject();
});
});
} else {
return Promise.resolve(null);
}
}
/**
* Get url for primary image for a given item
* @param item - to look up
* @returns url to image after preload
*/
private static getPrimaryImageUrl(
item: BaseItemDto
): Promise {
let src: string | null = null;
if (item.AlbumPrimaryImageTag && item.AlbumId) {
src = JellyfinApi.createImageUrl(
item.AlbumId,
'Primary',
item.AlbumPrimaryImageTag
);
} else if (item.ImageTags?.Primary && item.Id) {
src = JellyfinApi.createImageUrl(
item.Id,
'Primary',
item.ImageTags.Primary
);
}
if (
item?.UserData?.PlayedPercentage &&
item?.UserData?.PlayedPercentage < 100 &&
!item.IsFolder &&
src != null
) {
src += `&PercentPlayed=${item.UserData.PlayedPercentage}`;
}
return this.preloadImage(src);
}
/**
* Get url for logo image for a given item
* @param item - to look up
* @returns url to logo image after preload
*/
private static getLogoUrl(item: BaseItemDto): Promise {
let src: string | null = null;
if (item.ImageTags?.Logo && item.Id) {
src = JellyfinApi.createImageUrl(
item.Id,
'Logo',
item.ImageTags.Logo
);
} else if (item.ParentLogoItemId && item.ParentLogoImageTag) {
src = JellyfinApi.createImageUrl(
item.ParentLogoItemId,
'Logo',
item.ParentLogoImageTag
);
}
return this.preloadImage(src);
}
/**
* This fucntion takes an item and shows details about it
* on the details page. This happens when no media is playing,
* and the connected client is browsing the library.
* @param item - to show information about
* @returns for the page to load
*/
public static async showItem(item: BaseItemDto): Promise {
// no showItem for cc audio
if (!hasVideoSupport()) {
return;
}
// stop cycling backdrops
this.clearBackdropInterval();
const promises = [
this.getWaitingBackdropUrl(item),
this.getPrimaryImageUrl(item),
this.getLogoUrl(item)
];
const urls = await Promise.all(promises);
requestAnimationFrame(() => {
this.setWaitingBackdrop(urls[0], item);
this.setDetailImage(urls[1]);
this.setLogo(urls[2]);
this.setOverview(item.Overview ?? null);
this.setGenres(item?.Genres?.join(' / ') ?? null);
this.setDisplayName(item);
this.setMiscInfo(item);
this.setRating(item);
if (item?.UserData?.Played) {
this.setPlayedIndicator(true);
} else if (item?.UserData?.UnplayedItemCount) {
this.setPlayedIndicator(item?.UserData?.UnplayedItemCount);
} else {
this.setPlayedIndicator(false);
}
if (
item?.UserData?.PlayedPercentage &&
item?.UserData?.PlayedPercentage < 100 &&
!item.IsFolder
) {
this.setHasPlayedPercentage(false);
this.setPlayedPercentage(item.UserData.PlayedPercentage);
} else {
this.setHasPlayedPercentage(false);
this.setPlayedPercentage(0);
}
// Switch visible view!
this.setAppStatus(AppStatus.Details);
});
}
/**
* Set value of played indicator
* @param value - True = played, false = not visible, number = number of unplayed items
*/
private static setPlayedIndicator(value: boolean | number): void {
const playedIndicatorOk = this.getElementById('played-indicator-ok');
const playedIndicatorValue = this.getElementById(
'played-indicator-value'
);
if (value === true) {
// All items played
this.setVisibility(playedIndicatorValue, false);
this.setVisibility(playedIndicatorOk, true);
} else if (value === false) {
// No indicator
this.setVisibility(playedIndicatorValue, false);
this.setVisibility(playedIndicatorOk, false);
} else {
// number
playedIndicatorValue.innerHTML = value.toString();
this.setVisibility(playedIndicatorValue, true);
this.setVisibility(playedIndicatorOk, false);
}
}
/**
* Show item, but from just the id number, not an actual item.
* Looks up the item and then calls showItem
* @param itemId - id of item to look up
* @returns promise that resolves when the item is shown
*/
public static async showItemId(itemId: string): Promise {
// no showItemId for cc audio
if (!hasVideoSupport()) {
return;
}
const response = await getUserLibraryApi(
JellyfinApi.jellyfinApi
).getItem({
itemId
});
DocumentManager.showItem(response.data);
}
/**
* Update item rating elements
* @param item - to look up
*/
private static setRating(item: BaseItemDto): void {
const starRating = this.getElementById('star-rating');
const starRatingValue = this.getElementById('star-rating-value');
if (item.CommunityRating != null) {
starRatingValue.innerHTML = item.CommunityRating.toFixed(1);
this.setVisibility(starRating, true);
this.setVisibility(starRatingValue, true);
} else {
this.setVisibility(starRating, false);
this.setVisibility(starRatingValue, false);
}
const criticRating = this.getElementById('critic-rating');
const criticRatingValue = this.getElementById('critic-rating-value');
if (item.CriticRating != null) {
const verdict = item.CriticRating >= 60 ? 'fresh' : 'rotten';
criticRating.classList.add(verdict);
criticRating.classList.remove(
verdict == 'fresh' ? 'rotten' : 'fresh'
);
criticRatingValue.innerHTML = item.CriticRating.toString();
this.setVisibility(criticRating, true);
this.setVisibility(criticRatingValue, true);
} else {
this.setVisibility(criticRating, false);
this.setVisibility(criticRatingValue, false);
}
}
/**
* Set the status of the app, and switch the visible view
* to the corresponding one.
* @param status - to set
*/
public static setAppStatus(status: AppStatus): void {
this.status = status;
document.body.className = status;
}
/**
* Get the status of the app
* @returns app status
*/
public static getAppStatus(): AppStatus {
return this.status;
}
// BACKDROP LOGIC
/**
* Get url to the backdrop image, and return a preload promise.
* @param item - Item to use for waiting backdrop, null to remove it.
* @returns promise for the preload to complete
*/
public static getWaitingBackdropUrl(
item: BaseItemDto | null
): Promise {
// no backdrop as a fallback
let src: string | null = null;
if (item != null) {
if (item.BackdropImageTags?.length && item.Id) {
// get first backdrop of image if applicable
src = JellyfinApi.createImageUrl(
item.Id,
'Backdrop',
item.BackdropImageTags[0]
);
} else if (
item.ParentBackdropItemId &&
item.ParentBackdropImageTags?.length
) {
// otherwise get first backdrop from parent
src = JellyfinApi.createImageUrl(
item.ParentBackdropItemId,
'Backdrop',
item.ParentBackdropImageTags[0]
);
}
}
return this.preloadImage(src);
}
/**
* Backdrops are set on the waiting container.
* They are switched around every 30 seconds by default
* (governed by startBackdropInterval)
* @param src - Url to image
* @param item - Item to use for waiting backdrop, null to remove it.
*/
public static async setWaitingBackdrop(
src: string | null,
item: BaseItemDto | null
): Promise {
let element: HTMLElement = this.querySelector(
'#waiting-container-backdrop'
);
this.setBackgroundImage(element, src);
element = this.getElementById('waiting-description');
element.innerHTML = item?.Name ?? '';
}
/**
* Set a random backdrop on the waiting container
* @returns promise waiting for the backdrop to be set
*/
private static async setRandomUserBackdrop(): Promise {
const response = await getItemsApi(JellyfinApi.jellyfinApi).getItems({
imageTypes: ['Backdrop'],
includeItemTypes: ['Movie', 'Series'],
limit: 1,
// Although we're limiting to what the user has access to,
// not everyone will want to see adult backdrops rotating on their TV.
maxOfficialRating: 'PG-13',
recursive: true,
sortBy: ['Random']
});
const result = response.data;
let src: string | null = null;
let item: BaseItemDto | null = null;
if (result.Items?.[0]) {
item = result.Items[0];
src = await DocumentManager.getWaitingBackdropUrl(item);
}
requestAnimationFrame(() => {
DocumentManager.setWaitingBackdrop(src, item);
});
}
/**
* Stop the backdrop rotation
*/
public static clearBackdropInterval(): void {
if (this.backdropTimer !== null) {
clearInterval(this.backdropTimer);
this.backdropTimer = null;
}
}
/**
* Start the backdrop rotation, restart if running, stop if disabled
* @returns promise for the first backdrop to be set
*/
public static async startBackdropInterval(): Promise {
// no backdrop rotation for cc audio
if (!hasVideoSupport()) {
return;
}
// avoid running it multiple times
this.clearBackdropInterval();
this.backdropTimer = window.setInterval(
() => DocumentManager.setRandomUserBackdrop(),
this.backdropPeriodMs
);
await this.setRandomUserBackdrop();
}
/**
* Set background behind the media player,
* this is shown while the media is loading.
* @param item - to get backdrop from
*/
public static setPlayerBackdrop(item: BaseItemDto): void {
// no backdrop rotation for cc audio
if (!hasVideoSupport()) {
return;
}
let backdropUrl: string | null = null;
if (item.BackdropImageTags?.length && item.Id) {
backdropUrl = JellyfinApi.createImageUrl(
item.Id,
'Backdrop',
item.BackdropImageTags[0]
);
} else if (
item.ParentBackdropItemId &&
item.ParentBackdropImageTags?.length
) {
backdropUrl = JellyfinApi.createImageUrl(
item.ParentBackdropItemId,
'Backdrop',
item.ParentBackdropImageTags[0]
);
}
if (backdropUrl != null) {
window.mediaElement?.style.setProperty(
'--background-image',
`url("${backdropUrl}")`
);
} else {
window.mediaElement?.style.removeProperty('--background-image');
}
}
/* /BACKDROP LOGIC */
/**
* Set the URL to the item logo, or null to remove it
* @param src - Source url or null
*/
public static setLogo(src: string | null): void {
const element: HTMLElement = this.querySelector('.detailLogo');
this.setBackgroundImage(element, src);
}
/**
* Set the URL to the item banner image (I think?),
* or null to remove it
* @param src - Source url or null
*/
public static setDetailImage(src: string | null): void {
const element: HTMLElement = this.querySelector('.detailImage');
this.setBackgroundImage(element, src);
}
/**
* Set the human readable name for an item
*
* This combines the old statement setDisplayName(getDisplayName(item))
* into setDisplayName(item).
* @param item - source for the displayed name
*/
private static setDisplayName(item: BaseItemDto): void {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const name: string = item.EpisodeTitle ?? item.Name!;
let displayName: string = name;
if (item.Type == 'TvChannel') {
if (item.Number) {
displayName = `${item.Number} ${name}`;
}
} else if (
item.Type == 'Episode' &&
item.IndexNumber != null &&
item.ParentIndexNumber != null
) {
let episode = `S${item.ParentIndexNumber}, E${item.IndexNumber}`;
if (item.IndexNumberEnd) {
episode += `-${item.IndexNumberEnd}`;
}
displayName = `${episode} - ${name}`;
}
const element = this.querySelector('.displayName');
element.innerHTML = displayName || '';
}
/**
* Set the html of the genres container
* @param name - String/html for genres box, null to empty
*/
private static setGenres(name: string | null): void {
const element = this.querySelector('.genres');
element.innerHTML = name ?? '';
}
/**
* Set the html of the overview container
* @param name - string or html to insert
*/
private static setOverview(name: string | null): void {
const element = this.querySelector('.overview');
element.innerHTML = name ?? '';
}
/**
* Set the progress of the progress bar in the
* item details page. (Not the same as the playback ui)
* @param value - Percentage to set
*/
private static setPlayedPercentage(value = 0): void {
const element = this.querySelector(
'.itemProgressBar'
) as HTMLProgressElement;
element.value = value;
}
/**
* Set the visibility of the item progress bar in the
* item details page
* @param value - If true, show progress on details page
*/
private static setHasPlayedPercentage(value: boolean): void {
const element = this.querySelector('.detailImageProgressContainer');
if (value) {
element.classList.remove('d-none');
} else {
element.classList.add('d-none');
}
}
/**
* Get a human readable representation of the current position
* in ticks
* @param ticks - tick position
* @returns human readable position
*/
private static formatRunningTime(ticks: number): string {
const ticksPerMinute = TicksPerSecond * 60;
const ticksPerHour = ticksPerMinute * 60;
const parts: string[] = [];
const hours: number = Math.floor(ticks / ticksPerHour);
if (hours) {
parts.push(hours.toString());
}
ticks -= hours * ticksPerHour;
const minutes: number = Math.floor(ticks / ticksPerMinute);
ticks -= minutes * ticksPerMinute;
if (minutes < 10 && hours) {
parts.push(`0${minutes.toString()}`);
} else {
parts.push(minutes.toString());
}
const seconds: number = Math.floor(ticksToSeconds(ticks));
if (seconds < 10) {
parts.push(`0${seconds.toString()}`);
} else {
parts.push(seconds.toString());
}
return parts.join(':');
}
/**
 * Set information about mostly episodes or series
 * on the item details page.
 *
 * Collects short strings (air/start date, year(s), runtime, official
 * rating, 3D flag) and writes them space-separated into #miscInfo.
 * @param item - to look up
 */
private static setMiscInfo(item: BaseItemDto): void {
    const info: string[] = [];

    // Episodes show their premiere (air) date.
    if (item.Type == 'Episode') {
        if (item.PremiereDate) {
            try {
                info.push(
                    parseISO8601Date(item.PremiereDate).toLocaleDateString()
                );
            } catch {
                console.log(`Error parsing date: ${item.PremiereDate}`);
            }
        }
    }

    if (item.StartDate) {
        try {
            info.push(parseISO8601Date(item.StartDate).toLocaleDateString());
        } catch {
            // Fixed: this previously logged item.PremiereDate even though
            // item.StartDate is the date that failed to parse.
            console.log(`Error parsing date: ${item.StartDate}`);
        }
    }

    // Series show a year range, e.g. "2005-Present" or "2005-2013".
    if (item.ProductionYear && item.Type == 'Series') {
        if (item.Status == 'Continuing') {
            info.push(`${item.ProductionYear}-Present`);
        } else {
            // ProductionYear is known truthy here (checked above), so the
            // former `else if (item.ProductionYear)` was redundant.
            let text: string = item.ProductionYear.toString();

            if (item.EndDate) {
                try {
                    const endYear = parseISO8601Date(
                        item.EndDate
                    ).getFullYear();

                    // Only append the end year when it differs.
                    if (endYear != item.ProductionYear) {
                        text += `-${endYear}`;
                    }
                } catch {
                    console.log(`Error parsing date: ${item.EndDate}`);
                }
            }

            info.push(text);
        }
    }

    // Everything else shows a single year.
    if (item.Type != 'Series' && item.Type != 'Episode') {
        if (item.ProductionYear) {
            info.push(item.ProductionYear.toString());
        } else if (item.PremiereDate) {
            try {
                info.push(
                    parseISO8601Date(item.PremiereDate)
                        .getFullYear()
                        .toString()
                );
            } catch {
                console.log(`Error parsing date: ${item.PremiereDate}`);
            }
        }
    }

    // Runtime: audio uses h:mm:ss, everything else whole minutes
    // (at least "1min", so very short items don't display "0min").
    if (item.RunTimeTicks && item.Type != 'Series') {
        if (item.Type == 'Audio') {
            info.push(this.formatRunningTime(item.RunTimeTicks));
        } else {
            // 600,000,000 ticks per minute (10,000 ticks per millisecond).
            const minutes = Math.round(item.RunTimeTicks / 600000000) || 1;

            info.push(`${minutes}min`);
        }
    }

    // Ratings are omitted for seasons and episodes.
    if (
        item.OfficialRating &&
        item.Type !== 'Season' &&
        item.Type !== 'Episode'
    ) {
        info.push(item.OfficialRating);
    }

    if (item.Video3DFormat) {
        info.push('3D');
    }

    const element = this.getElementById('miscInfo');

    element.innerHTML = info.join(' ');
}
// Generic / Helper functions

/**
 * Set the visibility of an element
 * @param element - Element to set visibility on
 * @param visible - True if the element should be visible.
 */
private static setVisibility(element: HTMLElement, visible: boolean): void {
    // toggle(token, force): adds 'd-none' when hidden, removes it when visible.
    element.classList.toggle('d-none', !visible);
}
/**
 * Get a HTMLElement from id or throw an error
 * @param id - ID to look up
 * @returns HTML Element
 * @throws ReferenceError when no element has the given id
 */
private static getElementById(id: string): HTMLElement {
    const element = document.getElementById(id);

    if (element) {
        return element;
    }

    throw new ReferenceError(`Cannot find element ${id} by id`);
}
/**
 * Get a HTMLElement by class
 * @param cls - Class to look up
 * @returns HTML Element
 * @throws ReferenceError when the selector matches nothing
 */
private static querySelector(cls: string): HTMLElement {
    const element = document.querySelector<HTMLElement>(cls);

    if (element) {
        return element;
    }

    throw new ReferenceError(`Cannot find element ${cls} by class`);
}
}
// NOTE(review): the 'load' event fires on window and does not bubble to
// document, so this listener may never fire in a standard browser —
// presumably the cast receiver environment differs; confirm, or consider
// 'DOMContentLoaded' / window.addEventListener('load', ...).
document.addEventListener('load', () => DocumentManager.initialize());
================================================
FILE: src/components/jellyfinActions.ts
================================================
import type {
BaseItemDto,
DeviceProfile,
LiveStreamResponse,
MediaSourceInfo,
PlaybackInfoDto,
PlaybackInfoResponse,
PlaybackProgressInfo
} from '@jellyfin/sdk/lib/generated-client';
import {
getHlsSegmentApi,
getMediaInfoApi,
getPlaystateApi
} from '@jellyfin/sdk/lib/utils/api';
import { getSenderReportingData, broadcastToMessageBus } from '../helpers';
import { AppStatus } from '../types/appStatus';
import { JellyfinApi } from './jellyfinApi';
import { DocumentManager } from './documentManager';
import { PlaybackManager, type PlaybackState } from './playbackManager';
import type {
BusMessageType,
JellyfinMediaInformationCustomData
} from '~/types/global';
// Handle of the transcoder keep-alive interval. Initialized to 0 so that
// stopPingInterval()'s `!== 0` guard is meaningful before the first
// restartPingInterval() call (previously it was left undefined).
let pingInterval = 0;
// Timestamp (ms since epoch) of the last transcoder ping / progress report.
let lastTranscoderPing = 0;
/**
 * Start the transcoder pinging.
 *
 * This is used to keep the transcode available during pauses.
 * Only transcoded playback needs it; direct play is left alone.
 * @param reportingParams - parameters to report to the server
 */
function restartPingInterval(reportingParams: PlaybackProgressInfo): void {
    stopPingInterval();

    if (reportingParams.PlayMethod != 'Transcode') {
        return;
    }

    pingInterval = window.setInterval(() => {
        const sessionId = reportingParams.PlaySessionId;

        if (sessionId) {
            pingTranscoder(sessionId);
        }
    }, 1000);
}
/**
 * Stop the transcoder ping
 *
 * Needed to stop the pinging when it's not needed anymore
 */
export function stopPingInterval(): void {
    if (pingInterval === 0) {
        return;
    }

    clearInterval(pingInterval);
    pingInterval = 0;
}
/**
* Report to the server that playback has started.
* @param state - playback state.
* @param reportingParams - parameters to send to the server
* @returns promise to wait for the request
*/
export async function reportPlaybackStart(
state: PlaybackState,
reportingParams: PlaybackProgressInfo
): Promise {
// it's just "reporting" that the playback is starting
// but it's also disabling the rotating backdrops
// in the line below.
// TODO move the responsibility to the caller.
DocumentManager.clearBackdropInterval();
broadcastToMessageBus({
//TODO: convert these to use a defined type in the type field
data: getSenderReportingData(state, reportingParams),
type: 'playbackstart'
});
restartPingInterval(reportingParams);
await getPlaystateApi(JellyfinApi.jellyfinApi).reportPlaybackStart({
playbackStartInfo: reportingParams
});
}
/**
* Report to the server the progress of the playback.
* @param state - playback state.
* @param reportingParams - parameters for jellyfin
* @param reportToServer - if jellyfin should be informed
* @param broadcastEventName - name of event to send to the cast sender
* @returns Promise for the http request
*/
export async function reportPlaybackProgress(
state: PlaybackState,
reportingParams: PlaybackProgressInfo,
reportToServer = true,
broadcastEventName: BusMessageType = 'playbackprogress'
): Promise {
broadcastToMessageBus({
data: getSenderReportingData(state, reportingParams),
type: broadcastEventName
});
if (reportToServer === false) {
return Promise.resolve();
}
restartPingInterval(reportingParams);
lastTranscoderPing = new Date().getTime();
await getPlaystateApi(JellyfinApi.jellyfinApi).reportPlaybackProgress({
playbackProgressInfo: reportingParams
});
}
/**
* Report to the server that playback has stopped.
* @param state - playback state.
* @param reportingParams - parameters to send to the server
* @returns promise for waiting for the request
*/
export async function reportPlaybackStopped(
state: PlaybackState,
reportingParams: PlaybackProgressInfo
): Promise {
stopPingInterval();
broadcastToMessageBus({
data: getSenderReportingData(state, reportingParams),
type: 'playbackstop'
});
await getPlaystateApi(JellyfinApi.jellyfinApi).reportPlaybackStopped({
playbackStopInfo: reportingParams
});
}
/**
 * This keeps the session alive when playback is paused by refreshing the server.
 * /Sessions/Playing/Progress does work but may not be called during pause.
 * The web client calls that during pause, but this endpoint gets the job done
 * as well.
 * @param playSessionId - the playback session ID to ping
 * @returns promise for waiting for the request
 */
export async function pingTranscoder(playSessionId: string): Promise<void> {
    const now = new Date().getTime();

    // 10s is the timeout value, so use half that to report often enough
    if (now - lastTranscoderPing < 5000) {
        console.debug('Skipping ping due to recent progress check-in');

        // Plain return in an async function already resolves the promise;
        // the previous `new Promise((resolve) => resolve(undefined))` was
        // a needless promise-constructor anti-pattern.
        return;
    }

    // Reuse `now` rather than constructing a second Date.
    lastTranscoderPing = now;

    await getPlaystateApi(JellyfinApi.jellyfinApi).pingPlaybackSession({
        playSessionId: playSessionId
    });
}
/**
 * Update the context about the item we are playing.
 * @param customData - data to set on playback state.
 * @param serverItem - item that is playing
 */
export function load(
    customData: JellyfinMediaInformationCustomData,
    serverItem: BaseItemDto
): void {
    PlaybackManager.resetPlaybackScope();

    const state = PlaybackManager.playbackState;

    // These are set up in maincontroller.createMediaInformation
    Object.assign(state, {
        audioStreamIndex: customData.audioStreamIndex,
        canSeek: customData.canSeek,
        itemId: customData.itemId,
        liveStreamId: customData.liveStreamId,
        mediaSourceId: customData.mediaSourceId,
        playMethod: customData.playMethod,
        playSessionId: customData.playSessionId,
        runtimeTicks: customData.runtimeTicks,
        startPositionTicks: customData.startPositionTicks,
        subtitleStreamIndex: customData.subtitleStreamIndex
    });
    state.item = serverItem;

    DocumentManager.setAppStatus(AppStatus.Backdrop);
    state.mediaType = serverItem?.MediaType;
}
/**
 * Tell the media manager to play and switch back into the correct view for Audio at least
 * It's really weird and I don't get the 20ms delay.
 *
 * I also don't get doing nothing based on the currently visible app status
 *
 * TODO: rename these
 * @param state - playback state.
 */
export function play(state: PlaybackState): void {
    const status = DocumentManager.getAppStatus();
    const canPlay =
        status == AppStatus.Backdrop ||
        status == AppStatus.PlayingWithControls ||
        status == AppStatus.Audio;

    if (!canPlay) {
        return;
    }

    setTimeout(() => {
        window.playerManager.play();

        DocumentManager.setAppStatus(
            state.mediaType == 'Audio'
                ? AppStatus.Audio
                : AppStatus.PlayingWithControls
        );
    }, 20);
}
/**
* get PlaybackInfo
* @param item - item
* @param maxBitrate - maxBitrate
* @param deviceProfile - deviceProfile
* @param startPosition - startPosition
* @param mediaSourceId - mediaSourceId
* @param audioStreamIndex - audioStreamIndex
* @param subtitleStreamIndex - subtitleStreamIndex
* @param liveStreamId - liveStreamId
* @returns promise
*/
export async function getPlaybackInfo(
item: BaseItemDto,
maxBitrate: number,
deviceProfile: DeviceProfile,
startPosition: number | null,
mediaSourceId: string | null,
audioStreamIndex: number | null,
subtitleStreamIndex: number | null,
liveStreamId: string | null = null
): Promise {
if (!item.Id) {
console.error('getPlaybackInfo: Item ID not provided');
return Promise.reject('Item ID not available.');
}
const query: PlaybackInfoDto = {
DeviceProfile: deviceProfile,
MaxStreamingBitrate: maxBitrate,
StartTimeTicks: startPosition ?? 0
};
if (audioStreamIndex != null) {
query.AudioStreamIndex = audioStreamIndex;
}
if (subtitleStreamIndex != null) {
query.SubtitleStreamIndex = subtitleStreamIndex;
}
if (mediaSourceId) {
query.MediaSourceId = mediaSourceId;
}
if (liveStreamId) {
query.LiveStreamId = liveStreamId;
}
const response = await getMediaInfoApi(
JellyfinApi.jellyfinApi
).getPostedPlaybackInfo({
itemId: item.Id,
playbackInfoDto: query
});
return response.data;
}
/**
* get LiveStream
* @param item - item
* @param playSessionId - playSessionId
* @param maxBitrate - maxBitrate
* @param deviceProfile - deviceProfile
* @param startPosition - startPosition
* @param mediaSource - mediaSource
* @param audioStreamIndex - audioStreamIndex
* @param subtitleStreamIndex - subtitleStreamIndex
* @returns promise
*/
export async function getLiveStream(
item: BaseItemDto,
playSessionId: string,
maxBitrate: number,
deviceProfile: DeviceProfile,
startPosition: number | null,
mediaSource: MediaSourceInfo,
audioStreamIndex: number | null,
subtitleStreamIndex: number | null
): Promise {
const liveStreamResponse = await getMediaInfoApi(
JellyfinApi.jellyfinApi
).openLiveStream({
openLiveStreamDto: {
AudioStreamIndex: audioStreamIndex,
DeviceProfile: deviceProfile,
ItemId: item.Id,
MaxStreamingBitrate: maxBitrate,
OpenToken: mediaSource.OpenToken,
PlaySessionId: playSessionId,
StartTimeTicks: startPosition ?? 0,
SubtitleStreamIndex: subtitleStreamIndex
}
});
return liveStreamResponse.data;
}
/**
 * Get download speed based on the jellyfin bitratetest api.
 * The API has a 10MB limit.
 * @param byteSize - number of bytes to request
 * @returns the bitrate in bits/s
 */
export async function getDownloadSpeed(byteSize: number): Promise<number> {
    const requestStarted = new Date().getTime();

    const response = await getMediaInfoApi(
        JellyfinApi.jellyfinApi
    ).getBitrateTestBytes({ size: byteSize }, { timeout: 5000 });

    // Force javascript to download the whole response before calculating bitrate
    await response.data;

    const elapsedSeconds = (new Date().getTime() - requestStarted) / 1000;

    // bytes/s -> bits/s
    return Math.round((byteSize / elapsedSeconds) * 8);
}
/**
 * Function to detect the bitrate.
 * It starts at 500kB and doubles it every time it takes under 2s, for max 10MB.
 * This should get an accurate bitrate relatively fast on any connection
 * @param numBytes - Number of bytes to start with, default 500k
 * @returns bitrate in bits/s
 */
export async function detectBitrate(numBytes = 500000): Promise<number> {
    // Jellyfin has a 10MB limit on the test size
    const byteLimit = 10000000;
    let testSize = Math.min(numBytes, byteLimit);

    // Iterative form of the original doubling recursion.
    for (;;) {
        const bitrate = await getDownloadSpeed(testSize);

        // bitrate * 2s / 8 bits-per-byte < testSize  <=>  download took > 2s
        if (bitrate * (2 / 8.0) < testSize || testSize >= byteLimit) {
            // took > 2s, or the test size hit the limit; keep 80% headroom
            return Math.round(bitrate * 0.8);
        }

        // Fast download: double the size for a more accurate measurement.
        testSize = Math.min(testSize * 2, byteLimit);
    }
}
/**
* Tell Jellyfin to kill off our active transcoding session
* @param playSessionId - the play session ID to stop encoding
* @returns Promise for the http request to go through
*/
export async function stopActiveEncodings(
playSessionId: string
): Promise {
await getHlsSegmentApi(JellyfinApi.jellyfinApi).stopEncodingProcess({
deviceId: JellyfinApi.deviceId,
playSessionId: playSessionId
});
}
================================================
FILE: src/components/jellyfinApi.ts
================================================
import { Api, Jellyfin } from '@jellyfin/sdk';
import { version as packageVersion } from '../../package.json';
// eslint-disable-next-line @typescript-eslint/no-extraneous-class
export abstract class JellyfinApi {
    // Security token to prove authentication
    public static accessToken: string | undefined;

    // Address of server
    public static serverAddress: string | undefined;

    // device name
    public static deviceName = 'Google Cast';

    // unique id
    public static deviceId = '';

    // Jellyfin SDK
    private static jellyfinSdk: Jellyfin | undefined;

    // Jellyfin API
    public static jellyfinApi: Api;

    /**
     * Store the server connection details and (re)create the SDK API
     * instance when the credentials changed.
     * @param accessToken - security token proving authentication
     * @param serverAddress - base address of the Jellyfin server
     * @param receiverName - friendly receiver name, used for device identity
     */
    public static setServerInfo(
        accessToken?: string,
        serverAddress?: string,
        receiverName = ''
    ): void {
        // Decide before overwriting whether the API must be recreated.
        const credentialsChanged =
            this.accessToken !== accessToken ||
            this.serverAddress !== serverAddress;

        console.debug(
            `JellyfinApi.setServerInfo: token:${accessToken}, server:${serverAddress}, name:${receiverName}`
        );

        this.accessToken = accessToken;
        this.serverAddress = serverAddress;

        if (receiverName) {
            // remove special characters from the receiver name
            const sanitizedName = receiverName.replace(/[^\w\s]/gi, '');

            this.deviceName = sanitizedName;
            // deviceId just needs to be unique-ish
            this.deviceId = btoa(sanitizedName);
        } else {
            const senders =
                cast.framework.CastReceiverContext.getInstance().getSenders();

            this.deviceName = 'Google Cast';
            // Fall back to a timestamp when no sender id is available.
            this.deviceId =
                senders[0]?.id || new Date().getTime().toString();
        }

        // The SDK instance is created once with the first device identity.
        this.jellyfinSdk ??= new Jellyfin({
            clientInfo: {
                name: 'Chromecast',
                version: packageVersion
            },
            deviceInfo: {
                id: this.deviceId,
                name: this.deviceName
            }
        });

        if (this.jellyfinApi && !credentialsChanged) {
            return;
        }

        if (serverAddress && accessToken) {
            this.jellyfinApi = this.jellyfinSdk.createApi(
                serverAddress,
                accessToken
            );
        } else {
            console.error(
                'Server address or access token not provided - could not create instance of Jellyfin API'
            );
        }
    }

    // Create a basic url.
    // Cannot start with /.
    public static createUrl(path: string): string {
        if (this.serverAddress === undefined) {
            console.error('JellyfinApi.createUrl: no server address present');

            return '';
        }

        // Remove leading slashes
        const trimmedPath = path.replace(/^\/+/, '');

        return `${this.serverAddress}/${trimmedPath}`;
    }

    /**
     * Create url to image
     * @param itemId - Item id
     * @param imgType - Image type: Primary, Logo, Backdrop
     * @param imgTag - Image tag
     * @param imgIdx - Image index, default 0
     * @returns URL
     */
    public static createImageUrl(
        itemId: string,
        imgType: string,
        imgTag: string,
        imgIdx = 0
    ): string {
        const path = `Items/${itemId}/Images/${imgType}/${imgIdx.toString()}?tag=${imgTag}`;

        return this.createUrl(path);
    }
}
================================================
FILE: src/components/maincontroller.ts
================================================
import type {
BaseItemDto,
MediaStream,
MediaSourceInfo
} from '@jellyfin/sdk/lib/generated-client';
import { getSessionApi, getUserLibraryApi } from '@jellyfin/sdk/lib/utils/api';
import {
getCurrentPositionTicks,
getReportingParams,
getMetadata,
getStreamByIndex,
getShuffleItems,
getInstantMixItems,
translateRequestedItems,
broadcastToMessageBus,
ticksToSeconds,
TicksPerSecond
} from '../helpers';
import {
reportPlaybackStart,
reportPlaybackProgress,
reportPlaybackStopped,
play,
detectBitrate
} from './jellyfinActions';
import { getDeviceProfile } from './deviceprofileBuilder';
import { JellyfinApi } from './jellyfinApi';
import { PlaybackManager, type PlaybackState } from './playbackManager';
import { CommandHandler } from './commandHandler';
import { getMaxBitrateSupport } from './codecSupportHelper';
import type { BusMessageType, PlayRequest, StreamInfo } from '~/types/global';
// Module-level receiver bootstrap: grab the cast context and player manager,
// wire them into the playback/command layers, and reset playback state.
window.castReceiverContext = cast.framework.CastReceiverContext.getInstance();
window.playerManager = window.castReceiverContext.getPlayerManager();

PlaybackManager.setPlayerManager(window.playerManager);
CommandHandler.configure(window.playerManager);

PlaybackManager.resetPlaybackScope();

// Time of the last progress report sent to the server (used for throttling).
let broadcastToServer = new Date();
// Whether device capabilities were already posted to the server this session.
let hasReportedCapabilities = false;
/**
 * Throttled progress reporting driven by player TIME_UPDATE events:
 * a full server report at most every 5s, sender-only updates every 1.5s.
 */
export function onMediaElementTimeUpdate(): void {
    if (PlaybackManager.playbackState.isChangingStream) {
        return;
    }

    const now = new Date();
    const elapsed = now.valueOf() - broadcastToServer.valueOf();
    const playbackState = PlaybackManager.playbackState;

    if (elapsed <= 1500) {
        return;
    }

    if (elapsed > 5000) {
        // TODO use status as input
        reportPlaybackProgress(
            playbackState,
            getReportingParams(playbackState)
        );
        broadcastToServer = now;
    } else {
        // Sender-only update; the server is not contacted.
        // TODO use status as input
        reportPlaybackProgress(
            playbackState,
            getReportingParams(playbackState),
            false
        );
    }
}
/**
 * onMediaElementPause
 *
 * Forward a play-state change to senders/server unless we are merely
 * switching streams.
 */
export function onMediaElementPause(): void {
    const { isChangingStream } = PlaybackManager.playbackState;

    if (!isChangingStream) {
        reportEvent('playstatechange', true);
    }
}
/**
 * onMediaElementPlaying
 *
 * Forward a play-state change to senders/server unless we are merely
 * switching streams.
 */
export function onMediaElementPlaying(): void {
    const { isChangingStream } = PlaybackManager.playbackState;

    if (!isChangingStream) {
        reportEvent('playstatechange', true);
    }
}
/**
 * Handle system volume changes from the cast framework, cache the new
 * volume on window and forward the change to the senders/server.
 * @param event - system volume changed event
 */
function onMediaElementVolumeChange(event: framework.system.Event): void {
    window.volume = (event as framework.system.SystemVolumeChangedEvent).data;
    console.log(`Received volume update: ${window.volume.level}`);

    // Only report once a server is configured. The previous check compared
    // against null, but serverAddress is `string | undefined`, so it was
    // never null and the report was sent unconditionally; `!= null` covers
    // both null and undefined.
    if (JellyfinApi.serverAddress != null) {
        reportEvent('volumechange', true);
    }
}
/**
 * enableTimeUpdateListener
 *
 * Attach the progress/pause/playing listeners to the player manager and
 * the system volume listener to the receiver context.
 */
export function enableTimeUpdateListener(): void {
    const playerManager = window.playerManager;
    const playerEvents = cast.framework.events.EventType;

    playerManager.addEventListener(
        playerEvents.TIME_UPDATE,
        onMediaElementTimeUpdate
    );
    window.castReceiverContext.addEventListener(
        cast.framework.system.EventType.SYSTEM_VOLUME_CHANGED,
        onMediaElementVolumeChange
    );
    playerManager.addEventListener(playerEvents.PAUSE, onMediaElementPause);
    playerManager.addEventListener(
        playerEvents.PLAYING,
        onMediaElementPlaying
    );
}
/**
 * disableTimeUpdateListener
 *
 * Detach exactly the listeners registered in enableTimeUpdateListener.
 */
export function disableTimeUpdateListener(): void {
    const playerManager = window.playerManager;
    const playerEvents = cast.framework.events.EventType;

    playerManager.removeEventListener(
        playerEvents.TIME_UPDATE,
        onMediaElementTimeUpdate
    );
    window.castReceiverContext.removeEventListener(
        cast.framework.system.EventType.SYSTEM_VOLUME_CHANGED,
        onMediaElementVolumeChange
    );
    playerManager.removeEventListener(playerEvents.PAUSE, onMediaElementPause);
    playerManager.removeEventListener(
        playerEvents.PLAYING,
        onMediaElementPlaying
    );
}
// Listen immediately, and make sure listeners are detached when the
// receiver page unloads.
enableTimeUpdateListener();

window.addEventListener('beforeunload', () => {
    disableTimeUpdateListener();
});

// On PLAY: switch the document into the right playing view and report
// progress to the server.
window.playerManager.addEventListener(
    cast.framework.events.EventType.PLAY,
    (): void => {
        const playbackState = PlaybackManager.playbackState;

        play(playbackState);
        reportPlaybackProgress(
            playbackState,
            getReportingParams(playbackState)
        );
    }
);

// On PAUSE: report the paused position to the server.
window.playerManager.addEventListener(
    cast.framework.events.EventType.PAUSE,
    (): void => {
        const playbackState = PlaybackManager.playbackState;

        reportPlaybackProgress(
            playbackState,
            getReportingParams(playbackState)
        );
    }
);
/**
 * defaultOnStop
 *
 * Shared stop handler: delegates all teardown to the playback manager.
 */
function defaultOnStop(): void {
    PlaybackManager.onStop();
}
// On MEDIA_FINISHED: send a final stop report with the last known position,
// unless we are only switching streams (then just clear the flag).
window.playerManager.addEventListener(
    cast.framework.events.EventType.MEDIA_FINISHED,
    async (mediaFinishedEvent): Promise => {
        const playbackState = PlaybackManager.playbackState;

        // Don't notify server or client if changing streams, but notify next time.
        if (!playbackState.isChangingStream) {
            // NOTE(review): currentMediaTime is in seconds, hence the
            // TicksPerSecond multiplication — but the fallback helper is
            // named getCurrentPositionTicks, which suggests it already
            // returns ticks; confirm its unit, otherwise the fallback is
            // scaled twice.
            await reportPlaybackStopped(playbackState, {
                ...getReportingParams(playbackState),
                PositionTicks:
                    (mediaFinishedEvent.currentMediaTime ??
                        getCurrentPositionTicks(playbackState)) * TicksPerSecond
            });
            defaultOnStop();
        } else {
            playbackState.isChangingStream = false;
        }
    }
);

// On ABORT: just tear down playback state.
window.playerManager.addEventListener(
    cast.framework.events.EventType.ABORT,
    defaultOnStop
);

// On ENDED: advance to the next queued item, or stop when the queue is done.
window.playerManager.addEventListener(
    cast.framework.events.EventType.ENDED,
    (): void => {
        const playbackState = PlaybackManager.playbackState;

        // If we're changing streams, do not report playback ended.
        if (playbackState.isChangingStream) {
            return;
        }

        PlaybackManager.resetPlaybackScope();

        if (!PlaybackManager.playNextItem()) {
            PlaybackManager.resetPlaylist();
            PlaybackManager.onStop();
        }
    }
);

// Notify of playback start as soon as the media is playing. Only then is the tick position good.
window.playerManager.addEventListener(
    cast.framework.events.EventType.PLAYING,
    (): void => {
        reportPlaybackStart(
            PlaybackManager.playbackState,
            getReportingParams(PlaybackManager.playbackState)
        );
    }
);

// Set the active subtitle track once the player has loaded
window.playerManager.addEventListener(
    cast.framework.events.EventType.PLAYER_LOAD_COMPLETE,
    () => {
        setTextTrack(
            window.playerManager.getMediaInformation()?.customData
                ?.subtitleStreamIndex ?? null
        );
    }
);
/**
* reportDeviceCapabilities
* @returns Promise
*/
export async function reportDeviceCapabilities(): Promise {
const maxBitrate = await getMaxBitrate();
const deviceProfile = getDeviceProfile(maxBitrate);
hasReportedCapabilities = true;
await getSessionApi(JellyfinApi.jellyfinApi).postFullCapabilities({
clientCapabilitiesDto: {
DeviceProfile: deviceProfile,
PlayableMediaTypes: ['Audio', 'Video'],
SupportsMediaControl: true,
SupportsPersistentIdentifier: false
}
});
}
/**
 * Entry point for messages from senders: validates required fields,
 * stores connection/config data, reports capabilities once, and hands
 * the command off to the CommandHandler.
 * @param data - message payload from the sender
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function processMessage(data: any): void {
    const hasRequiredFields =
        data.command && data.serverAddress && data.accessToken;

    if (!hasRequiredFields) {
        console.log('Invalid message sent from sender. Sending error response');
        broadcastToMessageBus({
            message:
                'Missing one or more required params - command,options,userId,accessToken,serverAddress',
            type: 'error'
        });

        return;
    }

    data.options = data.options ?? {};

    // Items will have properties - Id, Name, Type, MediaType, IsFolder
    JellyfinApi.setServerInfo(
        data.accessToken,
        data.serverAddress,
        data.receiverName
    );

    if (data.subtitleAppearance) {
        window.subtitleAppearance = data.subtitleAppearance;
    }

    if (data.maxBitrate) {
        window.MaxBitrate = data.maxBitrate;
    }

    // Report device capabilities
    if (!hasReportedCapabilities) {
        reportDeviceCapabilities();
    }

    CommandHandler.processMessage(data, data.command);

    if (!window.reportEventType) {
        return;
    }

    const playbackState = PlaybackManager.playbackState;
    const report = (): void => {
        reportPlaybackProgress(
            playbackState,
            getReportingParams(playbackState)
        );
    };

    reportPlaybackProgress(
        playbackState,
        getReportingParams(playbackState),
        true,
        window.reportEventType
    );
    setTimeout(report, 100);
    setTimeout(report, 500);
}
/**
 * reportEvent
 *
 * Convenience wrapper: report the current playback state under a given
 * event name.
 * @param name - name of the event to broadcast
 * @param reportToServer - whether the server should also be informed
 * @returns Promise of the progress report
 */
export function reportEvent(
    name: BusMessageType,
    reportToServer: boolean
): Promise<void> {
    const state = PlaybackManager.playbackState;
    const params = getReportingParams(state);

    return reportPlaybackProgress(state, params, reportToServer, name);
}
/**
 * setSubtitleStreamIndex
 *
 * Switch the active subtitle stream. External/text subtitles are switched
 * client-side; burned-in ("Encode") subtitles require restarting the stream.
 * @param state - playback state.
 * @param index - subtitle stream index to select, or -1/null to disable
 */
export function setSubtitleStreamIndex(
    state: PlaybackState,
    index: number
): void {
    console.log(`setSubtitleStreamIndex. index: ${index}`);

    let positionTicks;

    // FIXME: Possible index error when MediaStreams is undefined.
    const currentSubtitleStream = state.mediaSource?.MediaStreams?.find(
        (m: MediaStream) => {
            return m.Index == state.subtitleStreamIndex && m.Type == 'Subtitle';
        }
    );
    // How the currently active subtitle reaches the client, if any.
    const currentDeliveryMethod = currentSubtitleStream
        ? currentSubtitleStream.DeliveryMethod
        : null;

    if (index == -1 || index == null) {
        // Need to change the stream to turn off the subs
        if (currentDeliveryMethod == 'Encode') {
            console.log('setSubtitleStreamIndex video url change required');
            positionTicks = getCurrentPositionTicks(state);
            changeStream(state, positionTicks, {
                SubtitleStreamIndex: -1
            });
        } else {
            state.subtitleStreamIndex = -1;
            setTextTrack(null);
        }

        return;
    }

    // NOTE(review): the current stream is read from state.mediaSource above,
    // but the new stream is looked up on state.PlaybackMediaSource — confirm
    // both properties exist on PlaybackState and refer to the right source.
    const mediaStreams = state.PlaybackMediaSource?.MediaStreams ?? [];
    const subtitleStream = getStreamByIndex(mediaStreams, 'Subtitle', index);

    if (!subtitleStream) {
        console.log(
            'setSubtitleStreamIndex error condition - subtitle stream not found.'
        );

        return;
    }

    console.log(
        `setSubtitleStreamIndex DeliveryMethod:${subtitleStream.DeliveryMethod}`
    );

    if (
        subtitleStream.DeliveryMethod == 'External' ||
        currentDeliveryMethod == 'Encode'
    ) {
        // Text-based subtitle: activate it on the player without restarting.
        let textStreamUrl;

        if (subtitleStream.IsExternal && subtitleStream.DeliveryUrl) {
            textStreamUrl = subtitleStream.DeliveryUrl;
        } else if (subtitleStream.DeliveryUrl) {
            textStreamUrl = JellyfinApi.createUrl(subtitleStream.DeliveryUrl);
        }

        console.log(`Subtitle url: ${textStreamUrl}`);
        setTextTrack(index);
        state.subtitleStreamIndex = subtitleStream.Index ?? null;

        return;
    } else {
        // Burned-in subtitles: the video stream itself must be restarted.
        console.log('setSubtitleStreamIndex video url change required');
        positionTicks = getCurrentPositionTicks(state);
        changeStream(state, positionTicks, {
            SubtitleStreamIndex: index
        });
    }
}
/**
 * setAudioStreamIndex
 *
 * Switch the active audio stream, restarting playback at the current
 * position.
 * @param state - playback state.
 * @param index - audio stream index to select
 * @returns promise of the stream change
 */
export function setAudioStreamIndex(
    state: PlaybackState,
    index: number
): Promise<void> {
    return changeStream(state, getCurrentPositionTicks(state), {
        AudioStreamIndex: index
    });
}
/**
 * seek
 *
 * Seek to an absolute position, delegating to changeStream (which picks a
 * client-side seek when possible, otherwise restarts the stream).
 * @param state - playback state.
 * @param ticks - target position in ticks
 * @returns promise resolving when the seek has been issued
 */
export function seek(state: PlaybackState, ticks: number): Promise {
    return changeStream(state, ticks);
}
/**
 * changeStream
 *
 * Seek and/or switch audio/subtitle streams. Uses a fast client-side seek
 * when the current stream supports it and no stream selection changes;
 * otherwise restarts playback of the item at the requested position.
 * @param state - playback state.
 * @param ticks - target position in ticks
 * @param params - optional overrides (AudioStreamIndex / SubtitleStreamIndex)
 * @returns promise resolving when the change has been initiated
 */
export async function changeStream(
    state: PlaybackState,
    ticks: number,
    params: any = undefined // eslint-disable-line @typescript-eslint/no-explicit-any
): Promise {
    // Fast path: the player can seek within the current stream and no
    // stream selection is changing.
    if (
        window.playerManager.getMediaInformation()?.customData?.canClientSeek &&
        params == null
    ) {
        window.playerManager.seek(ticksToSeconds(ticks));
        reportPlaybackProgress(state, getReportingParams(state));

        return Promise.resolve();
    }

    params = params ?? {};

    // TODO Could be useful for garbage collection.
    // It needs to predict if the server side transcode needs
    // to restart.
    // Possibility: Always assume it will. Downside: VTT subs switching doesn't
    // need to restart the transcode.
    //const requiresStoppingTranscoding = false;
    //
    //if (requiresStoppingTranscoding) {
    //    window.playerManager.pause();
    //    await stopActiveEncodings($scope.playSessionId);
    //}

    // Suppresses stop/ended reporting while the stream restarts.
    state.isChangingStream = true;

    // @ts-expect-error is possible here
    return await PlaybackManager.playItemInternal(state.item, {
        audioStreamIndex: params.AudioStreamIndex ?? state.audioStreamIndex,
        liveStreamId: state.liveStreamId,
        mediaSourceId: state.mediaSourceId,
        startPositionTicks: ticks,
        subtitleStreamIndex:
            params.SubtitleStreamIndex ?? state.subtitleStreamIndex
    });
}
// Create a message handler for the custom namespace channel
// TODO save namespace somewhere global?
window.castReceiverContext.addCustomMessageListener(
    'urn:x-cast:com.connectsdk',
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    (evt: any) => {
        let data = evt.data;

        // Apparently chromium likes to pass it as json, not as object.
        // chrome on android works fine
        if (typeof data === 'string') {
            console.log('Event data is a string.. Chromium detected..');
            data = JSON.parse(data);
        }

        data.options = data.options ?? {};
        // Remember which sender issued the command so replies can target it.
        data.options.senderId = evt.senderId;

        // TODO set it somewhere better perhaps
        window.senderId = evt.senderId;

        console.log(`Received message: ${JSON.stringify(data)}`);
        processMessage(data);
    }
);
/**
 * translateItems
 *
 * Resolve the requested items on the server, then either enqueue them
 * (PlayNext/PlayLast) or start playing immediately.
 * @param data - data
 * @param options - options
 * @param method - method
 * @returns promise
 */
export async function translateItems(
    data: any, // eslint-disable-line @typescript-eslint/no-explicit-any
    options: PlayRequest,
    method: string
): Promise<void> {
    const queueOnly = method == 'PlayNext' || method == 'PlayLast';
    const result = await translateRequestedItems(options.items, !queueOnly);

    if (result.Items) {
        options.items = result.Items;
    }

    if (queueOnly) {
        for (const item of options.items) {
            PlaybackManager.enqueue(item);
        }
    } else {
        PlaybackManager.playFromOptions(data.options);
    }
}
/**
 * instantMix
 *
 * Replace the requested items with a server-generated instant mix based
 * on the given item, then start playback.
 * @param data - data
 * @param options - options
 * @param item - item
 * @returns promise
 */
export async function instantMix(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    data: any,
    options: PlayRequest,
    item: BaseItemDto
): Promise<void> {
    const { Items } = await getInstantMixItems(item);

    options.items = Items ?? [];
    PlaybackManager.playFromOptions(data.options);
}
/**
 * shuffle
 *
 * Replace the requested items with a shuffled set derived from the given
 * item, then start playback.
 * @param data - data
 * @param options - options
 * @param item - item
 * @returns promise
 */
export async function shuffle(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    data: any,
    options: PlayRequest,
    item: BaseItemDto
): Promise<void> {
    const { Items } = await getShuffleItems(item);

    options.items = Items ?? [];
    PlaybackManager.playFromOptions(data.options);
}
/**
* onStopPlayerBeforePlaybackDone
* This function fetches the full information of an item before playing it.
* Only item.Id needs to be set.
* @param item - Item to look up
* @param options - Extra information about how it should be played back.
* @returns Promise waiting for the item to be loaded for playback
*/
export async function onStopPlayerBeforePlaybackDone(
item: BaseItemDto,
options: PlayRequest
): Promise {
if (item.Id) {
const response = await getUserLibraryApi(
JellyfinApi.jellyfinApi
).getItem({
itemId: item.Id
});
PlaybackManager.playItemInternal(response.data, options);
}
}
// Timestamp (ms) of the last automatic bitrate detection; results are
// cached for ten minutes (see getMaxBitrate).
let lastBitrateDetect = 0;
// Last auto-detected bitrate in bits/s (0 = not yet detected).
let detectedBitrate = 0;
/**
 * Resolve the maximum streaming bitrate to use.
 *
 * Precedence: a sender-provided value (window.MaxBitrate), then a cached
 * auto-detected value (at most 10 minutes old), then a fresh detection.
 * Falls back to the device maximum when detection fails.
 * @returns promise resolving to the bitrate in bits/s
 */
export async function getMaxBitrate(): Promise<number> {
    console.log('getMaxBitrate');

    if (window.MaxBitrate) {
        console.log(`bitrate is set to ${window.MaxBitrate}`);

        return window.MaxBitrate;
    }

    // Reuse a detection result for up to ten minutes.
    if (detectedBitrate && new Date().getTime() - lastBitrateDetect < 600000) {
        console.log(
            `returning previous detected bitrate of ${detectedBitrate}`
        );

        return detectedBitrate;
    }

    console.log('detecting bitrate');

    try {
        // The detection request must be awaited INSIDE the try block;
        // previously it was outside, so a failed request rejected this
        // promise instead of falling back to the device maximum.
        const bitrate = await detectBitrate();

        console.log(`Max bitrate auto detected to ${bitrate}`);
        lastBitrateDetect = new Date().getTime();
        detectedBitrate = bitrate;

        return Math.min(detectedBitrate, getMaxBitrateSupport());
    } catch {
        // The client can set this number
        console.log('Error detecting bitrate, will return device maximum.');

        return getMaxBitrateSupport();
    }
}
/**
 * showPlaybackInfoErrorMessage
 *
 * Forward a playback error to the connected senders over the message bus.
 * @param error - error identifier/message to broadcast
 */
export function showPlaybackInfoErrorMessage(error: string): void {
    broadcastToMessageBus({ message: error, type: 'playbackerror' });
}
/**
* getOptimalMediaSource
* @param versions - versions
* @returns stream
*/
export function getOptimalMediaSource(
versions: MediaSourceInfo[]
): MediaSourceInfo | null {
let optimalVersion = versions.find((v) => {
checkDirectPlay(v);
return v.SupportsDirectPlay;
});
optimalVersion ??= versions.find((v) => {
return v.SupportsDirectStream;
});
return (
optimalVersion ??
versions.find((s) => {
return s.SupportsTranscoding;
}) ??
null
);
}
// Disable direct play on non-http sources
/**
 * checkDirectPlay
 *
 * Clears SupportsDirectPlay unless the source is plain HTTP with no
 * required extra headers.
 * @param mediaSource - mediaSource (mutated in place)
 */
export function checkDirectPlay(mediaSource: MediaSourceInfo): void {
    const isPlainHttp =
        mediaSource.Protocol == 'Http' &&
        !mediaSource.RequiredHttpHeaders?.length;

    if (!(mediaSource.SupportsDirectPlay && isPlainHttp)) {
        mediaSource.SupportsDirectPlay = false;
    }
}
/**
 * setTextTrack
 *
 * Activate (or clear) a text track on the player and apply the
 * sender-provided subtitle appearance settings.
 * @param index - track id to activate, or null to disable subtitles
 */
export function setTextTrack(index: number | null): void {
    try {
        const textTracksManager = window.playerManager.getTextTracksManager();

        if (index == null) {
            textTracksManager.setActiveByIds(null);

            return;
        }

        const subtitleTrack = textTracksManager.getTrackById(index);

        if (subtitleTrack?.trackId === undefined) {
            return;
        }

        textTracksManager.setActiveByIds([subtitleTrack.trackId]);

        const subtitleAppearance = window.subtitleAppearance;

        if (!subtitleAppearance) {
            return;
        }

        const textTrackStyle = new cast.framework.messages.TextTrackStyle();

        if (subtitleAppearance.dropShadow != null) {
            // Empty string is DROP_SHADOW
            textTrackStyle.edgeType =
                subtitleAppearance.dropShadow ||
                cast.framework.messages.TextTrackEdgeType.DROP_SHADOW;
            textTrackStyle.edgeColor = '#000000FF';
        }

        if (subtitleAppearance.font) {
            textTrackStyle.fontFamily = subtitleAppearance.font;
        }

        if (subtitleAppearance.textColor) {
            // Append the transparency, hardcoded to 100%
            textTrackStyle.foregroundColor = `${subtitleAppearance.textColor}FF`;
        }

        if (subtitleAppearance.textBackground === 'transparent') {
            textTrackStyle.backgroundColor = '#00000000'; // RGBA
        }

        // Font scale lookup; unknown/unset sizes fall back to 1.0.
        const fontScales: Record<string, number> = {
            extralarge: 1.45,
            large: 1.15,
            larger: 1.3,
            small: 0.8,
            smaller: 0.6
        };

        textTrackStyle.fontScale =
            fontScales[subtitleAppearance.textSize ?? ''] ?? 1.0;

        textTracksManager.setTextTrackStyle(textTrackStyle);
    } catch (e) {
        console.log(`Setting subtitle track failed: ${e}`);
    }
}
/**
 * Build the CAF MediaInformation object used in a player load request.
 * @param playSessionId - play session id used for playback reporting
 * @param item - item being played
 * @param streamInfo - stream details produced by createStreamInfo
 * @returns media information ready to hand to the player
 */
export function createMediaInformation(
    playSessionId: string,
    item: BaseItemDto,
    streamInfo: StreamInfo
): framework.messages.MediaInformation {
    const info = new cast.framework.messages.MediaInformation();

    info.contentId = streamInfo.url;
    info.contentType = streamInfo.contentType;

    // customData is round-tripped to the senders for playback reporting.
    info.customData = {
        audioStreamIndex: streamInfo.audioStreamIndex,
        canClientSeek: streamInfo.canClientSeek,
        canSeek: streamInfo.canSeek,
        itemId: item.Id,
        liveStreamId: streamInfo.mediaSource?.LiveStreamId ?? null,
        mediaSourceId: streamInfo.mediaSource?.Id ?? null,
        playMethod: streamInfo.isStatic ? 'DirectStream' : 'Transcode',
        playSessionId: playSessionId,
        runtimeTicks: streamInfo.mediaSource?.RunTimeTicks ?? null,
        startPositionTicks: streamInfo.startPositionTicks ?? 0,
        subtitleStreamIndex: streamInfo.subtitleStreamIndex
    };

    info.metadata = getMetadata(item);
    info.streamType = cast.framework.messages.StreamType.BUFFERED;
    info.tracks = streamInfo.tracks;

    const runTimeTicks = streamInfo.mediaSource?.RunTimeTicks;

    if (runTimeTicks) {
        info.duration = Math.floor(ticksToSeconds(runTimeTicks));
    }

    // If the client actually sets startPosition:
    // if(streamInfo.startPosition)
    //     mediaInfo.customData.startPositionTicks = streamInfo.startPosition
    return info;
}
// Set the available buttons in the UI controls.
const controls = cast.framework.ui.Controls.getInstance();
controls.clearDefaultSlotAssignments();
/* Disabled for now, dynamically set controls for each media type in the future.
// Assign buttons to control slots.
controls.assignButton(
    cast.framework.ui.ControlsSlot.SLOT_SECONDARY_1,
    cast.framework.ui.ControlsButton.CAPTIONS
);*/
// Seek back/forward 15s buttons in the two primary control slots.
controls.assignButton(
    cast.framework.ui.ControlsSlot.SLOT_PRIMARY_1,
    cast.framework.ui.ControlsButton.SEEK_BACKWARD_15
);
controls.assignButton(
    cast.framework.ui.ControlsSlot.SLOT_PRIMARY_2,
    cast.framework.ui.ControlsButton.SEEK_FORWARD_15
);
const options = new cast.framework.CastReceiverOptions();
// Global variable set by Vite
if (!import.meta.env.PROD) {
    // Development build: verbose receiver logging.
    window.castReceiverContext.setLoggerLevel(cast.framework.LoggerLevel.DEBUG);
    // Don't time out on me :(
    // This is only normally allowed for non media apps, but in this case
    // it's for debugging purposes.
    options.disableIdleTimeout = true;
    // This alternative seems to close sooner; I think it
    // quits once the client closes the connection.
    // options.maxInactivity = 3600;
    options.shakaVariant = cast.framework.ShakaVariant.DEBUG;
    // Log every core player event to the console while debugging.
    window.playerManager.addEventListener(
        cast.framework.events.category.CORE,
        (event: framework.events.Event) => {
            console.log(`Core event: ${event.type}`);
            console.log(event);
        }
    );
} else {
    window.castReceiverContext.setLoggerLevel(cast.framework.LoggerLevel.NONE);
}
options.useShakaForHls = true;
options.playbackConfig = new cast.framework.PlaybackConfig();
// Set the player to start playback as soon as there are five seconds of
// media content buffered. Default is 10.
options.playbackConfig.autoResumeDuration = 5;
options.supportedCommands = cast.framework.messages.Command.ALL_BASIC_MEDIA;
console.log('Application is ready, starting system');
window.castReceiverContext.start(options);
================================================
FILE: src/components/playbackManager.ts
================================================
import type {
BaseItemDto,
MediaSourceInfo,
PlaybackInfoResponse,
PlayMethod
} from '@jellyfin/sdk/lib/generated-client';
import { RepeatMode } from '@jellyfin/sdk/lib/generated-client';
import { AppStatus } from '../types/appStatus';
import {
broadcastConnectionErrorMessage,
createStreamInfo,
ticksToSeconds
} from '../helpers';
import { DocumentManager } from './documentManager';
import { getDeviceProfile } from './deviceprofileBuilder';
import {
getPlaybackInfo,
getLiveStream,
load,
stopPingInterval
} from './jellyfinActions';
import {
onStopPlayerBeforePlaybackDone,
getMaxBitrate,
getOptimalMediaSource,
showPlaybackInfoErrorMessage,
checkDirectPlay,
createMediaInformation
} from './maincontroller';
import type { ItemIndex, PlayRequest } from '~/types/global';
/**
 * Mutable snapshot of the receiver's current playback session, shared
 * between the playback manager and the reporting helpers.
 */
export interface PlaybackState {
    /** Tick offset playback was started from (used for server seeking). */
    startPositionTicks: number;
    /** MediaType of the current item; reset to '' when idle. */
    mediaType: string | null | undefined;
    /** Id of the item being played; reset to '' when idle. */
    itemId: string | undefined;
    /** Active audio stream index, or null when unset. */
    audioStreamIndex: number | null;
    /** Active subtitle stream index, or null when subtitles are off. */
    subtitleStreamIndex: number | null;
    /** Media source currently loaded into the player. */
    mediaSource: MediaSourceInfo | null;
    /** Id of the current media source; reset to '' when idle. */
    mediaSourceId: string | null;
    /** Media source selected when playback was started. */
    PlaybackMediaSource: MediaSourceInfo | null;
    /** How the item is played, e.g. 'DirectStream' or 'Transcode'. */
    playMethod: PlayMethod | undefined;
    /** Whether seeking is possible for the current stream. */
    canSeek: boolean;
    /** True while switching streams (e.g. changing audio/subtitles). */
    isChangingStream: boolean;
    /** Whether playback should advance to the next queued item on stop. */
    playNextItemBool: boolean;
    /** Full item DTO for the current item, or null when idle. */
    item: BaseItemDto | null;
    /** Live stream id when playing live content; '' / null otherwise. */
    liveStreamId: string | null;
    /** Server play session id used for playback reporting. */
    playSessionId: string;
    /** Runtime of the current item in ticks, if known. */
    runtimeTicks: number | null;
}
// eslint-disable-next-line @typescript-eslint/no-extraneous-class
export abstract class PlaybackManager {
    private static playerManager: framework.PlayerManager;
    // Queue of items to play and the index of the current one (-1 = none).
    private static activePlaylist: BaseItemDto[];
    private static activePlaylistIndex: number;

    // Shared, mutable state describing the current playback session.
    static playbackState: PlaybackState = {
        audioStreamIndex: null,
        canSeek: false,
        isChangingStream: false,
        item: null,
        itemId: '',
        liveStreamId: '',
        mediaSource: null,
        mediaSourceId: '',
        mediaType: '',
        PlaybackMediaSource: null,
        playMethod: undefined,
        playNextItemBool: true,
        playSessionId: '',
        runtimeTicks: 0,
        startPositionTicks: 0,
        subtitleStreamIndex: null
    };

    /**
     * Store the CAF player manager and clear any previous playlist.
     * @param playerManager - CAF player manager instance
     */
    static setPlayerManager(playerManager: framework.PlayerManager): void {
        // Parameters
        this.playerManager = playerManager;
        this.resetPlaylist();
    }

    /* This is used to check if we can switch to
     * some other info overlay.
     *
     * Returns true when playing or paused.
     * (before: true only when playing)
     */
    static isPlaying(): boolean {
        return (
            this.playerManager.getPlayerState() ===
                cast.framework.messages.PlayerState.PLAYING ||
            this.playerManager.getPlayerState() ===
                cast.framework.messages.PlayerState.PAUSED
        );
    }

    /** @returns true when the player is buffering. */
    static isBuffering(): boolean {
        return (
            this.playerManager.getPlayerState() ===
            cast.framework.messages.PlayerState.BUFFERING
        );
    }

    /** @returns true when the player is idle. */
    static isIdle(): boolean {
        return (
            this.playerManager.getPlayerState() ===
            cast.framework.messages.PlayerState.IDLE
        );
    }

    /**
     * Start playback from a sender-supplied play request.
     * @param options - play request containing the item queue
     * @returns promise resolving when playback has been initiated
     */
    static async playFromOptions(options: PlayRequest): Promise<void> {
        // FIX: the old startPositionTicks / MediaType check called
        // playFromOptionsInternal identically in both branches, so the
        // dead branch has been removed.
        return this.playFromOptionsInternal(options);
    }

    /**
     * Replace the active playlist with the request's items and start playing.
     * @param options - play request with items and optional start index
     * @returns promise resolving when playback has been initiated
     */
    private static playFromOptionsInternal(
        options: PlayRequest
    ): Promise<void> {
        // Only stop the player when something was already queued.
        const stopPlayer =
            this.activePlaylist && this.activePlaylist.length > 0;

        this.activePlaylist = options.items;
        this.activePlaylistIndex = options.startIndex ?? 0;

        console.log('Loaded new playlist:', this.activePlaylist);

        // When starting playback initially, don't use
        // the next item facility.
        return this.playItem(options, stopPlayer);
    }

    // add item to playlist
    static enqueue(item: BaseItemDto): void {
        this.activePlaylist.push(item);
    }

    /** Clear the playlist; index -1 means "no current item". */
    static resetPlaylist(): void {
        this.activePlaylistIndex = -1;
        this.activePlaylist = [];
    }

    // If there are items in the queue after the current one
    static hasNextItem(): boolean {
        return this.activePlaylistIndex < this.activePlaylist.length - 1;
    }

    // If there are items in the queue before the current one
    static hasPrevItem(): boolean {
        return this.activePlaylistIndex > 0;
    }

    /**
     * Advance to the next item in the playlist, if any.
     * @param stopPlayer - whether to stop the player first
     * @returns true when a next item existed and playback was started
     */
    static playNextItem(stopPlayer = false): boolean {
        const nextItemInfo = this.getNextPlaybackItemInfo();

        if (nextItemInfo) {
            this.activePlaylistIndex = nextItemInfo.index;
            // Fire and forget; the result surfaces through player events.
            void this.playItem({ items: [] }, stopPlayer);

            return true;
        }

        return false;
    }

    /**
     * Step back to the previous item in the playlist, if any.
     * @returns true when a previous item existed and playback was started
     */
    static playPreviousItem(): boolean {
        if (this.activePlaylist && this.activePlaylistIndex > 0) {
            this.activePlaylistIndex--;
            // Fire and forget; the result surfaces through player events.
            void this.playItem({ items: [] }, true);

            return true;
        }

        return false;
    }

    // play item from playlist
    private static async playItem(
        options: PlayRequest,
        stopPlayer = false
    ): Promise<void> {
        if (stopPlayer) {
            this.stop();
        }

        const item = this.activePlaylist[this.activePlaylistIndex];

        console.log(`Playing index ${this.activePlaylistIndex}`, item);

        return await onStopPlayerBeforePlaybackDone(item, options);
    }

    // Would set private, but some refactorings need to happen first.
    /**
     * Resolve playback info with the server and start loading the media.
     * @param item - item to play
     * @param options - original play request
     */
    static async playItemInternal(
        item: BaseItemDto,
        options: PlayRequest
    ): Promise<void> {
        DocumentManager.setAppStatus(AppStatus.Loading);

        const maxBitrate = await getMaxBitrate();
        const deviceProfile = getDeviceProfile(maxBitrate);

        let playbackInfo: PlaybackInfoResponse = {};

        try {
            playbackInfo = await getPlaybackInfo(
                item,
                maxBitrate,
                deviceProfile,
                options.startPositionTicks ?? null,
                options.mediaSourceId ?? null,
                options.audioStreamIndex ?? null,
                options.subtitleStreamIndex ?? null,
                options.liveStreamId
            );
        } catch {
            // NOTE(review): execution deliberately continues with an empty
            // response here; it ends in the 'NoCompatibleStream' path below.
            broadcastConnectionErrorMessage();
        }

        if (playbackInfo.ErrorCode) {
            return showPlaybackInfoErrorMessage(playbackInfo.ErrorCode);
        }

        const mediaSource = getOptimalMediaSource(
            playbackInfo.MediaSources ?? []
        );

        if (!mediaSource) {
            return showPlaybackInfoErrorMessage('NoCompatibleStream');
        }

        let itemToPlay = mediaSource;

        // Live sources must be opened on the server before playback.
        if (mediaSource.RequiresOpening && playbackInfo.PlaySessionId) {
            const openLiveStreamResult = await getLiveStream(
                item,
                playbackInfo.PlaySessionId,
                maxBitrate,
                deviceProfile,
                options.startPositionTicks ?? null,
                mediaSource,
                null,
                null
            );

            if (openLiveStreamResult.MediaSource) {
                checkDirectPlay(openLiveStreamResult.MediaSource);
                itemToPlay = openLiveStreamResult.MediaSource;
            }
        }

        if (playbackInfo.PlaySessionId) {
            this.playMediaSource(
                playbackInfo.PlaySessionId,
                item,
                itemToPlay,
                options
            );
        }
    }

    /**
     * Build the load request for a media source and hand it to the player.
     * @param playSessionId - server play session id
     * @param item - item being played
     * @param mediaSource - media source to load
     * @param options - original play request (start position)
     */
    private static playMediaSource(
        playSessionId: string,
        item: BaseItemDto,
        mediaSource: MediaSourceInfo,
        options: PlayRequest
    ): void {
        DocumentManager.setAppStatus(AppStatus.Loading);

        const streamInfo = createStreamInfo(
            item,
            mediaSource,
            options.startPositionTicks ?? null
        );
        const mediaInfo = createMediaInformation(
            playSessionId,
            item,
            streamInfo
        );
        const loadRequestData = new cast.framework.messages.LoadRequestData();

        loadRequestData.media = mediaInfo;
        loadRequestData.autoplay = true;

        const startPositionTicks =
            mediaInfo.customData?.startPositionTicks ?? -1;

        // If we should seek at the start, translate it
        // to seconds and give it to loadRequestData :)
        if (startPositionTicks > 0) {
            loadRequestData.currentTime = ticksToSeconds(startPositionTicks);
        }

        // NOTE(review): load() appears to reset isChangingStream, hence the
        // save/restore around it — confirm against jellyfinActions.load.
        const isChangingStream = this.playbackState.isChangingStream;

        if (mediaInfo.customData) {
            load(mediaInfo.customData, item);
        }

        this.playbackState.isChangingStream = isChangingStream;
        this.playerManager.load(loadRequestData);
        this.playbackState.PlaybackMediaSource = mediaSource;

        console.log(`setting src to ${streamInfo.url}`);

        this.playbackState.mediaSource = mediaSource;
        DocumentManager.setPlayerBackdrop(item);
        this.playbackState.audioStreamIndex = streamInfo.audioStreamIndex;
        this.playbackState.subtitleStreamIndex = streamInfo.subtitleStreamIndex;

        // We use false as we do not want to broadcast the new status yet
        // we will broadcast manually when the media has been loaded, this
        // is to be sure the duration has been updated in the media element
        this.playerManager.setMediaInformation(mediaInfo, false);
    }

    /**
     * stop playback, as requested by the client
     */
    static stop(): void {
        this.playerManager.stop();
        // onStop will be called when playback comes to a halt.
    }

    /**
     * Called when media stops playing.
     * TODO avoid doing this between tracks in a playlist
     */
    static onStop(): void {
        if (this.getNextPlaybackItemInfo()) {
            this.playbackState.playNextItemBool = true;
        } else {
            this.playbackState.playNextItemBool = false;
            DocumentManager.setAppStatus(AppStatus.Waiting);
            stopPingInterval();
            DocumentManager.startBackdropInterval();
        }
    }

    /**
     * Get information about the next item to play from window.playlist
     * @returns item and index, or null to end playback
     */
    static getNextPlaybackItemInfo(): ItemIndex | null {
        if (this.activePlaylist.length < 1) {
            return null;
        }

        let newIndex: number;

        if (this.activePlaylistIndex < 0) {
            // negative = play the first item
            newIndex = 0;
        } else {
            switch (window.repeatMode) {
                case RepeatMode.RepeatOne:
                    newIndex = this.activePlaylistIndex;
                    break;
                case RepeatMode.RepeatAll:
                    newIndex = this.activePlaylistIndex + 1;

                    if (newIndex >= this.activePlaylist.length) {
                        newIndex = 0;
                    }

                    break;
                default:
                    newIndex = this.activePlaylistIndex + 1;
                    break;
            }
        }

        if (newIndex < this.activePlaylist.length) {
            return {
                index: newIndex,
                item: this.activePlaylist[newIndex]
            };
        }

        return null;
    }

    /**
     * Attempt to clean the receiver state.
     */
    static resetPlaybackScope(): void {
        DocumentManager.setAppStatus(AppStatus.Waiting);

        this.playbackState.startPositionTicks = 0;
        DocumentManager.setWaitingBackdrop(null, null);
        this.playbackState.mediaType = '';
        this.playbackState.itemId = '';
        this.playbackState.audioStreamIndex = null;
        this.playbackState.subtitleStreamIndex = null;
        this.playbackState.mediaSource = null;
        this.playbackState.mediaSourceId = '';
        this.playbackState.PlaybackMediaSource = null;
        this.playbackState.playMethod = undefined;
        this.playbackState.canSeek = false;
        this.playbackState.isChangingStream = false;
        this.playbackState.playNextItemBool = true;
        this.playbackState.item = null;
        this.playbackState.liveStreamId = '';
        this.playbackState.playSessionId = '';

        // Detail content
        DocumentManager.setLogo(null);
        DocumentManager.setDetailImage(null);
    }
}
================================================
FILE: src/css/jellyfin.css
================================================
html,
body {
    height: 100%;
    width: 100%;
}

body {
    font-family: Quicksand, sans-serif;
    font-weight: 300;
    color: #ddd;
    background-color: #000;
    margin: 0;
    padding: 0;
}

/* Screen-specific containers start hidden; the body class
   (waiting/details/…) decides which ones are shown further below. */
#waiting-container,
#waiting-container-backdrop,
.waiting > #video-player,
.details > #video-player,
.detailContent,
.detailLogo {
    /* There is an open bug on the chromecast, transitions are buggy and sometimes are not triggered.
    opacity: 0;
    -webkit-transition: opacity .25s ease-in-out;
    transition: opacity .25s ease-in-out;
    */
    display: none;
}

.d-none {
    display: none !important;
}

#waiting-container-backdrop {
    position: absolute;
    inset: 0;
    background-color: #000;
    background-position: center;
    background-size: cover;
    background-repeat: no-repeat;
}

#waiting-container {
    background-position: center;
    background-size: cover;
    background-repeat: no-repeat;
    /* Layer on top of the backdrop image: */
    background-color: rgb(15 15 15 / 60%);
    position: absolute;
    inset: 0;
    padding: 18px 32px;
}

.detailContent {
    background-position: center;
    background-size: cover;
    background-repeat: no-repeat;
    position: absolute;
    inset: 0;
    background-color: rgb(15 15 15 / 82%);
}

.detailLogo {
    height: 50px;
    width: 300px;
    background-position: left top;
    background-size: contain;
    background-repeat: no-repeat;
    position: absolute;
    top: 35px;
    left: 50px;
}

.detailImage {
    background-position: left top;
    background-size: contain;
    background-repeat: no-repeat;
    position: absolute;
    top: 22%;
    height: 63%;
    left: 8%;
    width: 20%;
}

/* Round green badge shown on items that have been watched. */
.playedIndicator {
    display: block;
    position: absolute;
    top: 5px;
    right: 5px;
    text-align: center;
    width: 1.8vw;
    height: 1.6vw;
    padding-top: 0.1vw;
    border-radius: 50%;
    color: #fff;
    background: rgb(0 128 0 / 80%);
    font-size: 1.1vw;
}

.playedIndicator img {
    display: block;
    width: 100%;
    height: 100%;
}

.detailImageProgressContainer {
    position: absolute;
    bottom: 10px;
    right: 0;
    left: 0;
    text-align: center;
}

.detailImageProgressContainer progress {
    width: 100%;
    margin: 0 auto;
    height: 6px;
}

/* Chrome */
.itemProgressBar::-webkit-progress-value {
    border-radius: 0;
    background-image: none;
    background-color: #52b54b;
}

/* Polyfill */
.itemProgressBar[aria-valuenow]::before {
    border-radius: 0;
    background-image: none;
    background-color: #52b54b;
}

.itemProgressBar {
    background: #000 !important;
    appearance: none;
    border: 0;
    border: 0 solid #222;
    border-radius: 0;
}

.detailInfo {
    position: absolute;
    top: 22%;
    height: 63%;
    left: 30.5%;
    font-size: 1.2vw;
    width: 60%;
}

.detailInfo p {
    margin: 10px 0;
}

.detailRating {
    margin: -4px 0 0;
}

.displayNameContainer {
    margin-top: -6px !important;
}

.displayName {
    font-size: 3vw;
}

#miscInfo {
    font-size: 1.5vw;
    margin-left: 2vw;
}

.starRating {
    background-image: url('../img/stars.svg');
    background-position: left center;
    background-repeat: no-repeat;
    background-size: cover;
    width: 1.6vw;
    height: 1.4vw;
    display: inline-block;
    vertical-align: text-bottom;
    top: 6px;
}

.starRatingValue {
    display: inline-block;
    margin-left: 1px;
}

/* Rotten Tomatoes critic rating icon (fresh/rotten below). */
.rottentomatoesicon {
    display: inline-block;
    width: 1.4vw;
    height: 1.4vw;
    background-size: cover;
    background-position: left center;
    background-repeat: no-repeat;
    vertical-align: text-bottom;
    top: 6px;
}

.starRatingValue + .rottentomatoesicon {
    margin-left: 1em;
}

.fresh {
    background-image: url('../img/fresh.svg');
}

.rotten {
    background-image: url('../img/rotten.svg');
}

/* Metacritic score background color, by score tier. */
.metascorehigh {
    background-color: rgb(102 204 51 / 70%);
}

.metascoremid {
    background-color: rgb(255 204 51 / 70%);
}

.metascorelow {
    background-color: rgb(240 0 0 / 70%);
}

.criticRating + .metascore,
.starRatingValue + .metascore {
    margin-left: 1em;
}

.criticRating {
    display: inline-block;
    margin-left: 1px;
}

.overview {
    max-height: 350px;
    overflow: hidden;
    text-overflow: ellipsis;
}

/* Container for "ready to cast" and the logo */
.waitingContent {
    position: fixed;
    bottom: 0;
    left: 0;
    text-align: center;
    font-size: 3vw;
    margin-bottom: 3%;
    margin-left: 5%;
}

/* Container for backdrop description */
.waitingDescription {
    position: fixed;
    bottom: 0;
    right: 0;
    margin-right: 5%;
    margin-bottom: 3%;
    font-size: 1.5vw;
}

#waiting-container h1,
#waiting-container h2 {
    margin: 25px 0;
}

#waiting-container h1 {
    font-size: 45px;
    font-weight: 300;
}

/* stylelint-disable no-descending-specificity */
.error-container h2,
#waiting-container h2 {
    font-size: 30px;
    font-weight: 300;
}
/* stylelint-enable no-descending-specificity */

/* jellyfin logo in the waiting container */
#waiting-container .logo {
    width: 4vw;
    display: inline-block;
    vertical-align: text-bottom;
}

/* Shown states: the body class toggles these containers back on. */
.waiting > #waiting-container-backdrop,
.waiting > #waiting-container,
.details .detailContent,
.details .detailLogo,
.details #waiting-container-backdrop {
    /* opacity: 1; */
    display: initial;
}

/* stylelint-disable selector-type-no-unknown */
/* Theming for the built-in CAF media player element. */
cast-media-player {
    --spinner-image: url('../img/spinner.png');
    --playback-logo-image: url('../img/banner.svg');
    --watermark-image: url('../img/banner.svg');
    --watermark-size: 225px;
    --watermark-position: top right;
    --theme-hue: 195.3; /* Jellyfin blue */
    --progress-color: #00a4dc;
}
/* stylelint-enable selector-type-no-unknown */
================================================
FILE: src/helpers.ts
================================================
import type {
BaseItemDtoQueryResult,
PlaybackProgressInfo,
MediaSourceInfo,
MediaStream,
BaseItemDto,
BaseItemPerson,
TvShowsApiGetEpisodesRequest,
UserDto,
InstantMixApiGetInstantMixFromAlbumRequest,
InstantMixApiGetInstantMixFromPlaylistRequest,
InstantMixApiGetInstantMixFromArtistsRequest,
InstantMixApiGetInstantMixFromSongRequest,
ItemFields,
ItemsApiGetItemsRequest
} from '@jellyfin/sdk/lib/generated-client';
import {
getInstantMixApi,
getItemsApi,
getTvShowsApi,
getUserApi
} from '@jellyfin/sdk/lib/utils/api';
import type {
GenericMediaMetadata,
MovieMediaMetadata,
MusicTrackMediaMetadata,
PhotoMediaMetadata,
TvShowMediaMetadata
} from 'chromecast-caf-receiver/cast.framework.messages';
import { JellyfinApi } from './components/jellyfinApi';
import {
PlaybackManager,
type PlaybackState
} from './components/playbackManager';
import type { BusMessage, StreamInfo } from './types/global';
/** Union of the instant-mix request shapes accepted by the helpers below. */
type InstantMixApiRequest =
    | InstantMixApiGetInstantMixFromAlbumRequest
    | InstantMixApiGetInstantMixFromArtistsRequest
    | InstantMixApiGetInstantMixFromSongRequest
    | InstantMixApiGetInstantMixFromPlaylistRequest;
// Jellyfin reports time in "ticks": 10,000,000 ticks per second.
export const TicksPerSecond = 10000000;
/**
 * Get current playback position in ticks, adjusted for server seeking
 * @param state - playback state.
 * @returns position in ticks
 */
export function getCurrentPositionTicks(state: PlaybackState): number {
    const elapsedTicks =
        window.playerManager.getCurrentTimeSec() * TicksPerSecond;
    const mediaInformation = window.playerManager.getMediaInformation();

    // When the client cannot seek locally the reported time is relative to
    // the server-side seek point, so re-add the start offset.
    const needsOffset =
        mediaInformation && !mediaInformation.customData?.canClientSeek;

    return needsOffset
        ? elapsedTicks + (state.startPositionTicks || 0)
        : elapsedTicks;
}
/**
 * Get parameters used for playback reporting
 * @param state - playback state.
 * @returns progress information for use with the reporting APIs
 */
export function getReportingParams(state: PlaybackState): PlaybackProgressInfo {
    /* Math.round() calls:
     * on 10.7, any floating point will give an API error,
     * so it's actually really important to make sure that
     * those fields are always rounded.
     */
    const isPaused =
        window.playerManager.getPlayerState() ===
        cast.framework.messages.PlayerState.PAUSED;
    const volume = window.volume;

    return {
        AudioStreamIndex: state.audioStreamIndex,
        CanSeek: state.canSeek,
        IsMuted: volume?.muted ?? false,
        IsPaused: isPaused,
        ItemId: state.itemId,
        LiveStreamId: state.liveStreamId,
        MediaSourceId: state.mediaSourceId,
        PlayMethod: state.playMethod,
        PlaySessionId: state.playSessionId,
        PositionTicks: Math.round(getCurrentPositionTicks(state)),
        RepeatMode: window.repeatMode,
        SubtitleStreamIndex: state.subtitleStreamIndex,
        VolumeLevel: Math.round((volume?.level ?? 0) * 100)
    };
}
/**
 * Describe the currently playing item for the connected sender clients.
 * Used in playback reporting: the result is sent over the cast protocol
 * to the connected client (or clients?).
 * @param playbackState - playback state.
 * @param reportingData - progress information from getReportingParams
 * @returns lots of data for the connected client
 */
export function getSenderReportingData(
    playbackState: PlaybackState,
    reportingData: PlaybackProgressInfo
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
): any {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const state: any = {
        ItemId: reportingData.ItemId,
        PlayState: reportingData
    };

    state.NowPlayingItem = {
        Id: reportingData.ItemId,
        RunTimeTicks: playbackState.runtimeTicks
    };

    const item = playbackState.item;

    if (!item) {
        return state;
    }

    const nowPlayingItem = state.NowPlayingItem;
    const mediaSource = item.MediaSources?.find(
        (m: MediaSourceInfo) => m.Id == reportingData.MediaSourceId
    );

    // Bulk-copy the plain item fields onto the now-playing description.
    Object.assign(nowPlayingItem, {
        ServerId: item.ServerId,
        Chapters: item.Chapters ?? [],
        MediaStreams: mediaSource ? mediaSource.MediaStreams : [],
        MediaType: item.MediaType,
        Type: item.Type,
        Name: item.Name,
        IndexNumber: item.IndexNumber,
        IndexNumberEnd: item.IndexNumberEnd,
        ParentIndexNumber: item.ParentIndexNumber,
        ProductionYear: item.ProductionYear,
        PremiereDate: item.PremiereDate,
        SeriesName: item.SeriesName,
        Album: item.Album,
        Artists: item.Artists
    });

    const imageTags = item.ImageTags ?? {};

    // Primary image: prefer the series image, then the item's own,
    // then the album's.
    if (item.SeriesPrimaryImageTag) {
        nowPlayingItem.PrimaryImageItemId = item.SeriesId;
        nowPlayingItem.PrimaryImageTag = item.SeriesPrimaryImageTag;
    } else if (imageTags.Primary) {
        nowPlayingItem.PrimaryImageItemId = item.Id;
        nowPlayingItem.PrimaryImageTag = imageTags.Primary;
    } else if (item.AlbumPrimaryImageTag) {
        nowPlayingItem.PrimaryImageItemId = item.AlbumId;
        nowPlayingItem.PrimaryImageTag = item.AlbumPrimaryImageTag;
    }

    // Backdrop: the item's own, falling back to the parent's.
    if (item.BackdropImageTags?.length) {
        nowPlayingItem.BackdropItemId = item.Id;
        nowPlayingItem.BackdropImageTag = item.BackdropImageTags[0];
    } else if (item.ParentBackdropImageTags?.length) {
        nowPlayingItem.BackdropItemId = item.ParentBackdropItemId;
        nowPlayingItem.BackdropImageTag = item.ParentBackdropImageTags[0];
    }

    if (imageTags.Thumb) {
        nowPlayingItem.ThumbItemId = item.Id;
        nowPlayingItem.ThumbImageTag = imageTags.Thumb;
    }

    // Logo: the item's own, falling back to the parent's.
    if (imageTags.Logo) {
        nowPlayingItem.LogoItemId = item.Id;
        nowPlayingItem.LogoImageTag = imageTags.Logo;
    } else if (item.ParentLogoImageTag) {
        nowPlayingItem.LogoItemId = item.ParentLogoItemId;
        nowPlayingItem.LogoImageTag = item.ParentLogoImageTag;
    }

    if (playbackState.playNextItemBool) {
        const nextItemInfo = PlaybackManager.getNextPlaybackItemInfo();

        if (nextItemInfo) {
            state.NextMediaType = nextItemInfo.item.MediaType;
        }
    }

    return state;
}
/**
* Create CAF-native metadata for a given item
* @param item - item to look up
* @returns one of the metadata classes in cast.framework.messages.*Metadata
*/
export function getMetadata(
item: BaseItemDto
):
| GenericMediaMetadata
| MovieMediaMetadata
| MusicTrackMediaMetadata
| PhotoMediaMetadata
| TvShowMediaMetadata {
let metadata:
| GenericMediaMetadata
| MovieMediaMetadata
| MusicTrackMediaMetadata
| PhotoMediaMetadata
| TvShowMediaMetadata;
let posterUrl = '';
if (item.SeriesPrimaryImageTag) {
posterUrl = JellyfinApi.createUrl(
`Items/${item.SeriesId}/Images/Primary?tag=${item.SeriesPrimaryImageTag}`
);
} else if (item.AlbumPrimaryImageTag) {
posterUrl = JellyfinApi.createUrl(
`Items/${item.AlbumId}/Images/Primary?tag=${item.AlbumPrimaryImageTag}`
);
} else if (item.ImageTags?.Primary) {
posterUrl = JellyfinApi.createUrl(
`Items/${item.Id}/Images/Primary?tag=${item.ImageTags.Primary}`
);
}
if (item.Type == 'Episode') {
const tvShowMedata = new cast.framework.messages.TvShowMediaMetadata();
tvShowMedata.seriesTitle = item.SeriesName ?? undefined;
if (item.PremiereDate) {
tvShowMedata.originalAirdate = parseISO8601Date(
item.PremiereDate
).toISOString();
}
if (item.IndexNumber != null) {
tvShowMedata.episode = item.IndexNumber;
}
if (item.ParentIndexNumber != null) {
tvShowMedata.season = item.ParentIndexNumber;
}
metadata = tvShowMedata;
} else if (item.Type == 'Photo') {
const photoMetadata = new cast.framework.messages.PhotoMediaMetadata();
if (item.PremiereDate) {
photoMetadata.creationDateTime = parseISO8601Date(
item.PremiereDate
).toISOString();
}
// TODO more metadata?
metadata = photoMetadata;
} else if (item.Type == 'Audio') {
const musicTrackMetadata =
new cast.framework.messages.MusicTrackMediaMetadata();
musicTrackMetadata.songName = item.Name ?? undefined;
musicTrackMetadata.artist = item.Artists?.length
? item.Artists.join(', ')
: '';
musicTrackMetadata.albumArtist = item.AlbumArtist ?? undefined;
musicTrackMetadata.albumName = item.Album ?? undefined;
if (item.PremiereDate) {
musicTrackMetadata.releaseDate = parseISO8601Date(
item.PremiereDate
).toISOString();
}
if (item.IndexNumber != null) {
musicTrackMetadata.trackNumber = item.IndexNumber;
}
if (item.ParentIndexNumber != null) {
musicTrackMetadata.discNumber = item.ParentIndexNumber;
}
// previously: p.PersonType == 'Type'.. wtf?
const composer = (item.People ?? []).find(
(p: BaseItemPerson) => p.Type == 'Composer'
);
if (composer?.Name) {
musicTrackMetadata.composer = composer.Name;
}
metadata = musicTrackMetadata;
} else if (item.Type == 'Movie') {
const movieMetadata = new cast.framework.messages.MovieMediaMetadata();
if (item.PremiereDate) {
movieMetadata.releaseDate = parseISO8601Date(
item.PremiereDate
).toISOString();
}
if (item.Studios?.length && item.Studios[0].Name) {
movieMetadata.studio = item.Studios[0].Name;
}
metadata = movieMetadata;
} else {
const genericMetadata =
new cast.framework.messages.GenericMediaMetadata();
if (item.PremiereDate) {
genericMetadata.releaseDate = parseISO8601Date(
item.PremiereDate
).toISOString();
}
metadata = genericMetadata;
}
metadata.title = item.Name ?? '????';
metadata.images = [new cast.framework.messages.Image(posterUrl)];
return metadata;
}
/**
 * Check if a media source is an HLS stream
 * @param mediaSource - mediaSource
 * @returns boolean
 */
export function isHlsStream(mediaSource: MediaSourceInfo): boolean {
    const subProtocol = mediaSource.TranscodingSubProtocol;

    return subProtocol == 'hls';
}
/**
 * Create the necessary information about an item
 * needed for playback
 * @param item - Item to play
 * @param mediaSource - MediaSourceInfo for the item
 * @param startPosition - Where to seek to (possibly server seeking)
 * @returns object with enough information to start playback
 */
export function createStreamInfo(
    item: BaseItemDto,
    mediaSource: MediaSourceInfo,
    startPosition: number | null
): StreamInfo {
    let mediaUrl;
    let contentType;

    // server seeking: encode the start position as a media fragment.
    const startPositionInSeekParam = startPosition
        ? ticksToSeconds(startPosition)
        : 0;
    const seekParam = startPositionInSeekParam
        ? `#t=${startPositionInSeekParam}`
        : '';

    let isStatic = false;
    let streamContainer = mediaSource.Container;
    let playerStartPositionTicks = 0;

    const type = item.MediaType?.toLowerCase();

    if (type == 'video') {
        contentType = `video/${mediaSource.Container}`;

        if (mediaSource.SupportsDirectPlay && mediaSource.Path) {
            // Direct play: the player fetches the file path itself.
            mediaUrl = mediaSource.Path;
            isStatic = true;
        } else if (mediaSource.SupportsDirectStream) {
            mediaUrl = JellyfinApi.createUrl(
                `videos/${item.Id}/stream.${mediaSource.Container}?mediaSourceId=${mediaSource.Id}&api_key=${JellyfinApi.accessToken}&static=true${seekParam}`
            );
            isStatic = true;
            playerStartPositionTicks = startPosition ?? 0;
        } else {
            // TODO deal with !TranscodingUrl
            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
            mediaUrl = JellyfinApi.createUrl(mediaSource.TranscodingUrl!);

            if (isHlsStream(mediaSource)) {
                mediaUrl += seekParam;
                playerStartPositionTicks = startPosition ?? 0;
                contentType = 'application/x-mpegURL';
                streamContainer = 'm3u8';
            } else {
                contentType = `video/${mediaSource.TranscodingContainer}`;
                streamContainer = mediaSource.TranscodingContainer;

                // With copied timestamps the stream position is absolute,
                // so no start offset should be reported to the player.
                if (mediaUrl.toLowerCase().includes('copytimestamps=true')) {
                    startPosition = 0;
                }
            }
        }
    } else {
        contentType = `audio/${mediaSource.Container}`;

        if (mediaSource.SupportsDirectPlay && mediaSource.Path) {
            mediaUrl = mediaSource.Path;
            isStatic = true;
            playerStartPositionTicks = startPosition ?? 0;
        } else {
            const isDirectStream = mediaSource.SupportsDirectStream;

            if (isDirectStream) {
                const outputContainer = (
                    mediaSource.Container ?? ''
                ).toLowerCase();

                mediaUrl = JellyfinApi.createUrl(
                    `Audio/${item.Id}/stream.${outputContainer}?mediaSourceId=${mediaSource.Id}&api_key=${JellyfinApi.accessToken}&static=true${seekParam}`
                );
                isStatic = true;
            } else {
                streamContainer = mediaSource.TranscodingContainer;
                contentType = `audio/${mediaSource.TranscodingContainer}`;
                // TODO deal with !TranscodingUrl
                // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
                mediaUrl = JellyfinApi.createUrl(mediaSource.TranscodingUrl!);
            }
        }
    }

    // TODO: Remove the second half of the expression by supporting changing the mediaElement src dynamically.
    // It is a pain and will require unbinding all event handlers during the operation
    const canSeek = (mediaSource.RunTimeTicks ?? 0) > 0;
    const info: StreamInfo = {
        audioStreamIndex: mediaSource.DefaultAudioStreamIndex ?? null,
        canClientSeek: isStatic || (canSeek && streamContainer == 'm3u8'),
        canSeek: canSeek,
        contentType: contentType,
        isStatic: isStatic,
        mediaSource: mediaSource,
        playerStartPositionTicks: playerStartPositionTicks,
        startPositionTicks: startPosition,
        streamContainer: streamContainer,
        subtitleStreamIndex: mediaSource.DefaultSubtitleStreamIndex ?? null,
        url: mediaUrl
    };
    const subtitleStreams =
        mediaSource.MediaStreams?.filter((stream: MediaStream) => {
            return stream.Type === 'Subtitle';
        }) ?? [];
    const subtitleTracks: framework.messages.Track[] = [];

    subtitleStreams.forEach((subtitleStream) => {
        if (subtitleStream.DeliveryUrl === undefined) {
            /* The CAF v3 player only supports vtt currently,
             * SRT subs can be "transcoded" to vtt by jellyfin.
             * The server will do that in accordance with the device profiles and
             * give us a DeliveryUrl if that is the case.
             * Support for more could be added with a custom implementation
             */
            return;
        }

        // FIX: nullish check instead of truthiness, so a default subtitle
        // stream index of 0 still produces a text track.
        if (info.subtitleStreamIndex == null) {
            return;
        }

        const track = new cast.framework.messages.Track(
            info.subtitleStreamIndex,
            cast.framework.messages.TrackType.TEXT
        );

        if (subtitleStream.IsExternal && subtitleStream.DeliveryUrl) {
            track.trackContentId = subtitleStream.DeliveryUrl;
        } else if (subtitleStream.DeliveryUrl) {
            track.trackContentId = JellyfinApi.createUrl(
                subtitleStream.DeliveryUrl
            );
        }

        // FIX: nullish check so a stream index of 0 is still applied.
        if (subtitleStream.Index != null) {
            track.trackId = subtitleStream.Index;
        }

        if (subtitleStream.Language) {
            track.language = subtitleStream.Language;
        }

        if (subtitleStream.DisplayTitle) {
            track.name = subtitleStream.DisplayTitle;
        }

        // TODO this should not be hardcoded but we only support VTT currently
        track.trackContentType = 'text/vtt';
        track.subtype = cast.framework.messages.TextTrackType.SUBTITLES;
        subtitleTracks.push(track);
        // FIX: log the actual track URL; info.subtitleStreamUrl was never
        // assigned, so the old message always printed "undefined".
        console.log(`Subtitle url: ${track.trackContentId}`);
    });

    info.tracks = subtitleTracks;

    return info;
}
/**
 * Get stream by its index while making a type assertion
 * @param streams - array of streams to consider
 * @param type - type of stream to match (e.g. 'Subtitle')
 * @param index - index of stream to match
 * @returns first matching stream, or an empty object when none matches
 */
export function getStreamByIndex(
    streams: MediaStream[],
    type: string,
    index: number
): MediaStream {
    return (
        streams.find((s) => {
            // Strict equality: both Type and Index must match exactly.
            return s.Type === type && s.Index === index;
        }) ?? {}
    );
}
// Extra item fields requested from the server for playback queries below:
// media sources (for stream selection) and chapters.
const requiredItemFields: ItemFields[] = ['MediaSources', 'Chapters'];
/**
* Get a random selection of items given one item,
* this item can be a music artist item, or a music genre item,
* or something else. If something else it searches for child items
* of the provided one.
*
* It's used only in maincomponents.shuffle.
* @param item - Parent item of shuffle search
* @returns items for the queue
*/
export function getShuffleItems(
item: BaseItemDto
): Promise {
let query: ItemsApiGetItemsRequest = {
fields: requiredItemFields,
filters: ['IsNotFolder'],
limit: 50,
recursive: true,
sortBy: ['Random']
};
if (item.Type == 'MusicArtist') {
query = {
...query,
artistIds: item.Id ? [item.Id] : undefined,
mediaTypes: ['Audio']
};
} else if (item.Type == 'MusicGenre') {
query = {
...query,
genres: item.Name ? [item.Name] : undefined,
mediaTypes: ['Audio']
};
} else {
query = {
...query,
parentId: item.Id
};
}
return getItemsForPlayback(query);
}
/**
* Get an "Instant Mix" given an item, which can be a
* music artist, genre, album, playlist
* @param item - Parent item of the search
* @returns items for the queue
*/
export async function getInstantMixItems(
item: BaseItemDto
): Promise {
if (item.Id === undefined) {
throw new Error('Item ID not provided');
}
const query: InstantMixApiRequest = {
fields: ['MediaSources', 'Chapters'],
itemId: item.Id,
limit: 50
};
const instantMixApi = getInstantMixApi(JellyfinApi.jellyfinApi);
if (item.Type == 'MusicArtist') {
return (await instantMixApi.getInstantMixFromArtists(query)).data;
} else if (item.Type == 'MusicGenre') {
return (
await instantMixApi.getInstantMixFromMusicGenreById({
...query,
id: item.Id
})
).data;
} else if (item.Type == 'MusicAlbum') {
return (await instantMixApi.getInstantMixFromAlbum(query)).data;
} else if (item.Type == 'Audio') {
return (await instantMixApi.getInstantMixFromSong(query)).data;
} else if (item.Type == 'Playlist') {
return (await instantMixApi.getInstantMixFromPlaylist(query)).data;
}
throw new Error(`InstantMix: Unknown item type: ${item.Type}`);
}
/**
* Get items to be played back
* @param query - specification on what to search for
* @returns items to be played back
*/
export async function getItemsForPlayback(
query: ItemsApiGetItemsRequest
): Promise {
const response = await getItemsApi(JellyfinApi.jellyfinApi).getItems({
...query,
excludeLocationTypes: ['Virtual'],
fields: requiredItemFields,
limit: query.limit ?? 100
});
return response.data;
}
/**
* Get episodes for a show given by seriesId
* @param query - query parameters to build on
* @returns episode items
*/
export async function getEpisodesForPlayback(
query: TvShowsApiGetEpisodesRequest
): Promise {
const response = await getTvShowsApi(JellyfinApi.jellyfinApi).getEpisodes(
query
);
return response.data;
}
/**
* Get user object for the current user
* @returns user object
*/
export async function getUser(): Promise {
const response = await getUserApi(JellyfinApi.jellyfinApi).getCurrentUser();
return response.data;
}
/**
* Process a list of items for playback
* by resolving things like folders to playable items.
* @param items - items to resolve
* @param smart - If enabled it will try to find the next episode given the current one,
* if the connected user has enabled that in their settings
* @returns Promise for search result containing items to play
*/
export async function translateRequestedItems(
items: BaseItemDto[],
smart = false
): Promise {
const firstItem = items[0];
if (firstItem.Type == 'Playlist') {
return await getItemsForPlayback({
parentId: firstItem.Id
});
} else if (firstItem.Type == 'MusicArtist') {
return await getItemsForPlayback({
artistIds: firstItem.Id ? [firstItem.Id] : undefined,
filters: ['IsNotFolder'],
mediaTypes: ['Audio'],
recursive: true,
sortBy: ['SortName']
});
} else if (firstItem.Type == 'MusicGenre') {
return await getItemsForPlayback({
filters: ['IsNotFolder'],
genres: firstItem.Name ? [firstItem.Name] : undefined,
mediaTypes: ['Audio'],
recursive: true,
sortBy: ['SortName']
});
} else if (firstItem.IsFolder) {
return await getItemsForPlayback({
filters: ['IsNotFolder'],
mediaTypes: ['Audio', 'Video'],
parentId: firstItem.Id,
recursive: true,
sortBy: ['SortName']
});
} else if (smart && firstItem.Type == 'Episode' && items.length == 1) {
const user = await getUser();
if (!user.Configuration?.EnableNextEpisodeAutoPlay) {
return {
Items: items
};
}
const result = await getItemsForPlayback({
ids: firstItem.Id ? [firstItem.Id] : undefined
});
if (!result.Items || result.Items.length < 1) {
return result;
}
const episode = result.Items[0];
if (!episode.SeriesId) {
return result;
}
const episodesResult = await getEpisodesForPlayback({
isMissing: false,
seriesId: episode.SeriesId
});
let foundItem = false;
episodesResult.Items = episodesResult.Items?.filter(
(e: BaseItemDto) => {
if (foundItem) {
return true;
}
if (e.Id == episode.Id) {
foundItem = true;
return true;
}
return false;
}
);
episodesResult.TotalRecordCount = episodesResult.Items?.length ?? 0;
return episodesResult;
}
return {
Items: items
};
}
/**
 * Parse a date string. Currently just delegates to the Date
 * constructor, but gives us a single place to handle unusual
 * date formats later.
 * @param date - string date to parse
 * @returns date object
 */
export function parseISO8601Date(date: string): Date {
    const parsed = new Date(date);

    return parsed;
}
/**
 * Convert a server tick count into seconds.
 * @param ticks - number of ticks to convert
 * @returns number of seconds
 */
export function ticksToSeconds(ticks: number): number {
    const seconds = ticks / TicksPerSecond;

    return seconds;
}
/**
 * Send a message to the connected sender over the custom
 * message transport.
 * @param message - to send
 */
export function broadcastToMessageBus(message: BusMessage): void {
    const { castReceiverContext, senderId } = window;

    castReceiverContext.sendCustomMessage(
        'urn:x-cast:com.connectsdk',
        senderId,
        message
    );
}
/**
* Inform the cast sender that we couldn't connect
*/
export function broadcastConnectionErrorMessage(): void {
broadcastToMessageBus({ message: '', type: 'connectionerror' });
}
================================================
FILE: src/index.html
================================================
Jellyfin

Ready to cast
================================================
FILE: src/types/appStatus.ts
================================================
/**
 * High-level states the receiver application can be in.
 * NOTE(review): the string values look like DOM/CSS identifiers —
 * confirm usage in documentManager.
 */
export enum AppStatus {
    Audio = 'audio',
    Backdrop = 'backdrop',
    Details = 'details',
    Loading = 'loading',
    PlayingWithControls = 'playing-with-controls',
    Unset = '',
    Waiting = 'waiting'
}
================================================
FILE: src/types/global.d.ts
================================================
import {
CastReceiverContext,
PlayerManager
} from 'chromecast-caf-receiver/cast.framework';
import { SystemVolumeData } from 'chromecast-caf-receiver/cast.framework.system';
import type {
BaseItemDto,
MediaSourceInfo,
PlayMethod,
RepeatMode
} from '@jellyfin/sdk/lib/generated-client';
import type {
TextTrackEdgeType,
Track
} from 'chromecast-caf-receiver/cast.framework.messages';
// Kinds of messages broadcast to the cast sender over the custom channel.
type BusMessageType =
    | 'connectionerror'
    | 'error'
    | 'playbackerror'
    | 'playbackprogress'
    | 'playbackstart'
    | 'playbackstop'
    | 'playstatechange'
    | 'repeatmodechange'
    | 'volumechange';
// Messagebus message: envelope for everything sent over the custom
// message transport (urn:x-cast:com.connectsdk).
export interface BusMessage {
    type: BusMessageType;
    // Optional human-readable text; may be empty (e.g. connectionerror)
    message?: string;
    // Optional string payload; meaning depends on `type`
    data?: string;
}
//
// For the old queue stuff
//
// Pairs a queue item with its position in the queue.
export interface ItemIndex {
    item: BaseItemDto;
    index: number;
}
// From commandHandler
// Payload of a "play these items" command from the sender.
export interface PlayRequest {
    // Position in `items` to start playback at
    startIndex?: number;
    items: BaseItemDto[];
    // Resume position within the first item, in ticks
    startPositionTicks?: number;
    mediaSourceId?: string;
    audioStreamIndex?: number;
    subtitleStreamIndex?: number;
    liveStreamId?: string;
}
// Payload of a command to display an item's details.
export interface DisplayRequest {
    ItemId: string;
}

// Payload of a command to jump to a specific queue index.
export interface SetIndexRequest {
    index: number;
}

// Payload of a command to change the repeat mode.
export interface SetRepeatModeRequest {
    RepeatMode: RepeatMode;
}

// Payload of a seek command.
export interface SeekRequest {
    position: number; // seconds
}

// A command received from the sender: the command name plus its
// type-specific options payload.
export interface DataMessage {
    options:
        | PlayRequest
        | DisplayRequest
        | SetIndexRequest
        | SetRepeatModeRequest
        | SeekRequest;
    command: string;
}
type SupportedCommands = Record void>;
// /From commandHandler
// User-configured subtitle styling received from the sender.
interface SubtitleAppearance {
    dropShadow: TextTrackEdgeType;
    font: string;
    textColor: string;
    textBackground: string;
    textSize: 'smaller' | 'small' | 'large' | 'larger' | 'extralarge';
}
// Everything needed to start playback of a single media stream.
interface StreamInfo {
    // Subtitle tracks, attached after construction
    tracks?: Track[];
    audioStreamIndex: number | null;
    // Whether the client can seek without a new stream request
    canClientSeek: boolean;
    canSeek: boolean;
    contentType: string;
    // True when the media is served as a static file (direct play/stream)
    isStatic: boolean;
    mediaSource?: MediaSourceInfo;
    playerStartPositionTicks?: number;
    startPositionTicks: number | null;
    streamContainer?: string | null;
    subtitleStreamIndex: number | null;
    subtitleStreamUrl?: string;
    url: string;
}
declare global {
    // Receiver-wide state kept on the global window object.
    export interface Window {
        mediaElement: HTMLElement | null;
        playerManager: PlayerManager;
        castReceiverContext: CastReceiverContext;
        repeatMode: RepeatMode;
        reportEventType: 'repeatmodechange';
        subtitleAppearance: SubtitleAppearance;
        // Maximum streaming bitrate, if the sender set one
        MaxBitrate: number | undefined;
        // ID of the connected sender; target for custom bus messages
        senderId: string | undefined;
        volume: SystemVolumeData;
    }
}
// Augment the CAF MediaInformation type so its `customData` carries
// the Jellyfin-specific shape defined below.
declare module 'chromecast-caf-receiver/cast.framework.messages' {
    // eslint-disable-next-line @typescript-eslint/no-empty-object-type
    interface MediaInformationCustomData
        extends JellyfinMediaInformationCustomData {}
}
// Custom data this receiver attaches to each CAF media item.
interface JellyfinMediaInformationCustomData {
    audioStreamIndex: number | null;
    canClientSeek: boolean;
    canSeek: boolean;
    itemId: string | undefined;
    liveStreamId: string | null;
    mediaSourceId: string | null;
    playMethod: PlayMethod;
    playSessionId: string;
    runtimeTicks: number | null;
    startPositionTicks: number;
    subtitleStreamIndex: number | null;
}
================================================
FILE: stylelint.config.js
================================================
// Stylelint configuration: standard ruleset with a few rules relaxed
// to fit this project's CSS.
module.exports = {
    extends: ['stylelint-config-standard'],
    rules: {
        'at-rule-no-unknown': null,
        // Project uses its own class/id naming; skip pattern enforcement.
        'selector-class-pattern': null,
        'selector-id-pattern': null
    },
    syntax: 'css'
};
================================================
FILE: tsconfig.json
================================================
{
"compilerOptions": {
"target": "ES2015",
"module": "ESNext",
"moduleResolution": "bundler",
"lib": ["dom", "ES2015"],
"skipLibCheck": true,
"resolveJsonModule": true,
"allowSyntheticDefaultImports": true,
"allowJs": true,
"sourceMap": true,
"outDir": "./dist/",
"strict": true,
"paths": {
"~/*": ["./src/*"]
},
"types": ["@types/chromecast-caf-receiver", "vite/client"]
}
}
================================================
FILE: vite.config.ts
================================================
/* eslint-disable sort-keys */
import { defineConfig } from 'vite';

// Vite configuration for the Chromecast receiver: sources live under
// src/, output goes to dist/, and assets are never inlined so the
// receiver serves them as separate files.
export default defineConfig({
    root: 'src',
    base: './',
    build: {
        outDir: '../dist',
        emptyOutDir: true,
        // Match tsconfig target; Chromecast devices run older Chrome.
        target: 'es2015',
        assetsInlineLimit: 0
    },
    server: {
        port: 9000
    }
});