[
  {
    "path": ".github/actions/install-slang/action.yaml",
    "content": "name: Install slang\ninputs:\n  version:\n    required: true\n  target:\n    required: true\n  token:\n    required: true\nruns:\n  using: \"composite\"\n  steps:\n    - name: install slang\n      shell: bash\n      run: |\n        mkdir $RUNNER_TEMP/slang\n        slang_url=$( gh api https://api.github.com/repos/shader-slang/slang/releases/tags/${{ inputs.version }} |\\\n          jq -r '.assets[].browser_download_url' | grep ${{ inputs.target }}.tar.gz | head -1 )\n        (cd $RUNNER_TEMP/slang && curl -o - -fsSL \"$slang_url\" | tar zxv)\n        echo \"SLANG_DIR=$RUNNER_TEMP/slang\" >> \"$GITHUB_ENV\"\n        echo \"LD_LIBRARY_PATH=$RUNNER_TEMP/slang/lib\" >> \"$GITHUB_ENV\"\n        echo \"DYLD_LIBRARY_PATH=$RUNNER_TEMP/slang/lib\" >> \"$GITHUB_ENV\"\n      env:\n        GH_TOKEN: ${{ inputs.token }}\n\n\n"
  },
  {
    "path": ".github/workflows/bump-version.yaml",
    "content": "on:\n  push:\n    branches: [main]\nname: Open a PR to bump the version\njobs:\n  open_pr:\n    strategy:\n      matrix:\n        component: [\"server\", \"client\"]\n    name: Open PR\n    runs-on: ubuntu-24.04\n    permissions:\n      pull-requests: write\n      contents: write\n    steps:\n      - uses: dtolnay/rust-toolchain@stable\n        with:\n          targets: x86_64-unknown-linux-gnu\n      - uses: actions/checkout@v4\n        with:\n          fetch-depth: 0\n          fetch-tags: true\n\n      - uses: swatinem/rust-cache@v2\n      - run: cargo install git-cliff@^2.6 cargo-edit@^0.12\n\n      - name: determine version\n        run: |\n          echo \"COMPONENT=${{ matrix.component }}\" | tee -a \"$GITHUB_ENV\"\n          echo \"CURRENT_VERSION=$( git tag | grep \"${{ matrix.component }}\" | sort -V | tail -1 )\" | tee -a \"$GITHUB_ENV\"\n\n          mm_component=\"mm-${{ matrix.component }}\"\n          echo \"MM_COMPONENT=$mm_component\" | tee -a \"$GITHUB_ENV\"\n\n          version=$( git cliff -c .github/workflows/cliff.toml \\\n            --bumped-version \\\n            --include-path \"$MM_COMPONENT*/**/*\" \\\n            --tag-pattern \"${{ matrix.component }}\" )\n          echo \"BUMPED_VERSION=$version\" | tee -a \"$GITHUB_ENV\"\n          echo \"BUMPED_VERSION_SHORT=$( echo $version | sed -E 's/^[a-z]+-v(.*)/\\1/' )\" | tee -a \"$GITHUB_ENV\"\n\n      - name: replace version in files\n        if: ${{ env.BUMPED_VERSION != env.CURRENT_VERSION }}\n        run: |\n          git grep --cached -l '' | grep -v CHANGELOG |\\\n            xargs sed -i -E \"s/mm$COMPONENT-v[0-9]+\\.[0-9]+\\.[0-9]+/$BUMPED_VERSION/g\"\n\n      - name: replace version in Cargo.toml\n        if: ${{ env.BUMPED_VERSION != env.CURRENT_VERSION }}\n        run: (cd $MM_COMPONENT && cargo set-version --offline $BUMPED_VERSION_SHORT)\n\n      - name: cargo update\n        if: ${{ env.BUMPED_VERSION != env.CURRENT_VERSION }}\n        run: (cd $MM_COMPONENT && cargo 
update $MM_COMPONENT)\n\n      - name: update BUSL change date\n        if: ${{ env.BUMPED_VERSION != env.CURRENT_VERSION && matrix.component == 'server' }}\n        run: |\n          change_date=$(date -d \"4 years hence\" +%Y-%m-01) # Round down to the 1st of the month\n          sed -i -E \"/Change/s/[0-9]{4}-[0-9]{2}-[0-9]{2}/$change_date/\" LICENSES/BUSL-1.1.txt\n\n      - name: update CHANGELOG.md\n        if: ${{ env.BUMPED_VERSION != env.CURRENT_VERSION }}\n        run: |\n          git cliff -c .github/workflows/cliff.toml \\\n            --include-path \"$MM_COMPONENT*/**/*\" \\\n            --tag-pattern \"$COMPONENT\" \\\n            -t \"$BUMPED_VERSION\" -u \\\n            -p CHANGELOG.md\n\n      - name: generate PR body\n        if: ${{ env.BUMPED_VERSION != env.CURRENT_VERSION }}\n        run: |\n          git cliff -c .github/workflows/cliff.toml \\\n            --include-path \"$MM_COMPONENT*/**/*\" \\\n            --tag-pattern \"$COMPONENT\" \\\n            -t \"$BUMPED_VERSION\" -u > \"$RUNNER_TEMP/pr-body.txt\"\n\n      - name: open PR\n        if: ${{ env.BUMPED_VERSION != env.CURRENT_VERSION }}\n        id: cpr\n        uses: peter-evans/create-pull-request@6d6857d36972b65feb161a90e484f2984215f83e\n        with:\n          draft: true\n          branch: \"auto-bump-${{ matrix.component }}\"\n          title: \":robot: bump mm${{ matrix.component }} to ${{ env.BUMPED_VERSION }}\"\n          commit-message: \"chore: release ${{ env.BUMPED_VERSION }}\"\n          body-path: \"${{ runner.temp }}/pr-body.txt\"\n"
  },
  {
    "path": ".github/workflows/cliff.toml",
    "content": "[changelog]\nrender_always = true\nbody = \"\"\"\n{% if version %}\\\n    ## [{{ version | trim_start_matches(pat=\"v\") }}] - {{ timestamp | date(format=\"%Y-%m-%d\") }}\n{% else %}\\\n    ## [unreleased]\n{% endif %}\\\n{% for group, commits in commits | group_by(attribute=\"group\") %}\n    ### {{ group | striptags | trim | upper_first }}\n    {% for commit in commits %}\n\t- {{ commit.message | upper_first }} \\\n\t({{ commit.id }})\\\n    {% endfor %}\n{% endfor %}\\n\n\"\"\"\n\n[git]\ncommit_parsers = [\n  { message = \"^feat\", group = \"<!-- 0 -->New Features\" },\n  { message = \"^fix\", group = \"<!-- 1 -->Bugfixes\" },\n  { message = \"^doc\", skip = true },\n  { message = \"^perf\", skip = true },\n  { message = \"^refactor\", skip = true },\n  { message = \"^style\", skip = true },\n  { message = \"^test\", skip = true },\n  { message = \"^chore|^ci\", skip = true },\n  { message = \"^build\", skip = true },\n  { body = \".*security\", skip = true },\n  { message = \"^revert\", skip = true },\n]\n\n[bump]\nfeatures_always_bump_minor = false\nbreaking_always_bump_major = false\n"
  },
  {
    "path": ".github/workflows/docs.yaml",
    "content": "on:\n  push:\n    branches: [main, docs]\n\nname: Build documentation site\njobs:\n  build:\n    name: Build\n    runs-on: ubuntu-24.04\n    steps:\n      - uses: dtolnay/rust-toolchain@stable\n      - uses: actions/checkout@v4\n        with:\n          submodules: true\n\n      - uses: swatinem/rust-cache@v2\n        with:\n          workspaces: |\n            mm-protocol\n            mm-client-common\n\n      - name: install protoc\n        run: |\n          sudo apt update\n          sudo apt install protobuf-compiler\n\n      - name: install zola\n        uses: taiki-e/install-action@v2\n        with:\n          tool: zola@0.19.2\n\n      - name: generate config reference\n        run: |\n          mkdir -p docs/content/reference\n          cargo run --manifest-path mm-docgen/Cargo.toml --bin config-docgen \\\n            mmserver.default.toml > docs/content/reference/config.md\n\n      - name: generate protocol reference\n        run: |\n          cargo run --manifest-path mm-docgen/Cargo.toml --bin protocol-docgen \\\n            mm-protocol/src/messages.proto > docs/content/reference/protocol.md\n\n      - name: zola build\n        run: zola -r docs build -o docs/build\n\n      - name: generate rustdoc for mm-protocol\n        run: |\n          cargo doc --manifest-path mm-protocol/Cargo.toml \\\n            --no-deps --target-dir docs/build\n\n      - name: generate rustdoc for mm-client-common\n        run: |\n          cargo doc --manifest-path mm-client-common/Cargo.toml \\\n            --no-deps --target-dir docs/build\n\n      - name: Upload static files\n        id: deployment\n        uses: actions/upload-pages-artifact@v3\n        with:\n          path: docs/build\n  deploy:\n    name: Deploy\n    runs-on: ubuntu-latest\n    needs: build\n    permissions:\n      pages: write\n      id-token: write\n    environment:\n      name: github-pages\n      url: ${{ steps.deployment.outputs.page_url }}\n    steps:\n      - name: Deploy to 
GitHub Pages\n        id: deployment\n        uses: actions/deploy-pages@v4\n\n\n\n\n"
  },
  {
    "path": ".github/workflows/release-mmclient.yaml",
    "content": "on:\n  push:\n    tags:\n      - 'mmclient-v*.*.*'\n\nname: Release mmclient\njobs:\n  create_tarball_linux:\n    name: Build mmclient (linux)\n    runs-on: ubuntu-24.04\n    steps:\n      - uses: dtolnay/rust-toolchain@stable\n        with:\n          targets: x86_64-unknown-linux-gnu\n\n      - name: install deps\n        run: |\n          sudo apt update\n          sudo apt install \\\n            nasm cmake protobuf-compiler libxkbcommon-dev libwayland-dev libasound2-dev \\\n            ffmpeg libavutil-dev libavformat-dev libavdevice-dev libavfilter-dev \\\n            libfontconfig-dev libfreetype-dev libudev-dev\n\n      - uses: actions/checkout@v4\n      - uses: ./.github/actions/install-slang\n        with:\n          token: ${{ secrets.GITHUB_TOKEN }}\n          target: linux-x86_64\n          version: v2025.5\n\n      - uses: swatinem/rust-cache@v2\n        with:\n          workspaces: |\n            mm-client\n            mm-protocol\n\n      - name: cargo build\n        run: (cd mm-client && cargo build --bin mmclient --release --target x86_64-unknown-linux-gnu)\n      - name: create release tarball\n        run: |-\n          mkdir \"${RUNNER_TEMP}/${GITHUB_REF_NAME}\"\n          cp -r mm-client/target/x86_64-unknown-linux-gnu/release/mmclient \\\n            README.md CHANGELOG.md \\\n            \"${RUNNER_TEMP}/${GITHUB_REF_NAME}\"\n          cp LICENSES/MIT.txt \"${RUNNER_TEMP}/${GITHUB_REF_NAME}/LICENSE.txt\"\n          tar -C \"${RUNNER_TEMP}\" --numeric-owner -cvzf \"${GITHUB_REF_NAME}-linux-amd64.tar.gz\" \"$GITHUB_REF_NAME\"\n      - name: upload tarball\n        uses: actions/upload-artifact@v4\n        with:\n          name: mmclient-linux\n          path: mmclient-*.tar.gz\n\n  create_tarball_macos:\n    name: Build mmclient (macos arm)\n    runs-on: macos-latest\n    steps:\n      - uses: dtolnay/rust-toolchain@stable\n        with:\n          targets: aarch64-apple-darwin\n\n      - name: install deps\n        run: |\n    
      brew install ffmpeg@6 protobuf\n          brew link ffmpeg@6\n\n      - uses: actions/checkout@v4\n      - uses: ./.github/actions/install-slang\n        with:\n          token: ${{ secrets.GITHUB_TOKEN }}\n          target: macos-aarch64\n          version: v2024.15.2\n\n      - uses: swatinem/rust-cache@v2\n        with:\n          workspaces: |\n            mm-client\n            mm-protocol\n\n      - name: cargo build\n        run: (cd mm-client && cargo build --bin mmclient --release --features moltenvk_static --target aarch64-apple-darwin)\n      - name: create release tarball\n        run: |-\n          mkdir \"${RUNNER_TEMP}/${GITHUB_REF_NAME}\"\n          cp -r mm-client/target/aarch64-apple-darwin/release/mmclient \\\n            README.md CHANGELOG.md \\\n            \"${RUNNER_TEMP}/${GITHUB_REF_NAME}\"\n          cp LICENSES/MIT.txt \"${RUNNER_TEMP}/${GITHUB_REF_NAME}/LICENSE.txt\"\n          gtar -C \"${RUNNER_TEMP}\" --numeric-owner -cvzf \"${GITHUB_REF_NAME}-darwin-arm64.tar.gz\" \"$GITHUB_REF_NAME\"\n      - name: upload tarball\n        uses: actions/upload-artifact@v4\n        with:\n          name: mmclient-mac\n          path: mmclient-*.tar.gz\n\n  create_tarball_macos_intel:\n    name: Build mmclient (macos intel)\n    runs-on: macos-13\n    steps:\n      - uses: dtolnay/rust-toolchain@stable\n        with:\n          targets: x86_64-apple-darwin\n\n      - name: install deps\n        run: |\n          brew install ffmpeg@6 protobuf\n          brew link ffmpeg@6\n\n      - uses: actions/checkout@v4\n      - uses: ./.github/actions/install-slang\n        with:\n          token: ${{ secrets.GITHUB_TOKEN }}\n          target: macos-x86_64\n          version: v2024.15.2\n\n      - uses: swatinem/rust-cache@v2\n        with:\n          workspaces: |\n            mm-client\n            mm-protocol\n\n      - name: cargo build\n        run: (cd mm-client && cargo build --bin mmclient --release --features moltenvk_static --target 
x86_64-apple-darwin)\n      - name: create release tarball\n        run: |-\n          mkdir \"${RUNNER_TEMP}/${GITHUB_REF_NAME}\"\n          cp -r mm-client/target/x86_64-apple-darwin/release/mmclient \\\n            README.md CHANGELOG.md \\\n            \"${RUNNER_TEMP}/${GITHUB_REF_NAME}\"\n          cp LICENSES/MIT.txt \"${RUNNER_TEMP}/${GITHUB_REF_NAME}/LICENSE.txt\"\n          gtar -C \"${RUNNER_TEMP}\" --numeric-owner -cvzf \"${GITHUB_REF_NAME}-darwin-amd64.tar.gz\" \"$GITHUB_REF_NAME\"\n      - name: upload tarball\n        uses: actions/upload-artifact@v4\n        with:\n          name: mmclient-mac-intel\n          path: mmclient-*.tar.gz\n\n  create_release:\n    name: Create release\n    needs: [create_tarball_linux, create_tarball_macos, create_tarball_macos_intel]\n    runs-on: ubuntu-24.04\n    permissions:\n      contents: write\n    steps:\n      - uses: actions/checkout@v4\n        with:\n          fetch-depth: 0\n          fetch-tags: true\n      - uses: dtolnay/rust-toolchain@stable\n      - name: install git-cliff\n        run: cargo install git-cliff\n      - name: generate release notes\n        run: |-\n          echo \"# Client version ${GITHUB_REF_NAME/mmclient-v/}\" >> release-notes.txt\n          git cliff -c .github/workflows/cliff.toml \\\n            --include-path \"mm-client/**/*\" \\\n            --include-path \"mm-client-common/**/*\" \\\n            --tag-pattern \"client\" \\\n            --latest | tail -n +2 | tee -a release-notes.txt\n\n      - name: download artifacts\n        uses: actions/download-artifact@v4\n        with:\n          merge-multiple: true\n      - name: create release\n        uses: softprops/action-gh-release@v2\n        with:\n          body_path: release-notes.txt\n          files: \"mmclient-*.tar.gz\"\n\n\n"
  },
  {
    "path": ".github/workflows/release-mmserver.yaml",
    "content": "on:\n  push:\n    tags:\n      - 'mmserver-v*.*.*'\n\nname: Release mmserver\njobs:\n  create_release:\n    name: Create mmserver release\n    runs-on: ubuntu-24.04\n    permissions:\n      contents: write\n    steps:\n      - uses: dtolnay/rust-toolchain@stable\n        with:\n          targets: x86_64-unknown-linux-gnu\n\n      - name: install deps\n        run: |\n          sudo apt update\n          sudo apt install nasm cmake protobuf-compiler libxkbcommon-dev\n\n      - uses: actions/checkout@v4\n        with:\n          fetch-depth: 0\n          fetch-tags: true\n\n      - uses: ./.github/actions/install-slang\n        with:\n          token: ${{ secrets.GITHUB_TOKEN }}\n          target: linux-x86_64\n          version: v2025.5\n\n      - uses: swatinem/rust-cache@v2\n        with:\n          workspaces: |\n            mm-server\n            mm-client\n            mm-protocol\n\n      - name: cargo build\n        run: (cd mm-server && cargo build --bin mmserver --release --target x86_64-unknown-linux-gnu)\n      - name: create release tarball\n        run: |-\n          mkdir \"${RUNNER_TEMP}/${GITHUB_REF_NAME}\"\n          cp -r mm-server/target/x86_64-unknown-linux-gnu/release/mmserver \\\n            README.md CHANGELOG.md mmserver.default.toml \\\n            \"${RUNNER_TEMP}/${GITHUB_REF_NAME}\"\n          cp LICENSES/BUSL-1.1.txt \"${RUNNER_TEMP}/${GITHUB_REF_NAME}/LICENSE.txt\"\n          tar -C \"${RUNNER_TEMP}\" --numeric-owner -cvzf \"${GITHUB_REF_NAME}-linux-amd64.tar.gz\" \"$GITHUB_REF_NAME\"\n\n      - name: install git-cliff\n        run: cargo install git-cliff\n      - name: generate release notes\n        run: |-\n          echo \"# Server version ${GITHUB_REF_NAME/mmserver-v/}\" >> release-notes.txt\n          git cliff -c .github/workflows/cliff.toml \\\n            --include-path \"mm-server/**/*\" \\\n            --tag-pattern \"server\" \\\n            --latest | tail -n +2 | tee -a release-notes.txt\n\n      - name: 
create release\n        uses: softprops/action-gh-release@v2\n        with:\n          body_path: release-notes.txt\n          files: \"mmserver-*.tar.gz\"\n\n\n\n"
  },
  {
    "path": ".github/workflows/tests.yaml",
    "content": "on:\n  push:\n    branches: [main, test-ci]\n  pull_request:\n    branches: [main]\nname: Tests\njobs:\n  tests:\n    name: Tests\n    runs-on: ubuntu-24.04\n    steps:\n      - uses: dtolnay/rust-toolchain@stable\n      - name: install deps\n        run: |\n          sudo apt update\n          sudo apt install \\\n            nasm cmake protobuf-compiler libxkbcommon-dev libwayland-dev libasound2-dev \\\n            ffmpeg libavutil-dev libavformat-dev libavdevice-dev libavfilter-dev \\\n            libfontconfig-dev libfreetype-dev libudev-dev\n\n      - uses: actions/checkout@v4\n      - uses: ./.github/actions/install-slang\n        with:\n          token: ${{ secrets.GITHUB_TOKEN }}\n          target: linux-x86_64\n          version: v2025.5\n\n      - uses: swatinem/rust-cache@v2\n        with:\n          workspaces: |\n            mm-server\n            mm-client\n            mm-protocol\n\n      - name: install deny\n        run: cargo install cargo-deny\n\n      - name: server deny\n        run: (cd mm-server && cargo deny check)\n      - name: server tests\n        run: |\n          export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER='sudo -E'\n          (cd mm-server && cargo test -- --test-threads=1)\n      - name: protocol tests\n        run: (cd mm-protocol && cargo test)\n      - name: client tests\n        run: (cd mm-client && cargo test)\n      - name: server cargo clippy\n        run: (cd mm-server && cargo clippy)\n      - name: protocol cargo clippy\n        run: (cd mm-protocol && cargo clippy)\n      - name: client cargo clippy\n        run: (cd mm-client && cargo clippy)\n"
  },
  {
    "path": ".gitignore",
    "content": "# Copyright 2024 Colin Marc <hi@colinmarc.com>\n#\n# SPDX-License-Identifier: MIT\n\ntarget\n.vscode\n.reuse\n*.log\nmm-protocol/Cargo.lock\nmm-client-common/Cargo.lock\n"
  },
  {
    "path": ".gitmodules",
    "content": "[submodule \"docs/themes/anemone\"]\n\tpath = docs/themes/anemone\n\turl = https://github.com/Speyll/anemone\n"
  },
  {
    "path": ".rustfmt.toml",
    "content": "use_field_init_shorthand = true\nuse_try_shorthand = true\n\nunstable_features = true\nformat_code_in_doc_comments = true\nformat_macro_bodies = true\nformat_macro_matchers = true\nformat_strings = true\ngroup_imports = \"StdExternalCrate\"\nnormalize_doc_attributes = true\nwrap_comments = true\n"
  },
  {
    "path": "BUILD.md",
    "content": "## Building `mmserver`\n\nThe following are required to build the server and its dependencies:\n\n```\nrust (MSRV 1.77.2)\nnasm\ncmake\nprotoc\nlibxkbcommon\n```\n\nBesides rust, the following command will install everything on ubuntu:\n\n```\napt install nasm cmake protobuf-compiler libxkbcommon-dev\n```\n\nThen you should be good to go:\n\n```\ncd mm-server\ncargo build --bin mmserver [--release]\n```\n\n### Feature flags\n\nThe following feature flags are available:\n\n - `vulkan_encode` (on by default) - enables hardware encode\n - `svt_encode` (on by default) - enables svt-av1 and svt-hevc for CPU encode\n - `ffmpeg_encode` - allows using system-installed ffmpeg to do CPU encode\n\nNote that `ffmpeg_encode` takes precedence over `svt_encode` if enabled, but the server will always choose hardware encode if available on your platform.\n\n## Building `mmclient`\n\nThe following are required to build the client and its dependencies:\n\n```\nrust (MSRV 1.77.2)\nnasm\ncmake\nprotoc\nlibxkbcommon (only linux)\nlibwayland-client (only linux)\nalsa (only linux)\nffmpeg 6.x\n```\n\nBesides rust, the following command will install everything on ubuntu:\n\n```\napt install \\\n    nasm cmake protobuf-compiler libxkbcommon-dev libwayland-dev libasound2-dev \\\n    ffmpeg libavutil-dev libavformat-dev libavdevice-dev libavfilter-dev\n```\n\nOr using homebrew on macOS:\n\n```\nbrew install nasm cmake ffmpeg@6 protobuf\n```"
  },
  {
    "path": "CHANGELOG.md",
    "content": "## [mmserver-v0.8.4] - 2025-05-21\n\n### Bugfixes\n\n- Make missing hardware encode support a hard error (b16dccb01902b854a2c345406f4df416d3024811)\n\n## [mmserver-v0.8.3] - 2025-03-12\n\n### Bugfixes\n\n- Try to avoid colliding with the system x11 socket (3af95ba9ab012e723d21415baf0b6f4679ba1534)\n- Follow symlinks when calling move_mount (a7505aed296cab4648e2f3752d5901e5d95ded45)\n\n## [mmserver-v0.8.2] - 2025-02-20\n\n### Bugfixes\n\n- Drop application frames if the application is too slow (d73d78dfc37a17fd011c1d3ef1dbfe12c85ed856)\n\n## [mmclient-v0.7.0] - 2025-02-12\n\n### New Features\n\n- Deprecate KeepAlive in favor of connection keepalives (ad3cdca8faf089b85902977e3e48b4a35d5f89e3)\n- Send hierarchical_layer as video frame metadata (ddbe84346fa03f55ebed7289b005b2e36ec23d36)\n- Expose hierarchical_layer (b4bd4c66b62439c71c6a3ed52c26046b3a2f0b6f)\n- Allow clients to configure their connection timeout (452ef70eb118280df9170bed382b8539813e7802)\n\n### Bugfixes\n\n- Remove a useless warning (c98bbe5382914dec22c92f4f160b2f276fb811ef)\n\n## [mmserver-v0.8.0] - 2025-02-12\n\n### New Features\n\n- Make the session timeout configurable (39fa20cadfe7a780088c86f78ef2eae87e0c1222)\n- Send hierarchical_layer as video frame metadata (ddbe84346fa03f55ebed7289b005b2e36ec23d36)\n\n### Bugfixes\n\n- Increment stream_seq when a refresh packet is sent (0fe282ae0fb71b476929ddf61bf71e4041ac0323)\n- Send headers with H265 keyframes (dc7084412c3c4eec661a9887f8c0d031f8dc8a19)\n- Add a warning if users are about to hit #29 (d5591bb4d59a635ffb9796b9c1f1cba9eba22b36)\n\n## [mmserver-v0.7.2] - 2025-02-05\n\n### New Features\n\n- Deprecate KeepAlive in favor of connection keepalives (ad3cdca8faf089b85902977e3e48b4a35d5f89e3)\n\n### Bugfixes\n\n- Remove noexec from /tmp in the container (0c534f6677e07cda77e0384854dded47dd8a949b)\n- Support resampling app audio output (897a053abc568255040a66c356703e3e6c3c9070)\n- Support downmixing by throwing away extra channels 
(17d81d866efc94ed2c2839589541362be3d5aae1)\n- Allow subtitles in application names (2fcac04765ce4af02923314667289ed88094f824)\n- Use aligned width and height for DPB images (d6f3bf713373bbadde0590f44659f8146e44c28d)\n- Relax the app startup timeout (a840c2b27b7adda073820a62d72fd64dc90e752b)\n- Use QUIC PING frames to keep clients alive (95ddb3d6bdc7e761ff596e249cc7be83b3d14cfb)\n- Don't flood the client with pings (b3b3194c042b8d56fda1f8b08f230042bf4461f7)\n- Turn down the heat on udevfs logging (53f448e45110ef722a60f927edd7c5fe58455a19)\n\n## [mmserver-v0.7.1] - 2025-01-30\n\n### New Features\n\n- Let the encoder swap frames (b519680e3e8c552874f53cd88e98859e90698ac8)\n\n### Bugfixes\n\n- Update quiche (c3d1e0080c1040151ecdc08e85584ff267f6eed6)\n- Remove an extra copy on the dgram path (0d204fa2549f4fd9abc804f4996f8fe11162e67b)\n- Fix clippy warning (7b041bbff7c908e57928d621ad74751bb7b76355)\n- Respect layer limits in VkVideoEncodeCapabilities (ce515b4d85b8af4da83a3fed0281f907e28253b1)\n- Print child logs correctly (e8cd88fb344ec74398eae09e757589700de3bff3)\n- Print an error when dmabuf importing fails (112d48d706fc19ce8882e2767c33672e7a044527)\n- Change target for vulkan error logs (641f51675a572402710fe3ee2ff0721857228ab9)\n- Add context (7572ac6a5131486f6d4cf6951742eea9c0f24d25)\n- Use the default congestion control algorithm (7245e624f785b2b01e0b5da507380a88121de542)\n- Get explicit sync working on NVIDIA (8d5786445e56629c67338258b1bc8cc7debb410e)\n- Remove unused import (df03d5d38236b325e826b998cef26ea2d9008e75)\n- Fix explicit sync on nvidia proprietary (8f806b233f537395d72de6e06d3861e73963bec2)\n- Check for the right nvidia version (08b6462a320ee76eabaf4387c354a1a6634ec8df)\n- Fix explicit sync on nvidia proprietary (3c70a79cda9cc545acf665ccacc495ed30f3440a)\n\n## [mmclient-v0.6.0] - 2025-01-26\n\n### New Features\n\n- Support ffmpeg vulkan decode (5c76b29273d3c0b29edb9e34e33096af76814398)\n- Explicit video refresh 
(60dffc04f4f338c3fce6d791211c12d7471a187a)\n- Implement forward error correction (729e652a001d155345c80b7f5fef397a884a1a98)\n\n### Bugfixes\n\n- Enforce non-fractional scales from the client (2a25ca95db01ff8460328f8f258faadf55d948bb)\n- Take application names with path included (100d51e8f44129a23b1df944a897a3123ef12d1c)\n\n## [mmserver-v0.7.0] - 2025-01-26\n\n### New Features\n\n- Enable hierarchical coding (90d636ffba8379da420e09c6f228fb65c334a7f2)\n- Explicit video refresh (60dffc04f4f338c3fce6d791211c12d7471a187a)\n- Implement forward error correction (729e652a001d155345c80b7f5fef397a884a1a98)\n- Remove support for CPU encoders (a5d069cb7bde15931748e41ae3d9e12a6f917445)\n- Log basic attachment stats (b42cb40cb3d5fbedd2a17d37e09da8984029998c)\n\n### Bugfixes\n\n- Pass correct flags to move_mount (af519eebc5a8f251624b3d063a7241910cddf2cc)\n- Pass correct flags to move_mount (take 2) (2e6053675a229dba4fc012b5de4afb723e9a0aca)\n- Enforce non-fractional scales from the client (2a25ca95db01ff8460328f8f258faadf55d948bb)\n- Disable explicit sync if the syncobj_surface is destroyed (e6017dec6bb9daadbbe50898f7cd9cf7c14b19aa)\n- Reduce the verbosity of some logging (b0abe2a76466e98ba8d4f844e88fd4ad4ce6c7ee)\n- Print frame duration from encoder (4a9af4f712ce723306364bece359c9bf18515554)\n- Add overall encode timing to tracy (334d5b37fe394e652bd27224c1c8e905e9c8a794)\n\n## [mmserver-v0.6.1] - 2024-12-17\n\n### New Features\n\n- Save vulkaninfo for --bug-report (6deae3feb5a72a7e0099edd4983814d7fc873f15)\n\n### Bugfixes\n\n- Avoid an endless loop when printing child output (7d700dfa4b9ef6d02e58c4a32151e69055fa3929)\n\n## [mmserver-v0.6.0] - 2024-12-11\n\n### New Features\n\n- Support wp_linux_drm_syncobj_timeline (54f311653d800cf5a7aefe1b54edd27010f219ce)\n- Officially support nvidia proprietary (204126cdfcce09f4971de2e1bb9c86a4adf04d97)\n\n### Bugfixes\n\n- Bind-mount nvidia devices, if present (4bb63d3c1e85f297c5d169219943694f133bbcfa)\n\n## [mmclient-v0.5.0] - 
2024-12-11\n\n### New Features\n\n- Add 'app_path' for organizing apps (b417559625c97e182dc074a5732ea35617332f36)\n- Add header images to the application list (756bfa866020da57be18d383367e0a2b189051aa)\n\n### Bugfixes\n\n- Use Error::ServerError to communicate server errors (a857e0f186b9514cd3e1dc9b0f60df04b4abe3fe)\n- Correctly represent cursor images as optional (b08c76c9c65441fa92156f5282e9b02e98fa3ed9)\n- Be more resilient sending messages on closed streams (8e3eea65ccff2b6448dd9993b9afef9996c6650d)\n- Ensure attachment_ended is called on delegate (fd4d1c41e7da5ec949e26c91cc6171db1a41b1ce)\n- Always send pointer_left events (06010c9cf336d637526dcc308d1ee842e3a21cc1)\n- Handle ipv6 addresses correctly (9d442d2c8ad4c8cbfef96cb378289e1699d17e02)\n- Log connection errors (0ecc6ef05a5470991f1df8d0feaf18ace99b8de8)\n- Remove zone identifiers from hostnames (f9cee190718dc71aad8e9a0372b581a611551289)\n\n## [mmserver-v0.5.6] - 2024-12-08\n\n### New Features\n\n- Warn if the client is using shm buffers (461e8913d9645c240d30a2ce1d269f8ba8aa0e39)\n- Support wp_fractional_scale (2a267e102add6fb72504652375d9ea48ec2c6484)\n\n### Bugfixes\n\n- Handle invalid executables more comprehensively (f51174eb1509cecc73c10ab57cf991ee12a5cce7)\n- Throw an error if the app exe would be shadowed (cc6ee7e3df086bba443bb41471d671a2bd1b191b)\n- Reset staging query state each frame (982afb811ec062ddb6cc498a9cb92e6a4b5472ef)\n- Handle stride of shm buffers correctly (e8e1ee5eeba71d767c543ae83c4fa09b381beba1)\n- Log when container setup fails (83ea7b46fb95e1f1811cf516c55343622f9d9d35)\n- Put the XWayland socket in a canonical location (76056acbdc084307c6d71a66d2c7a343adea9b77)\n- Never discard surface content (f28e947201bc53be91ed13a53ad0221c27f931fb)\n- Handle xdg_popups more gracefully (03e392506a52349a4fdb075f4a4e53008a237958)\n- Translate surface coordinates correctly (9107636d2cb835409df3f604c47eed2d7397e819)\n- Shadow /run/user but not /run (c810f24305a169d896cbe92b57d53fd732bdef09)\n\n## 
[mmserver-v0.5.5] - 2024-12-05\n\n### New Features\n\n- MDNS service discovery (152d82ca7595063aa77db7470e1dfdace9ae7ac2)\n- Add 'app_path' for organizing apps (b417559625c97e182dc074a5732ea35617332f36)\n- Make the mDNS instance name configurable (17e632ccbee15132e2420a5fc162c94171d4a34a)\n- Add header images to the application list (756bfa866020da57be18d383367e0a2b189051aa)\n\n### Bugfixes\n\n- Align h265 bitstreams correctly (fc0543889b70eb0a151084d6a117e464cbeaaca0)\n- Improve error message when using self-signed certs (211dbcded77dc6fd0d97f19a415ca4b286327fb9)\n- Handle differing width/height in encode granularity (6b4b2dac3473d3631da6daa31fd09dc1bd3e2059)\n- Update the maximum message size to reflect the protocol docs (c517624d3683b7ad1e37fc7ea6a18d86c09ccb75)\n- Remove unecessary casts (d28b0b4335eb3e220b004421395e1f7d1d874939)\n- Warn when no hardware encoder is available (bef772948bbb7ff04788016fe74a84eefa7dee8c)\n- Bail early on mesa 24.2 (17758e3269ba661541ee2e94616606f2d935c626)\n\n## [mmserver-v0.5.4] - 2024-11-18\n\n### Bugfixes\n\n- Handle missing /sys/devices/virtual/input (8f316fe41c41101ae18156a41abe2e9ba1e3497f)\n- Lock pointer based on pointer focus (4ce202d3bd9cb764c0586cdc83e890843c3c04d7)\n- Correctly handle an edge case with pointer locks (7c3428932651a372c69b25d1f77dc973746273a9)\n\n## [mmserver-v0.5.3] - 2024-10-24\n\n### Bugfixes\n\n- Be consistent in xwayland socket naming (f6f6db3ab8b61e7af7684f14202d2b203b7e7760)\n- Never use a 0 audio stream_seq (632bcb1f7c79d35701f31a29d2dbe659ab411e3c)\n- Use the attachment coordinate space (57a59f478a6e4e248490b04b8c1ab42d2b1ae115)\n- Don't close streams while partial writes are pending (0add85078734a27e121dda97293f0e48d8ebd214)\n\n## [mmclient-v0.4.1] - 2024-10-24\n\n### Bugfixes\n\n- Handle video/audio stream seq more intelligently (4bab3902d1e7d88c7222ed6ef404190c512b1940)\n- Make the overlay work again (0b1579bf68b2cd31611ca10a735061ef58e64604)\n- Use the attachment coordinate space 
(57a59f478a6e4e248490b04b8c1ab42d2b1ae115)\n- Don't close streams while partial writes are pending (0add85078734a27e121dda97293f0e48d8ebd214)\n- Send relative pointer motion again (7fced702ebe37de5b2f96e46091c6b862806f757)\n\n## [mmserver-v0.5.2] - 2024-10-19\n\n### Bugfixes\n\n- Use getgid if we want the group ID (6a9c71d25d58ff6b5bc4564b99230d76a6599f0e)\n- Use _exit instead of exit or abort (c33a7b8989121706e0286af5efcdd8b5cf1291f1)\n- Pass locale environment variables through to child apps (8022fd1bdb8e64918e15f38b2b4197361841f9d5)\n\n## [mmserver-v0.5.1] - 2024-10-18\n\n### Bugfixes\n\n- Correctly emulate input nodes in udevfs (3fec928dcb5d7d5054d6ca7821864bae74559b9b)\n- Increase the ready timeout (df5ba10642c5ec18064a67f8279d40d3b12baa76)\n- Stub wl_data_device_manager (af1853aaf34c373617b78ddbfbde2d37a977d3df)\n- Don't discard buffers when resending frame callbacks (3b9ce4164bb617ce7e0fd0840bad74fd281fda99)\n- Organize bug report files slightly better (1806d3eea0e33c124f58d413fc3843e288cc0b0e)\n\n## [mmclient-v0.4.0] - 2024-10-18\n\n### New Features\n\n- Plumb controller input through to the server (990f48cdac4181e69ac3cb5dd1473fe16fca3390)\n- Allow specifying 'permanent' gamepads for a session (1d5b7f0a38017e0589c928a9acb6a10075bfac52)\n- Refactor out most of mmclient into a UniFFI rust lib (e8097e594b72a336ace6ef5fe7247304a18dd364)\n- List applications the server can launch (5d042be0f51095e06bbf68cdc3d3e40523c3e5ad)\n- Add a logging interface (b961041ce28b7da961f193b17cd03f4e36c14ea7)\n\n### Bugfixes\n\n- Remove unecessary clone (87c95e63f6c6ce2f63207f96da839408f4617785)\n- Rename Gamepad* enums to reduce the possibility of collision (5fd2241beff203c5c09089456e9326102213c2c2)\n- Prevent a reattaching doom loop (dfa5d75e8daefa3dc15468145f55a5d06e7cd6e1)\n- Correctly invert joystick direction (a60eb398b5f1dd13e1ac660f856a03857decad5b)\n- Round off window height (d4227e772a7d6c8d30919b1e08876ee4a2e55802)\n- Handle gamepad connected events correctly 
(aed00821a8ce3add26ef3ff2226b26e0752c1971)\n- Increase the ready timeout (df5ba10642c5ec18064a67f8279d40d3b12baa76)\n\n## [mmserver-v0.5.0] - 2024-10-15\n\n### New Features\n\n- Plumb controller input through to the server (990f48cdac4181e69ac3cb5dd1473fe16fca3390)\n- Allow specifying 'permanent' gamepads for a session (1d5b7f0a38017e0589c928a9acb6a10075bfac52)\n- Add support for native linux containerization (a37b0db8c5006e4c7b02cc98e506cd68a6ac2aa1)\n- Basic gamepad support (f0eceab777fd38cb085e0f5120fe54ab2a71d362)\n- List applications the server can launch (5d042be0f51095e06bbf68cdc3d3e40523c3e5ad)\n\n### Bugfixes\n\n- Remove a bunch of dead code (b5e88bbe9e472866d9ddd5316a7a8187d7676778)\n- Add description field to application configs (d786828a87ce2c5ed18f373e3be06a1808ad5c42)\n- Include more context when reading config files (d39aaf46c09d2c6d4525dfb3b452374cd1476b9d)\n- Require app names to start with a letter (4182a506ea3a15809c42010ef88da1aeac12278d)\n- Handle unknown message types more gracefully (2978f9b2d41e4916f7a18905586466bb66e92c35)\n- Add application name to session spans (eccca93fd50530d7d658e8a69bb22ef1b689b5a4)\n- Sleep the compositor if no client is attached (e03d8f2914867cc733fa4b44f78f00f7f89ea361)\n- Make reattaching slightly more robust (10cfede5b4ef625f9961b3582ac7dab33cba6dd7)\n- If using layers > 0, pass that many rate control layers (3a201510794deaebf262a81e8b02e8a3d9359cfd)\n- Get hierarchical coding working on H265 (7b63cc694b28eb7fd1e9155a182e5446b80ef998)\n- Add some preflight checks at startup (91e00002073a1c07af73fb5a7f1e27a5779d66b3)\n- Improve shutdown behavior (5e77d7719313c2c6d53fa3335aec06840a9fe92a)\n- Use putenv instead of Command::env (0a832c0f606a9d130eeca0bcb334dc6c5d65e169)\n- Remove unshare as a dependency (e5c4575e3cacc9d00656cda7af114a0eb471777c)\n\n## [mmserver-v0.4.1] - 2024-08-16\n\n### Bugfixes\n\n- Time out if the compositor doesn't accept an attachment in a reasonable timeframe 
(c1d6c6ca82fe3ff5ffcbf204c7f90e149b82f0ae)\n- Explicitly close QUIC streams when a worker finishes (a4b0c18e4af7455dcde689b241e4fe2737e50f57)\n- Never use 0 as a stream_seq (8fc95e4ef0d4a01d9c1809860a633c7417913115)\n- Raise the attachment timeout to account for slow clients (6b60df3e7625da72157b5a6ae8479e9e05469c71)\n- Set a default for video_profile (b4f2e01548ad0d374b4fc816f6a2a5c7c11f1751)\n- Correctly send vertical scroll events (6a25863b00f049d354dda5f598a3f507db653285)\n- Change order of press/release when simulating repeat (6df3f5cea5f8e6b2e2634f1307b2c4ee054ed638)\n\n## [mmserver-v0.4.0] - 2024-08-02\n\n### New Features\n\n- Rewrite compositor from scratch (945a7793abbbc377f8c9ad1a852715203a16b097)\n- Allow attachments to be configured for HDR10 output (0c4b85af422378881f550f61882439b1a4abade1)\n- Support streaming in HDR (713dbbdce931e0ba98cc51bf144a2fe26dd9e2a1)\n\n### Bugfixes\n\n- Improve compositor error messages with s/client/app (e5b24afe2ccd8ce77f74a5732a2e02f723256cda)\n\n## [mmclient-v0.3.0] - 2024-08-02\n\n### New Features\n\n- Allow attachments to be configured for HDR10 output (0c4b85af422378881f550f61882439b1a4abade1)\n- Support playing HDR streams (12ef76930f729af0331bb83c3ceadb110bf22a6f)\n\n### Bugfixes\n\n- Make --detach the default (7ca5ee3ea03bcc19f754c1542675be360e3216af)\n- Take name or id for --kill (7a1f8c1483bd43c292e5ec8189535b0e59fc453c)\n- Move the cursor before locking it (2a5cc571f868c7ade0c9798b41e96ee21209de4d)\n- Calculate RTT correctly (4762c1ab0594897949e4ce81a7897fab30d9c7fe)\n- Make sure session width/height are even (5a344ade0e3cd62c1c8e0f4b99d6be8dee7b513f)\n- Handle ConnectionClosed (953b9d4398ccca75b4108da0c31589c56747ff70)\n- Ensure --ui-scale overrides environment scaling (776b4dc2c5462a05c8520e769361f3136d5bcc6a)\n- Swap order of lock/warp when locking cursor on not-mac (525622b29d46fc8e659d0e3c37cf920faf587866)\n\n## [mmclient-v0.2.0] - 2024-05-08\n\n### New Features\n\n- Cursor locking and relative motion 
(e11dfec7e42802a528ac8c8b4629044e6d6b1c3f)\n- Add --preset, for setting quality/bandwidth usage dynamically (6c590efaab02e31aae8413b683e8f8d228256b3b)\n\n### Bugfixes\n\n- Don't sync every frame (5a7f1cfe11e6684e11bd618e2f1adf4d043640f5)\n\n## [mmserver-v0.3.0] - 2024-05-08\n\n### New Features\n\n- Cursor locking and relative motion (e11dfec7e42802a528ac8c8b4629044e6d6b1c3f)\n- Add --preset, for setting quality/bandwidth usage dynamically (6c590efaab02e31aae8413b683e8f8d228256b3b)\n\n### Bugfixes\n\n- Remove debugging code (152a1714ca950256f136757f47b7b2cf587d6880)\n- Un-transpose min and max QP (0570a6470b934e62dd4c9dcc42467a6db1a311e4)\n- Correctly set max QP on lower presets (b3f73533bb896c93d4a1d4e5c8efc336e329042c)\n- Prevent a segfault on nvidia (8b331b5de98a50dd3c59671a2dbfe37b966b95b9)\n- Re-send cursor status when reattaching (eba4a368c33a5bcd1cdf27a8b791f31ff466bb29)\n\n## [mmclient-v0.1.2] - 2024-05-05\n\n### Bugfixes\n\n- Actually sync video and audio (4822bda39b4a5f07ed74e4fd76d5b080ea1c2078)\n- Tune verbosity of conn message (e9f0d18da517e1c7f1ab34d9c154b8ba70573f2e)\n- Fix typo in conn init (d8dd70b25952e1d1155bf8e6930d2304ca51c79e)\n\n## [mmserver-v0.2.0] - 2024-05-05\n\n### New Features\n\n- Add enable_datagrams, off by default (e1dc976ee3228b006b874e077cd2c6cf7f784927)\n- Add glxinfo and eglinfo output to --bug-report (696464d9b980f1664e2b9dcce9e6f6dde83407f2)\n\n### Bugfixes\n\n- Don't panic on dmabuf cursors (9f87ce7d99289ba31ad11b5d1796b992fd21c796)\n- Print version after initializing logging (f708ad2d8e5ddc9fb17ac023fef8f81706c31be7)\n- Handle full send queues more gracefully (face8776acea8c22e4d83b62c54ece5682f95cee)\n- Manually enable radv encode (26ba3f93f3da29921f9754181738f2087284a164)\n- Correctly expose a vulkan fn (2c627c94569050d0b53429204e8153119d268560)\n- Write xwayland logs to the bug report dir (0ba97f5f3bd72caf7df815e341c4c4f0a807b094)\n- Support older versions of xwayland with wl_drm 
(54c9724a476d023547fb1c2ccc5d74bc6eadc6a3)\n- Kill hung clients (5179e6688a2bc8fcceded03c0d92e2a00c38fb99)\n- Implement basic rate control (781c97e3efde247ef437ad2e19e8cdf57b6d216e)\n- Log entire config (b588f198d13122869936b52c0690e980586a7f88)\n- Garbage-collect partial writes (a095994de28ec31bd49a54c2d757493f41fc0c06)\n\n## [mmclient-v0.1.1] - 2024-05-05\n\n### Bugfixes\n\n  - Increase the default timeout when waiting for frames (a8aefcb295803d087349625a37e1fdef3f2ec9d7)\n  - Handle video frames sent over the attachment stream (c0ecfba8fd5f06a64ab2e3c5d02731938a41170b)\n  - Handle VideoChunk messages on the attachment stream (75f409d1b2c0685bf6e4413a44535798a7a53a71)\n  - Handle AudioChunk messages on the attachment stream (3a63b07149fd36308d72378c66b53c41574abb1e)\n  - Be more robust in the face of bad stream data (7c920b66451e615205cea7a8d229c068c340324c)\n  - Respect hidden cursors (003fe97034cbbd71a8845841cf9d26e592c27696)\n\n"
  },
  {
    "path": "LICENSES/BUSL-1.1.txt",
    "content": "Business Source License 1.1\n\nParameters\n----------\n\nLicensor:             Colin Marc <hi@colinmarc.com>\nLicensed Work:        Magic Mirror\n\nAdditional Use Grant: You may make use of the Licensed Work, provided that\n                      you may not use the Licensed Work for a Game Streaming\n                      or Remote Desktop service.\n\n                      A \"Game Streaming or Remote Desktop service” is a\n                      commercial offering that allows third parties (other than\n                      your employees and contractors) to access the\n                      functionality of the Licensed Work, thereby utilizing\n                      graphics processing hardware owned or operated by you.\n\nChange Date:          2029-05-01\nChange License:       MIT License\n\nFor information about alternative licensing arrangements for the Software,\nplease contact the Licensor at hi@colinmarc.com.\n\nNotice\n\nThe Business Source License (this document, or the “License”) is not an Open\nSource license. However, the Licensed Work will eventually be made available\nunder an Open Source License, as stated in this License.\n\nLicense text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved.\n“Business Source License” is a trademark of MariaDB Corporation Ab.\n\n-----------------------------------------------------------------------------\n\nBusiness Source License 1.1\n\nTerms\n\nThe Licensor hereby grants you the right to copy, modify, create derivative\nworks, redistribute, and make non-production use of the Licensed Work. 
The\nLicensor may make an Additional Use Grant, above, permitting limited\nproduction use.\n\nEffective on the Change Date, or the fourth anniversary of the first publicly\navailable distribution of a specific version of the Licensed Work under this\nLicense, whichever comes first, the Licensor hereby grants you rights under\nthe terms of the Change License, and the rights granted in the paragraph\nabove terminate.\n\nIf your use of the Licensed Work does not comply with the requirements\ncurrently in effect as described in this License, you must purchase a\ncommercial license from the Licensor, its affiliated entities, or authorized\nresellers, or you must refrain from using the Licensed Work.\n\nAll copies of the original and modified Licensed Work, and derivative works\nof the Licensed Work, are subject to this License. This License applies\nseparately for each version of the Licensed Work and the Change Date may vary\nfor each version of the Licensed Work released by Licensor.\n\nYou must conspicuously display this License on each original or modified copy\nof the Licensed Work. If you receive the Licensed Work in original or\nmodified form from a third party, the terms and conditions set forth in this\nLicense apply to your use of that work.\n\nAny use of the Licensed Work in violation of this License will automatically\nterminate your rights under this License for the current and all other\nversions of the Licensed Work.\n\nThis License does not grant you any right in any trademark or logo of\nLicensor or its affiliates (provided that you may use a trademark or logo of\nLicensor as expressly required by this License).\n\nTO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON\nAN “AS IS” BASIS. 
LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,\nEXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND\nTITLE.\n\nMariaDB hereby grants you permission to use this License’s text to license\nyour works, and to refer to it using the trademark “Business Source License”,\nas long as you comply with the Covenants of Licensor below.\n\nCovenants of Licensor\n\nIn consideration of the right to use this License’s text and the “Business\nSource License” name and trademark, Licensor covenants to MariaDB, and to all\nother recipients of the licensed work to be provided by Licensor:\n\n1. To specify as the Change License the GPL Version 2.0 or any later version,\n   or a license that is compatible with GPL Version 2.0 or a later version,\n   where “compatible” means that software provided under the Change License can\n   be included in a program with software provided under GPL Version 2.0 or a\n   later version. Licensor may specify additional Change Licenses without\n   limitation.\n\n2. To either: (a) specify an additional grant of rights to use that does not\n   impose any additional restriction on the right granted in this License, as\n   the Additional Use Grant; or (b) insert the text “None”.\n\n3. To specify a Change Date.\n\n4. Not to modify this License in any other way.\n"
  },
  {
    "path": "LICENSES/MIT.txt",
    "content": "MIT License\n\nCopyright (c) <year> <copyright holders>\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "# Magic Mirror 🪞✨\n[![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/colinmarc/magic-mirror/tests.yaml)](https://github.com/colinmarc/magic-mirror/actions/workflows/tests.yaml)\n[![Discord](https://img.shields.io/discord/1284975819222945802?style=flat&label=discord&color=7289DA)](https://discord.gg/v22G644DzS)\n\nThis is a game streaming and remote desktop tool for Linux hosts, featuring:\n\n - **Headless multitenant rendering:** Streamed applications are run offscreen, isolated from the rest of the system and any display hardware.\n - **No system dependencies:** The server is a single static binary, and there's no dependency on docker, pipewire, or any other systemwide setup.\n - **Native linux containerization:** apps are isolated in rootless containers with the equivalent of unshare(1), using new Linux namespace features\n - **High quality, tunable, 4k streaming:** See the [list of supported codecs](https://colinmarc.github.io/magic-mirror/setup/server/#hardware-software-encoding). 10-bit HDR support is in progress.\n - **Very low latency:** No extra CPU-GPU copy when using hardware encode. Total latency is less than one frame.\n - **Local cursor rendering:** Use the client-side cursor for minimal input lag.\n - **Client support for macOS and Linux:** A [SwiftUI client](https://github.com/colinmarc/magic-mirror-swiftui/releases/latest) is available for macOS, with tvOS/iOS support coming soon.\n\n> [!WARNING]\n> Alpha software! Please submit any issues you encounter. 
Run the server with `--bug-report` to generate detailed logs and record videos to attach to your report.\n\n### Quick Links\n\n - [Documentation Book](https://colinmarc.github.io/magic-mirror)\n - [Latest Server Release [mmserver-v0.8.4]](https://github.com/colinmarc/magic-mirror/releases/tag/mmserver-v0.8.4)\n - [Latest CLI Client Release [mmclient-v0.7.0]](https://github.com/colinmarc/magic-mirror/releases/tag/mmclient-v0.7.0)\n - [Latest macOS Client Release](https://github.com/colinmarc/magic-mirror-swiftui/releases/latest)\n - [Discord](https://discord.gg/v22G644DzS)\n"
  },
  {
    "path": "auto-release.sh",
    "content": "#!/bin/sh -e\n\ndie() {\n    RED=\"\\033[31m\"\n    RESET=\"\\033[0m\"\n    echo -e \"${RED}$1${RESET}\"\n    exit 1\n}\n\ncase $1 in\n\"client\" | \"server\")\n    component=$1\n    ;;\n    *)\n    die \"invalid component: $1\"\n    exit 1\n    ;;\nesac\n\nif [ -n \"$(git status --untracked-files=no --porcelain)\" ]; then\n    die \"working directory not clean; exiting\"\n    exit 1\nfi\n\nbranch=\"auto-bump-${component}\"\ngit fetch -q origin \"${branch}\"\ntag=\"$(git show -s --format=%s origin/${branch} | awk '{print $NF}')\"\nif [ -n \"$(git tag | grep ${tag})\" ]; then\n    die \"tag exists\"\nfi\n\necho \"bumping mm${component} to ${tag}...\"\ngit cherry-pick -S \"origin/${branch}\"\n\necho \"generating release notes...\"\nrelease_notes=\"$(git cliff -v -c .github/workflows/cliff.toml \\\n\t\t --tag-pattern \"${component}\" \\\n\t\t --include-path \"mm-${component}*/**/*\" \\\n\t\t --unreleased --tag ${tag})\"\n\ngit tag ${tag} -a -m \"${release_notes}\" --cleanup=verbatim\ngit show ${tag}\n\n\n\n"
  },
  {
    "path": "docs/.gitignore",
    "content": "# autogenerated\ncontent/reference\nbuild/\npublic/\n"
  },
  {
    "path": "docs/config.toml",
    "content": "base_url = \"https://colinmarc.github.io/magic-mirror\"\ntheme = \"anemone\"\ncompile_sass = false\nbuild_search_index = false\n\n[markdown]\nhighlight_code = true\n\n[extra]\ntwitter_card = false\nheader_nav = [\n  { url = \"https://colinmarc.github.io/magic-mirror\", name_en = \"/home/\"},\n  { url = \"https://github.com/colinmarc/magic-mirror\", name_en = \"/github/\"},\n  { url = \"https://discord.gg/v22G644DzS\", name_en = \"/discord/\"},\n]\n"
  },
  {
    "path": "docs/content/_index.md",
    "content": "+++\n+++\n\n# Magic Mirror 🪞✨\n\n<picture>\n  <source srcset=\"header_dark.png\" media=\"(prefers-color-scheme: dark)\" />\n  <img src=\"header_light.png\" />\n</picture>\n\nThis page contains documentation for [Magic Mirror](https://github.com/colinmarc/magic-mirror),\nan open-source game streaming and remote desktop tool for linux hosts.\n\n### Download\n\nThese links always point to the latest release.\n\n - 💾 [Server [mmserver-v0.8.4]](https://github.com/colinmarc/magic-mirror/releases/tag/mmserver-v0.8.4)\n - 💾 [Command-Line Client [mmclient-v0.7.0]](https://github.com/colinmarc/magic-mirror/releases/tag/mmclient-v0.7.0)\n - 💾 [macOS GUI Client](https://github.com/colinmarc/magic-mirror-swiftui/releases/latest)\n\n### Setup Guides\n\nStart here to get things up and running.\n\n - ⚙️  [Server Setup](@/setup/server.md)\n - ⚙️  [Client Setup](@/setup/client.md)\n<!-- - ⚙️  [Running on a Cloud VPS](./setup/vps.md) -->\n<!-- - ⚙️  [Troubleshooting and Known Issues](@/setup/troubleshooting.md) -->\n\n### Reference\n\nAutogenerated from the code.\n\n - 📖 [Configuration Reference](@/reference/config.md)\n - 📖 [Protocol Reference](@/reference/protocol.md)\n - 📖 [Rustdoc for `mm-protocol`](./doc/mm_protocol)\n - 📖 [Rustdoc for `mm-client-common`](./doc/mm_client_common)\n\n### Contact\n\nGet help, report issues, make friends.\n\n - ⁉️  [Issue Tracker](https://github.com/colinmarc/magic-mirror/issues)\n - 💬 [Discord Chat](https://discord.gg/v22G644DzS)\n"
  },
  {
    "path": "docs/content/setup/client.md",
    "content": "+++\ntitle = \"Client Setup\"\n\n[extra]\ntoc = true\n+++\n\n## macOS GUI Client\n\nThe native macOS client can be downloaded from [the releases page](https://github.com/colinmarc/magic-mirror-swiftui/releases/latest).\n\nIt should work out of the box on ARM and Intel Macs running macOS 10.14 or\nlater.\n\n## Installing the commandline client\n\nThere is also a cross-platform commandline client, `mmclient`. You can download\nit [here](https://github.com/colinmarc/magic-mirror/releases/tag/mmclient-v0.7.0).\n\nThe commandline client requires `ffmpeg` 6.0 or later to be installed on the\nsystem. It also requires up-to-date Vulkan drivers.\n\n## Building mmclient\n\nThe following are required to build the client and its dependencies:\n\n```\nrust (MSRV 1.77.2)\nnasm\ncmake\nprotoc\nlibxkbcommon (linux only)\nlibwayland-client (linux only)\nalsa (linux only)\nffmpeg 6.x\n```\n\nBesides Rust itself, the following command will install everything on ubuntu:\n\n```\napt install \\\n    nasm cmake protobuf-compiler libxkbcommon-dev libwayland-dev libasound2-dev \\\n    ffmpeg libavutil-dev libavformat-dev libavdevice-dev libavfilter-dev\n```\n\nOr using homebrew on macOS:\n\n```\nbrew install nasm cmake ffmpeg@6 protobuf\n```\n"
  },
  {
    "path": "docs/content/setup/server.md",
    "content": "+++\ntitle = \"Server Setup\"\n\n[extra]\ntoc = true\n+++\n\n## Quickstart\n\nFirst, grab [the latest server release](https://github.com/colinmarc/magic-mirror/releases/tag/mmserver-v0.8.4) and untar it somewhere:\n\n```sh\ncurl -fsSL \"https://github.com/colinmarc/magic-mirror/releases/download/mmserver-v0.8.4/mmserver-v0.8.4-linux-amd64.tar.gz\" \\\n    | tar zxv\ncd mmserver-v0.8.4\n```\n\nThen, create a [configuration file](@/reference/config.md) with at least one application definition:\n\n```toml\n# mmserver.toml\n[apps.steam-gamepadui]\ncommand = [\"steam\", \"-gamepadui\"]\nxwayland = true\n```\n\nThen you can start the server like so:\n\n```\n$ ./mmserver -C config.toml\n2024-12-09T16:57:30.989261Z  INFO mmserver: listening on [::1]:9599\n```\n\nYou can also create a configuration directory, and add a file (json or toml) for each application:\n\n```sh\nmkdir apps.d\necho 'command = [\"steam\", \"-gamepadui\"]' > apps.d/steam.toml\n./mmserver -i apps.d\n```\n\n## Connectivity\n\nBy default, mmserver only listens on `localhost`, which is not terribly\nuseful. There are a few different options to configure which socket address the\nserver listens for connections on.\n\nThe easiest is to bind to a local IP, or use a VPN like wireguard or tailscale:\n\n```toml\n# config.toml\n[server]\nbind = \"192.168.1.37:9599\"\n```\n\nOr from the command line:\n\n```sh\nmmserver --bind $(tailscale ip -4):9599\n```\n\nIf you'd like to stream on a public IP, or on all interfaces (with `0.0.0.0`),\nmmserver requires that you set up a TLS certificate and key:\n\n```toml\n# config.toml\n[server]\ntls_cert = \"/path/to/tls.key\"\ntls_key = \"/path/to/tls.cert\"\n```\n\nGenerating such certificates and adding them to the client is out of scope for\nthis guide. 
Note that while all Magic Mirror traffic is encrypted with TLS\n(whether you supply certificates or not), no _authentication_ is performed on\nincoming connections.\n\nFinally, you can also use `--bind-systemd` or `bind_systemd = true` to bind to a\n[systemd socket](https://www.freedesktop.org/software/systemd/man/latest/systemd.socket.html).\n\n## System Requirements\n\nThe following is required to run the server:\n\n - Linux 6.x (for Ubuntu, this means Mantic or Noble)\n - (For AMD/Intel cards) Mesa 24.3.x or later\n - (For NVIDIA cards) [Vulkan drivers](https://developer.nvidia.com/vulkan-driver) version 550 or later\n - XWayland (for X11 apps)\n\n## Hardware encoding\n\nMagic Mirror uses hardware-based video compression codecs to stream the game over the wire.\n\nTo see if your GPU supports video encoding, see the following matrix for your vendor:\n - [AMD](https://en.wikipedia.org/wiki/Unified_Video_Decoder#Format_support)\n - [NVIDIA](https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new)\n\n| Codec | AMD | NVIDIA | Intel |\n| ----- | :-: | :----: | :---: |\n| H.264 |  ✅ |   ✅   |   ❔  |\n| H.265 |  ✅ |   ✅   |   ❔  |\n|  AV1  |  ❌ |   ❌   |   ❌  |\n\n## Building `mmserver` from source\n\nThe following are required to build the server and its dependencies:\n\n```\nrust (MSRV 1.77.2)\nnasm\ncmake\nprotoc\nlibxkbcommon\n```\n\nBesides Rust itself, the following command will install everything on ubuntu:\n\n```\napt install nasm cmake protobuf-compiler libxkbcommon-dev\n```\n\nThen you should be good to go:\n\n```\ncd mm-server\ncargo build --bin mmserver [--release]\n```\n"
  },
  {
    "path": "docs/templates/footer.html",
    "content": ""
  },
  {
    "path": "mm-client/Cargo.toml",
    "content": "# Copyright 2024 Colin Marc <hi@colinmarc.com>\n#\n# SPDX-License-Identifier: MIT\n\n[package]\nname = \"mm-client\"\nversion = \"0.7.0\"\nedition = \"2021\"\n\n[[bin]]\nname = \"mmclient\"\npath = \"src/bin/mmclient.rs\"\n\n[[bin]]\nname = \"latency-test\"\npath = \"src/bin/latency-test.rs\"\n\n[dependencies]\nanyhow = \"1\"\nash = \"0.38\"\nash-window = \"0.13.0\"\nbytes = \"1\"\nclap = { version = \"4\", features = [\"derive\"] }\ncpal = \"0.15\"\ncrossbeam-channel = \"0.5\"\ncstr = \"0.2\"\nffmpeg-next = \"7\"\nffmpeg-sys-next = \"7\"\nfont-kit = \"0.11\"\ngilrs = \"0.10\"\nglam = \"0.26\"\nhisto = \"1\"\nhumantime = \"2\"\nimage = { version = \"0.25\", default-features = false, features = [\"png\"] }\nimgui = { version = \"0.12.0\", features = [\"tables-api\"] }\nimgui-sys = \"0.12.0\"\nimgui-winit-support = \"0.13.0\"\nimgui-rs-vulkan-renderer = { version = \"1.16.0\", features = [\"dynamic-rendering\"] }\nlazy_static = \"1\"\noneshot = { version = \"0.1\", default-features = false, features = [\"std\"] }\nopus = \"0.3\"\npollster = \"0.3\"\nrand = \"0.8\"\nraw-window-handle = \"0.5\"\nsimple_moving_average = \"1\"\ntabwriter = \"1\"\ntracing = \"0.1\"\ntracing-subscriber = { version = \"0.3\", features = [\"time\", \"env-filter\"] }\ntracy-client = { version = \"0.17\", default-features = false }\ntracing-tracy = { version = \"0.11\", default-features = false }\n\n[dependencies.mm-protocol]\npath = \"../mm-protocol\"\n\n[dependencies.mm-client-common]\npath = \"../mm-client-common\"\n\n[dependencies.dasp]\nversion = \"0.11\"\nfeatures = [\"slice\", \"signal\", \"interpolate\", \"interpolate-linear\"]\n\n[dependencies.winit]\nversion = \"0.30\"\ndefault-features = false\nfeatures = [\"wayland\", \"x11\", \"rwh_06\"]\n\n[target.'cfg(target_os = \"macos\")'.dependencies]\nash-molten = { version = \"0.18\", optional = true }\n\n[build-dependencies.slang]\ngit = \"https://github.com/colinmarc/slang-rs\"\nrev = 
\"075daa4faa8d1ab6d7bfbb5293812b087a527207\"\n# Uses SLANG_DIR if set, otherwise builds slang from source\nfeatures = [\"from-source\"]\n\n[features]\ndefault = []\nmoltenvk_static = [\"dep:ash-molten\"]\ntracy = [\"tracy-client/enable\"]\n"
  },
  {
    "path": "mm-client/build.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\n// extern crate shaderc;\n\nuse std::path::PathBuf;\n\nextern crate slang;\n\nfn main() {\n    let mut session = slang::GlobalSession::new();\n    let out_dir = std::env::var(\"OUT_DIR\").map(PathBuf::from).unwrap();\n\n    compile_shader(\n        &mut session,\n        \"src/render.slang\",\n        out_dir.join(\"shaders/frag.spv\").to_str().unwrap(),\n        \"frag\",\n        slang::Stage::Fragment,\n    );\n\n    compile_shader(\n        &mut session,\n        \"src/render.slang\",\n        out_dir.join(\"shaders/vert.spv\").to_str().unwrap(),\n        \"vert\",\n        slang::Stage::Vertex,\n    );\n}\n\nfn compile_shader(\n    session: &mut slang::GlobalSession,\n    in_path: &str,\n    out_path: &str,\n    entry_point: &str,\n    stage: slang::Stage,\n) {\n    std::fs::create_dir_all(PathBuf::from(out_path).parent().unwrap())\n        .expect(\"failed to create output directory\");\n\n    let mut compile_request = session.create_compile_request();\n\n    compile_request\n        .add_search_path(\"../shader-common\")\n        .set_codegen_target(slang::CompileTarget::Spirv)\n        .set_optimization_level(slang::OptimizationLevel::Maximal)\n        .set_target_profile(session.find_profile(\"glsl_460\"));\n\n    let entry_point = compile_request\n        .add_translation_unit(slang::SourceLanguage::Slang, None)\n        .add_source_file(in_path)\n        .add_entry_point(entry_point, stage);\n\n    let shader_bytecode = compile_request\n        .compile()\n        .expect(\"Shader compilation failed.\");\n\n    std::fs::write(out_path, shader_bytecode.get_entry_point_code(entry_point))\n        .expect(\"failed to write shader bytecode to file\");\n\n    println!(\"cargo::rerun-if-changed={}\", in_path);\n}\n"
  },
  {
    "path": "mm-client/src/audio/buffer.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::collections::VecDeque;\n\npub struct PlaybackBuffer<F>\nwhere\n    F: dasp::Frame,\n{\n    /// A queue of audio frames.\n    samples: VecDeque<F>,\n    /// The PTS and packet length (in frames) for each packet. Kept in sync with\n    /// `samples`.\n    pts: VecDeque<(u64, usize)>,\n}\n\nimpl<F> PlaybackBuffer<F>\nwhere\n    F: dasp::Frame,\n{\n    pub fn new() -> Self {\n        PlaybackBuffer {\n            samples: VecDeque::new(),\n            pts: VecDeque::new(),\n        }\n    }\n\n    /// Returns the number of frames in the buffer.\n    pub fn len(&self) -> usize {\n        self.samples.len()\n    }\n\n    /// Adds frames to the back of the buffer.\n    pub fn buffer(&mut self, pts: u64, frames: &[F]) {\n        self.pts.push_back((pts, frames.len()));\n        self.samples.extend(frames.iter());\n    }\n\n    /// Returns the PTS of the head packet in the audio buffer.\n    pub fn current_pts(&self) -> u64 {\n        self.pts\n            .front()\n            .expect(\"current_pts called before buffer\")\n            .0\n    }\n\n    /// Returns an iterator that pops frames from the front of the buffer.\n    pub fn drain(&mut self) -> Draining<F> {\n        Draining { buffer: self }\n    }\n\n    /// Discards the first N frames from the buffer.\n    pub fn skip(&mut self, frames: usize) {\n        self.samples.drain(..frames);\n\n        let mut remaining = frames;\n        loop {\n            let (_, len) = self.pts.front_mut().expect(\"skip called before buffer\");\n            if *len <= remaining {\n                remaining -= *len;\n                self.pts.pop_front();\n            } else {\n                *len -= remaining;\n                break;\n            }\n        }\n    }\n}\n\npub struct Draining<'a, F>\nwhere\n    F: dasp::Frame,\n{\n    buffer: &'a mut PlaybackBuffer<F>,\n}\n\nimpl<F> Iterator for Draining<'_, F>\nwhere\n    
F: dasp::Frame,\n{\n    type Item = F;\n\n    fn next(&mut self) -> Option<Self::Item> {\n        let frame = self.buffer.samples.pop_front()?;\n\n        if let Some((_, remaining)) = self.buffer.pts.front_mut() {\n            *remaining -= 1;\n            if *remaining == 0 {\n                self.buffer.pts.pop_front();\n            }\n        }\n\n        Some(frame)\n    }\n}\n"
  },
  {
    "path": "mm-client/src/audio.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nmod buffer;\n\nuse std::{\n    sync::{Arc, Mutex},\n    time,\n};\n\nuse anyhow::{bail, Context as _};\nuse buffer::PlaybackBuffer;\nuse cpal::traits::{DeviceTrait as _, HostTrait as _, StreamTrait};\nuse crossbeam_channel as crossbeam;\nuse dasp::Signal;\nuse mm_client_common as client;\nuse tracing::{debug, error, info, trace};\n\ntrait DecodePacket<T> {\n    fn decode(&mut self, input: &[u8], output: &mut [T]) -> anyhow::Result<usize>;\n}\n\nimpl DecodePacket<f32> for opus::Decoder {\n    fn decode(&mut self, packet: &[u8], output: &mut [f32]) -> anyhow::Result<usize> {\n        let len = self.decode_float(packet, output, false)?;\n        Ok(len)\n    }\n}\n\nimpl DecodePacket<i16> for opus::Decoder {\n    fn decode(&mut self, packet: &[u8], output: &mut [i16]) -> anyhow::Result<usize> {\n        let len = self.decode(packet, output, false)?;\n        Ok(len)\n    }\n}\n\n// This is a trait object so we can erase the sample/frame generic type.\ntrait StreamWrapper {\n    #[allow(clippy::new_ret_no_self)]\n    fn new(\n        device: &cpal::Device,\n        conf: cpal::StreamConfig,\n    ) -> anyhow::Result<(Box<dyn StreamWrapper>, cpal::Stream)>\n    where\n        Self: Sized;\n\n    fn sync(&mut self, pts: u64);\n    fn send_packet(&mut self, packet: Arc<client::Packet>) -> anyhow::Result<()>;\n}\n\nstruct StreamInner<F: dasp::Frame> {\n    sync_point: Arc<Mutex<Option<(u64, time::Instant)>>>,\n    _buffer: Arc<Mutex<PlaybackBuffer<F>>>,\n    thread_handle: Option<std::thread::JoinHandle<anyhow::Result<()>>>,\n    undecoded_tx: Option<crossbeam::Sender<Arc<client::Packet>>>,\n}\n\nimpl<F> StreamWrapper for StreamInner<F>\nwhere\n    F: dasp::Frame + Send + 'static,\n    F::Sample: cpal::SizedSample + dasp::sample::Duplex<f64> + Default,\n    opus::Decoder: DecodePacket<F::Sample>,\n    for<'a> &'a [F::Sample]: dasp::slice::ToFrameSlice<'a, F>,\n{\n    fn 
new(\n        device: &cpal::Device,\n        conf: cpal::StreamConfig,\n    ) -> anyhow::Result<(Box<dyn StreamWrapper>, cpal::Stream)> {\n        let sample_rate = conf.sample_rate.0;\n\n        let mut decoder = {\n            let ch = match F::CHANNELS {\n                1 => opus::Channels::Mono,\n                2 => opus::Channels::Stereo,\n                _ => bail!(\"unsupported number of channels: {}\", F::CHANNELS),\n            };\n\n            opus::Decoder::new(sample_rate, ch)?\n        };\n\n        let buffer = Arc::new(Mutex::new(PlaybackBuffer::new()));\n        let (undecoded_tx, undecoded_recv) = crossbeam::unbounded::<Arc<client::Packet>>();\n\n        // Spawn a thread to eagerly decode packets.\n        let buffer_clone = buffer.clone();\n        let thread_handle = std::thread::Builder::new()\n            .name(\"audio decode\".into())\n            .spawn(move || {\n                // Handles up to 100ms of decoded audio.\n                let mut output =\n                    vec![Default::default(); (sample_rate * F::CHANNELS as u32 / 10) as usize];\n\n                loop {\n                    let packet = match undecoded_recv.recv() {\n                        Ok(packet) => packet,\n                        Err(crossbeam::RecvError) => return Ok(()),\n                    };\n\n                    let pts = packet.pts();\n                    let packet = packet.data();\n                    match DecodePacket::decode(&mut decoder, &packet, &mut output) {\n                        Ok(len) => {\n                            if len == 0 {\n                                continue;\n                            }\n\n                            let frames =\n                                dasp::slice::to_frame_slice(&output[..(len * F::CHANNELS)])\n                                    .expect(\"invalid sample count\");\n\n                            let mut guard = buffer_clone.lock().unwrap();\n                            guard.buffer(pts, 
frames);\n\n                            #[cfg(feature = \"tracy\")]\n                            {\n                                let len_us = guard.len() as f64 / sample_rate as f64 * 1_000_000.0;\n                                tracy_client::plot!(\"audio buffer (μs)\", len_us);\n                            }\n                        }\n                        Err(e) => {\n                            error!(\"opus decode error: {}\", e);\n                            continue;\n                        }\n                    };\n                }\n            })?;\n\n        // The current PTS of the video stream, which we want to sync to.\n        let sync_point = Arc::new(Mutex::new(None));\n\n        let sync_point_clone = sync_point.clone();\n        let buffer_clone = buffer.clone();\n        let stream = device.build_output_stream(\n            &conf,\n            move |out, _info| {\n                let mut buffer = buffer_clone.lock().unwrap();\n\n                let frames_needed = out.len() / F::CHANNELS;\n                let frames_remaining = buffer.len(); // In frames.\n\n                let frames_per_ms = sample_rate / 1000;\n\n                if frames_remaining < frames_needed {\n                    out.fill(Default::default());\n\n                    trace!(\"audio buffer underrun\");\n                    return;\n                }\n\n                let sync_point: Option<(u64, time::Instant)> =\n                    sync_point_clone.lock().unwrap().as_ref().copied();\n                if let Some((pts, ts)) = sync_point {\n                    let target_pts = pts + ts.elapsed().as_millis() as u64;\n                    let pts = buffer.current_pts();\n\n                    let delay = target_pts as i64 - pts as i64;\n\n                    #[cfg(feature = \"tracy\")]\n                    tracy_client::plot!(\"audio drift (ms)\", delay as f64);\n\n                    // Outside these bounds, skip or play silence in order to sync.\n                 
   const TOO_EARLY: i64 = 20;\n                    const TOO_LATE: i64 = 60;\n\n                    if delay < TOO_EARLY {\n                        // Play silence until the video catches up.\n                        out.fill(Default::default());\n                        return;\n                    }\n\n                    if delay > TOO_LATE {\n                        // Skip ahead.\n                        let skip = std::cmp::min(\n                            (delay * frames_per_ms as i64) as usize,\n                            frames_remaining.saturating_sub(frames_needed * 2),\n                        );\n\n                        buffer.skip(skip);\n                    }\n                }\n\n                let mut signal = dasp::signal::from_iter(buffer.drain()).into_interleaved_samples();\n\n                for sample in out.iter_mut() {\n                    *sample = signal.next_sample();\n                }\n\n                #[cfg(feature = \"tracy\")]\n                {\n                    let len_us = buffer.len() as f64 / sample_rate as f64 * 1_000_000.0;\n                    tracy_client::plot!(\"audio buffer (μs)\", len_us);\n                }\n            },\n            move |err| {\n                error!(\"audio playback error: {}\", err);\n            },\n            None,\n        )?;\n\n        Ok((\n            Box::new(Self {\n                // decoded_packets,\n                _buffer: buffer,\n                sync_point,\n                thread_handle: Some(thread_handle),\n                undecoded_tx: Some(undecoded_tx),\n            }),\n            stream,\n        ))\n    }\n\n    fn sync(&mut self, pts: u64) {\n        *self.sync_point.lock().unwrap() = Some((pts, time::Instant::now()));\n    }\n\n    fn send_packet(&mut self, packet: Arc<client::Packet>) -> anyhow::Result<()> {\n        self.undecoded_tx\n            .as_ref()\n            .unwrap()\n            .send(packet)\n            .map_err(|_| anyhow::anyhow!(\"audio 
decode thread died\"))?;\n        Ok(())\n    }\n}\n\nimpl<T: dasp::Frame> Drop for StreamInner<T> {\n    fn drop(&mut self) {\n        let _ = self.undecoded_tx.take();\n        if let Some(handle) = self.thread_handle.take() {\n            match handle.join() {\n                Ok(Ok(())) => (),\n                Ok(Err(e)) => {\n                    error!(\"audio decode thread error: {}\", e);\n                }\n                Err(_) => {\n                    error!(\"audio decode thread panicked\");\n                }\n            }\n        }\n    }\n}\n\npub struct AudioStream {\n    device: cpal::Device,\n\n    stream: Option<cpal::Stream>,\n    inner: Option<Box<dyn StreamWrapper>>,\n\n    stream_waiting: bool,\n\n    stream_seq: u64,\n    packet_count: u64,\n}\n\nimpl AudioStream {\n    pub fn new() -> anyhow::Result<Self> {\n        let device = cpal::default_host()\n            .default_output_device()\n            .context(\"unable to find default audio output device\")?;\n\n        info!(\"using audio output device: {}\", device.name()?);\n\n        Ok(Self {\n            device,\n\n            stream: None,\n            inner: None,\n            stream_waiting: true,\n            packet_count: 0,\n\n            stream_seq: 0,\n        })\n    }\n\n    pub fn sync(&mut self, pts: u64) {\n        if let Some(inner) = &mut self.inner {\n            inner.sync(pts);\n        }\n    }\n\n    pub fn reset(\n        &mut self,\n        stream_seq: u64,\n        sample_rate: u32,\n        channels: u32,\n    ) -> anyhow::Result<()> {\n        debug!(\n            stream_seq,\n            sample_rate, channels, \"starting or restarting audio stream\"\n        );\n\n        let (format, conf) = select_conf(&self.device, sample_rate, channels)?;\n\n        let (inner, stream) = match (format, channels) {\n            (cpal::SampleFormat::F32, 1) => StreamInner::<[f32; 1]>::new(&self.device, conf),\n            (cpal::SampleFormat::F32, 2) => StreamInner::<[f32; 
2]>::new(&self.device, conf),\n            (cpal::SampleFormat::I16, 1) => StreamInner::<[i16; 1]>::new(&self.device, conf),\n            (cpal::SampleFormat::I16, 2) => StreamInner::<[i16; 2]>::new(&self.device, conf),\n            _ => bail!(\"unsupported sample rate / format\"),\n        }?;\n\n        self.stream_seq = stream_seq;\n        self.stream = Some(stream);\n        self.inner = Some(inner);\n        self.stream_waiting = true;\n        self.packet_count = 0;\n\n        Ok(())\n    }\n\n    pub fn recv_packet(&mut self, packet: Arc<client::Packet>) -> anyhow::Result<()> {\n        if let Some(inner) = &mut self.inner {\n            trace!(\n                stream_seq = packet.stream_seq(),\n                seq = packet.seq(),\n                pts = packet.pts(),\n                len = packet.len(),\n                \"received full audio packet\"\n            );\n\n            self.packet_count += 1;\n            inner.send_packet(packet)?;\n        }\n\n        if self.stream.is_some() && self.stream_waiting && self.packet_count > 2 {\n            self.stream_waiting = false;\n            self.stream.as_ref().unwrap().play()?;\n        }\n\n        Ok(())\n    }\n}\n\nfn select_conf(\n    device: &cpal::Device,\n    sample_rate: u32,\n    channels: u32,\n) -> anyhow::Result<(cpal::SampleFormat, cpal::StreamConfig)> {\n    let mut confs = device\n        .supported_output_configs()\n        .context(\"unable to query supported audio playback formats\")?;\n\n    let valid = |format: cpal::SampleFormat| {\n        move |conf: &cpal::SupportedStreamConfigRange| {\n            conf.sample_format() == format\n                && conf.min_sample_rate() <= cpal::SampleRate(sample_rate)\n                && conf.max_sample_rate() >= cpal::SampleRate(sample_rate)\n                && conf.channels() == channels as u16\n        }\n    };\n\n    if let Some(conf_range) = confs\n        .find(valid(cpal::SampleFormat::F32))\n        .or_else(|| 
confs.find(valid(cpal::SampleFormat::I16)))\n    {\n        let sample_format = conf_range.sample_format();\n        let buffer_size = match conf_range.buffer_size() {\n            cpal::SupportedBufferSize::Unknown => cpal::BufferSize::Default,\n            cpal::SupportedBufferSize::Range { min, .. } => {\n                cpal::BufferSize::Fixed(std::cmp::max(*min, sample_rate / 100))\n            }\n        };\n\n        let mut conf =\n            cpal::StreamConfig::from(conf_range.with_sample_rate(cpal::SampleRate(sample_rate)));\n        conf.buffer_size = buffer_size;\n\n        return Ok((sample_format, conf));\n    }\n\n    bail!(\"no valid audio output configuration found\");\n}\n"
  },
  {
    "path": "mm-client/src/bin/latency-test.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{sync::Arc, time};\n\nuse anyhow::{bail, Context as _};\nuse ash::vk;\nuse clap::Parser;\nuse mm_client::{\n    delegate::{AttachmentEvent, AttachmentProxy},\n    video::*,\n    vulkan::*,\n};\nuse mm_client_common as client;\nuse mm_protocol as protocol;\nuse pollster::FutureExt as _;\nuse tracing::{debug, error, warn};\nuse winit::event_loop::EventLoop;\n\nconst APP_DIMENSION: u32 = 256;\nconst DEFAULT_TIMEOUT: time::Duration = time::Duration::from_secs(1);\n\n#[derive(Debug, Parser)]\n#[command(name = \"mmclient\")]\n#[command(about = \"The Magic Mirror reference client\", long_about = None)]\nstruct Cli {\n    /// The server to connect to.\n    #[arg(value_name = \"HOST[:PORT]\")]\n    host: String,\n    /// The codec to use. Defaults to h265.\n    #[arg(long)]\n    codec: Option<String>,\n    /// The framerate to use. Defaults to 60.\n    #[arg(long)]\n    framerate: Option<u32>,\n    /// The number of tests to run. 
Defaults to 256.\n    #[arg(short('n'), long)]\n    samples: Option<usize>,\n}\n\npub enum AppEvent {\n    VideoStreamReady(Arc<VkImage>, VideoStreamParams),\n    VideoFrameAvailable,\n    AttachmentEvent(AttachmentEvent),\n}\n\nimpl std::fmt::Debug for AppEvent {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        use AppEvent::*;\n\n        match self {\n            VideoStreamReady(_, params) => write!(f, \"VideoStreamReady({params:?})\"),\n            VideoFrameAvailable => write!(f, \"VideoFrameAvailable\"),\n            AttachmentEvent(ev) => std::fmt::Debug::fmt(ev, f),\n        }\n    }\n}\n\nimpl From<AttachmentEvent> for AppEvent {\n    fn from(event: AttachmentEvent) -> Self {\n        Self::AttachmentEvent(event)\n    }\n}\n\nimpl From<VideoStreamEvent> for AppEvent {\n    fn from(event: VideoStreamEvent) -> Self {\n        use VideoStreamEvent::*;\n\n        match event {\n            VideoStreamReady(tex, params) => AppEvent::VideoStreamReady(tex, params),\n            VideoFrameAvailable => AppEvent::VideoFrameAvailable,\n        }\n    }\n}\n\nstruct App {\n    client: client::Client,\n    args: Cli,\n    proxy: winit::event_loop::EventLoopProxy<AppEvent>,\n    win: Option<LatencyTest>,\n}\n\nstruct LatencyTest {\n    attachment: client::Attachment,\n    session_id: u64,\n    stream: VideoStream<AppEvent>,\n    video_texture: Option<Arc<VkImage>>,\n\n    frames_recvd: usize,\n\n    copy_cb: vk::CommandBuffer,\n    copy_fence: vk::Fence,\n    copy_buffer: VkHostBuffer,\n\n    next_block: usize,\n    block_started: time::Instant,\n    num_tests: usize,\n    histogram: histo::Histogram,\n\n    first_frame_recvd: Option<time::Instant>,\n    total_video_bytes: usize,\n\n    vk: Arc<VkContext>,\n}\n\nfn main() -> anyhow::Result<()> {\n    init_logging()?;\n\n    let args = Cli::parse();\n\n    // Invisible window.\n    let event_loop: EventLoop<AppEvent> = EventLoop::with_user_event().build()?;\n    let proxy = 
event_loop.create_proxy();\n\n    let client = client::Client::new(&args.host, \"latency-test\", time::Duration::from_secs(1))\n        .block_on()\n        .context(\"failed to connect\")?;\n\n    let mut app = App {\n        client,\n        args,\n        proxy,\n        win: None,\n    };\n\n    event_loop.run_app(&mut app)?;\n\n    if let Some(win) = app.win.take() {\n        drop(win.stream);\n        unsafe {\n            win.vk\n                .device\n                .free_command_buffers(win.vk.present_queue.command_pool, &[win.copy_cb]);\n            win.vk.device.destroy_fence(win.copy_fence, None);\n            destroy_host_buffer(&win.vk.device, &win.copy_buffer);\n        }\n\n        println!(\"{}\", win.histogram);\n\n        if let Some(first_frame_recvd) = win.first_frame_recvd {\n            println!(\n                \"transfer rate: {:.2} mpbs ({:.2}kb per frame)\",\n                win.total_video_bytes as f64 * 8.0\n                    / 1_000_000.0\n                    / first_frame_recvd.elapsed().as_secs_f64(),\n                win.total_video_bytes as f64 / 1_000.0 / win.frames_recvd as f64\n            );\n        }\n    }\n\n    Ok(())\n}\n\nimpl winit::application::ApplicationHandler<AppEvent> for App {\n    fn resumed(&mut self, event_loop: &winit::event_loop::ActiveEventLoop) {\n        if self.win.is_some() {\n            return;\n        }\n\n        match start_test(&self.args, &self.client, event_loop, self.proxy.clone()) {\n            Ok(w) => {\n                self.win = Some(w);\n            }\n            Err(e) => {\n                error!(\"failed to start test: {:#}\", e);\n                event_loop.exit();\n            }\n        }\n    }\n\n    fn window_event(\n        &mut self,\n        _event_loop: &winit::event_loop::ActiveEventLoop,\n        _window_id: winit::window::WindowId,\n        _event: winit::event::WindowEvent,\n    ) {\n    }\n\n    fn about_to_wait(&mut self, event_loop: 
&winit::event_loop::ActiveEventLoop) {\n        let Some(win) = &self.win else {\n            return;\n        };\n\n        if win.block_started.elapsed() > time::Duration::from_secs(3) {\n            error!(\"timed out waiting for block\");\n            event_loop.exit();\n        }\n    }\n\n    fn user_event(&mut self, event_loop: &winit::event_loop::ActiveEventLoop, event: AppEvent) {\n        let Some(win) = &mut self.win else {\n            return;\n        };\n\n        match win.event(event) {\n            Ok(true) => (),\n            Ok(false) => event_loop.exit(),\n            Err(e) => {\n                error!(\"error: {}\", e);\n                event_loop.exit();\n            }\n        }\n    }\n\n    fn exiting(&mut self, _event_loop: &winit::event_loop::ActiveEventLoop) {\n        let Some(win) = &self.win else {\n            return;\n        };\n\n        let _ = win.attachment.detach().block_on();\n        let _ = self\n            .client\n            .end_session(win.session_id, DEFAULT_TIMEOUT)\n            .block_on();\n    }\n}\n\nimpl LatencyTest {\n    fn event(&mut self, event: AppEvent) -> anyhow::Result<bool> {\n        match event {\n            AppEvent::AttachmentEvent(AttachmentEvent::VideoStreamStart(stream_seq, params)) => {\n                assert_eq!(params.width, APP_DIMENSION);\n                assert_eq!(params.height, APP_DIMENSION);\n\n                self.stream\n                    .reset(stream_seq, APP_DIMENSION, APP_DIMENSION, params.codec)?;\n            }\n            AppEvent::AttachmentEvent(AttachmentEvent::VideoPacket(packet)) => {\n                if self.first_frame_recvd.is_none() {\n                    self.first_frame_recvd = Some(time::Instant::now());\n                }\n\n                self.total_video_bytes += packet.len();\n                self.stream.recv_packet(packet)?;\n            }\n            AppEvent::AttachmentEvent(AttachmentEvent::AttachmentEnded) => {\n                bail!(\"server 
closed connection\");\n            }\n            AppEvent::AttachmentEvent(_) => (),\n            AppEvent::VideoStreamReady(tex, params) => {\n                assert_eq!(params.width, APP_DIMENSION);\n                assert_eq!(params.height, APP_DIMENSION);\n\n                self.video_texture = Some(tex);\n            }\n            AppEvent::VideoFrameAvailable => {\n                if self.stream.prepare_frame()?.is_some() {\n                    self.frames_recvd += 1;\n\n                    match self.frames_recvd.cmp(&100) {\n                        std::cmp::Ordering::Less => (),\n                        std::cmp::Ordering::Equal => {\n                            debug!(\"starting test...\");\n                            self.send_space();\n                            self.block_started = time::Instant::now();\n                            self.next_block = 0;\n                        }\n                        std::cmp::Ordering::Greater => {\n                            self.check_frame()?;\n                            if self.next_block >= self.num_tests {\n                                return Ok(false);\n                            }\n                        }\n                    }\n                }\n            }\n        }\n\n        Ok(true)\n    }\n\n    fn send_space(&mut self) {\n        debug!(\"sending space\");\n\n        self.attachment.keyboard_input(\n            client::input::Key::Space,\n            client::input::KeyState::Pressed,\n            0,\n        );\n\n        self.attachment.keyboard_input(\n            client::input::Key::Space,\n            client::input::KeyState::Released,\n            0,\n        );\n    }\n\n    fn check_frame(&mut self) -> anyhow::Result<()> {\n        unsafe {\n            self.submit_copy()?;\n        }\n\n        // Check the current block.\n        if self.check_block(self.next_block.wrapping_sub(1)) {\n            // Waiting...\n        } else if self.check_block(self.next_block) {\n           
 // Success!\n            let elapsed = self.block_started.elapsed();\n            debug!(\"block {} took {}ms\", self.next_block, elapsed.as_millis());\n            self.histogram.add(elapsed.as_millis() as u64);\n\n            // Start the next one.\n            // Sleep 10-100ms.\n            use rand::Rng;\n            let ms = (rand::thread_rng().gen::<u64>() % 90) + 10;\n            std::thread::sleep(time::Duration::from_millis(ms));\n\n            self.next_block += 1;\n            self.block_started = time::Instant::now();\n            self.send_space();\n        } else if self.next_block > 0 {\n            warn!(\"neither current or next block are highlighted\");\n        }\n\n        if self.block_started.elapsed() > time::Duration::from_secs(3) {\n            bail!(\"timed out waiting for block {}\", self.next_block);\n        }\n\n        Ok(())\n    }\n\n    fn check_block(&mut self, idx: usize) -> bool {\n        let data =\n            unsafe { std::slice::from_raw_parts(self.copy_buffer.access as *mut u8, 256 * 256) };\n\n        // Blocks are arranged in an 8x8 grid, and are 32x32 pixels.\n        let idx = idx % 64;\n        let y = (idx / 8) * 32 + 16;\n        let x = (idx % 8) * 32 + 16;\n\n        data[y * 256 + x] > 20\n    }\n\n    unsafe fn submit_copy(&mut self) -> anyhow::Result<()> {\n        let device = &self.vk.device;\n        let texture = self.video_texture.as_ref().unwrap();\n\n        // Reset the command buffer.\n        device.reset_command_buffer(self.copy_cb, vk::CommandBufferResetFlags::empty())?;\n\n        // Begin the command buffer.\n        {\n            let begin_info = vk::CommandBufferBeginInfo::default()\n                .flags(vk::CommandBufferUsageFlags::SIMULTANEOUS_USE);\n\n            device.begin_command_buffer(self.copy_cb, &begin_info)?;\n        }\n\n        // Transfer the image to be readable.\n        cmd_image_barrier(\n            device,\n            self.copy_cb,\n            texture.image,\n       
     vk::PipelineStageFlags::TOP_OF_PIPE,\n            vk::AccessFlags::empty(),\n            vk::PipelineStageFlags::TRANSFER,\n            vk::AccessFlags::TRANSFER_READ,\n            vk::ImageLayout::UNDEFINED,\n            vk::ImageLayout::TRANSFER_SRC_OPTIMAL,\n        );\n\n        // Copy the texture to the staging buffer.\n        {\n            let region = vk::BufferImageCopy::default()\n                .buffer_row_length(256)\n                .buffer_image_height(256)\n                .image_subresource(vk::ImageSubresourceLayers {\n                    aspect_mask: vk::ImageAspectFlags::PLANE_0,\n                    mip_level: 0,\n                    base_array_layer: 0,\n                    layer_count: 1,\n                })\n                .image_extent(vk::Extent3D {\n                    width: 256,\n                    height: 256,\n                    depth: 1,\n                });\n\n            let regions = [region];\n            device.cmd_copy_image_to_buffer(\n                self.copy_cb,\n                texture.image,\n                vk::ImageLayout::TRANSFER_SRC_OPTIMAL,\n                self.copy_buffer.buffer,\n                &regions,\n            )\n        }\n\n        device.end_command_buffer(self.copy_cb)?;\n\n        device.reset_fences(&[self.copy_fence])?;\n        device.queue_submit(\n            self.vk.present_queue.queue,\n            &[vk::SubmitInfo::default().command_buffers(&[self.copy_cb])],\n            self.copy_fence,\n        )?;\n        device.wait_for_fences(&[self.copy_fence], true, u64::MAX)?;\n        Ok(())\n    }\n}\n\nfn start_test(\n    args: &Cli,\n    client: &client::Client,\n    event_loop: &winit::event_loop::ActiveEventLoop,\n    proxy: winit::event_loop::EventLoopProxy<AppEvent>,\n) -> anyhow::Result<LatencyTest> {\n    let attr = winit::window::Window::default_attributes().with_visible(false);\n\n    let window = Arc::new(event_loop.create_window(attr)?);\n    let vk = unsafe { 
Arc::new(VkContext::new(window.clone(), cfg!(debug_assertions))?) };\n\n    let codec = match args.codec.as_deref() {\n        Some(\"h264\") => protocol::VideoCodec::H264,\n        Some(\"h265\") | None => protocol::VideoCodec::H265,\n        Some(\"av1\") => protocol::VideoCodec::Av1,\n        Some(v) => bail!(\"invalid codec: {:?}\", v),\n    };\n\n    // Create session, attach\n    let sess = client\n        .launch_session(\n            \"latency-test\".to_string(),\n            client::display_params::DisplayParams {\n                width: APP_DIMENSION,\n                height: APP_DIMENSION,\n                framerate: args.framerate.unwrap_or(60),\n                ui_scale: client::pixel_scale::PixelScale::ONE,\n            },\n            vec![],\n            DEFAULT_TIMEOUT,\n        )\n        .block_on()\n        .context(\"failed to create session\")?;\n\n    let config = client::AttachmentConfig {\n        width: APP_DIMENSION,\n        height: APP_DIMENSION,\n        video_codec: codec.into(),\n        video_profile: None,\n        quality_preset: Some(6),\n        audio_codec: None,\n        sample_rate: None,\n        channels: vec![],\n        video_stream_seq_offset: 0,\n        audio_stream_seq_offset: 0,\n    };\n\n    let delegate = Arc::new(AttachmentProxy::new(proxy.clone()));\n    let attachment = client\n        .attach_session(sess.id, config, delegate, DEFAULT_TIMEOUT)\n        .block_on()\n        .context(\"failed to attach\")?;\n\n    // Just big enough for the Y plane.\n    let copy_buffer = create_host_buffer(\n        &vk.device,\n        vk.device_info.host_visible_mem_type_index,\n        vk::BufferUsageFlags::TRANSFER_DST,\n        (APP_DIMENSION * APP_DIMENSION) as usize,\n    )?;\n\n    let copy_cb = create_command_buffer(&vk.device, vk.present_queue.command_pool)?;\n    let copy_fence = create_fence(&vk.device, false)?;\n\n    Ok(LatencyTest {\n        attachment,\n        session_id: sess.id,\n\n        stream: 
VideoStream::new(vk.clone(), proxy.clone()),\n        video_texture: None,\n        frames_recvd: 0,\n\n        copy_cb,\n        copy_fence,\n        copy_buffer,\n\n        next_block: 0,\n        block_started: time::Instant::now(),\n        num_tests: args.samples.unwrap_or(256),\n        histogram: histo::Histogram::with_buckets(10),\n\n        first_frame_recvd: None,\n        total_video_bytes: 0,\n\n        vk: vk.clone(),\n    })\n}\n\nfn init_logging() -> anyhow::Result<()> {\n    if let Ok(env_filter) = tracing_subscriber::EnvFilter::try_from_default_env() {\n        tracing_subscriber::fmt().with_env_filter(env_filter).init();\n    } else {\n        let filter = tracing_subscriber::EnvFilter::builder()\n            .with_default_directive(tracing::level_filters::LevelFilter::INFO.into())\n            .from_env()?\n            .add_directive(\"mm_client=info\".parse()?)\n            .add_directive(\"mm_client_common=info\".parse()?);\n        tracing_subscriber::fmt().with_env_filter(filter).init();\n    }\n\n    Ok(())\n}\n"
  },
  {
    "path": "mm-client/src/bin/mmclient.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{sync::Arc, time};\n\nuse anyhow::{anyhow, bail};\nuse clap::Parser;\nuse ffmpeg_sys_next as ffmpeg_sys;\nuse mm_client::{\n    audio,\n    cursor::{cursor_icon_from_proto, load_cursor_image},\n    delegate::{AttachmentEvent, AttachmentProxy},\n    flash::Flash,\n    gamepad::{spawn_gamepad_monitor, GamepadEvent},\n    keys::winit_key_to_proto,\n    overlay::Overlay,\n    render::Renderer,\n    stats::STATS,\n    video::{self, VideoStreamEvent},\n    vulkan,\n};\nuse mm_client_common as client;\nuse mm_protocol as protocol;\nuse pollster::FutureExt as _;\nuse tracing::{debug, error, info, trace, warn};\nuse tracing_subscriber::Layer as _;\nuse winit::{event_loop::ControlFlow, window};\n\nconst DEFAULT_CONNECT_TIMEOUT: time::Duration = time::Duration::from_secs(1);\nconst DEFAULT_REQUEST_TIMEOUT: time::Duration = time::Duration::from_secs(30);\n\nconst MAX_FRAME_TIME: time::Duration = time::Duration::from_nanos(1_000_000_000 / 24);\nconst RESIZE_COOLDOWN: time::Duration = time::Duration::from_millis(500);\n\n#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]\nenum Resolution {\n    #[default]\n    Auto,\n    Height(u32),\n    Custom(u32, u32),\n}\n\nimpl From<&str> for Resolution {\n    fn from(s: &str) -> Self {\n        if s == \"auto\" {\n            Resolution::Auto\n        } else if let Some((w, h)) = s.split_once('x') {\n            Resolution::Custom(\n                w.parse().expect(\"invalid resolution width\"),\n                h.parse().expect(\"invalid resolution height\"),\n            )\n        } else {\n            Resolution::Height(s.parse().expect(\"invalid resolution height\"))\n        }\n    }\n}\n\n#[derive(Debug, Parser)]\n#[command(name = \"mmclient\")]\n#[command(about = \"The Magic Mirror reference client\", long_about = None)]\nstruct Cli {\n    /// The server to connect to.\n    #[arg(value_name = \"HOST[:PORT]\")]\n    
host: String,\n    /// The id of the app, or the ID of an existing session.\n    app: Option<String>,\n    /// Print a list of launchable applications and exit.\n    #[arg(long)]\n    list_apps: bool,\n    /// Print a list of matching sessions and exit.\n    #[arg(short = 'L', long)]\n    list: bool,\n    /// End a session (which may be specified by name or ID) and exit.\n    #[arg(short = 'K', long)]\n    kill: bool,\n    /// Always resume an existing session, and error if none match.\n    #[arg(short, long)]\n    resume: bool,\n    /// Always launch a new session, even if one exists that matches.\n    #[arg(short, long)]\n    launch: bool,\n    /// On exit, automatically kill the session.\n    #[arg(short = 'x', long)]\n    kill_on_exit: bool,\n    /// The streaming resolution to use. If not specified, this will be tied to\n    /// the client resolution, and automatically change when the client window\n    /// resizes.\n    #[arg(long, required = false, default_value = \"auto\")]\n    resolution: Resolution,\n    /// Request 10-bit video output from the server. This will only work if\n    /// both your display and the application in question support rendering\n    /// HDR color.\n    #[arg(long, required = false)]\n    hdr: bool,\n    /// The UI scale to communicate to the server. 
If not specified, this will\n    /// be determined from the client-side window scale factor.\n    #[arg(long, required = false)]\n    ui_scale: Option<f64>,\n    /// Video codec to use.\n    #[arg(long, default_value = \"h265\")]\n    codec: Option<String>,\n    /// Framerate to render at on the server side.\n    #[arg(long, default_value = \"30\")]\n    framerate: u32,\n    /// The quality preset to use, from 0-9.\n    #[arg(short, long, default_value = \"6\")]\n    preset: u32,\n    /// Open in fullscreen mode.\n    #[arg(long)]\n    fullscreen: bool,\n    /// Enable the overlay, which shows various stats.\n    #[arg(long)]\n    overlay: bool,\n}\n\nstruct AttachmentWindow {\n    configured_resolution: Resolution,\n    configured_ui_scale: Option<f64>,\n    configured_framerate: u32,\n\n    window: Arc<winit::window::Window>,\n    attachment: client::Attachment,\n    attachment_config: client::AttachmentConfig,\n    delegate: Arc<AttachmentProxy<AppEvent>>,\n\n    session: client::Session,\n\n    video_stream: video::VideoStream<AppEvent>,\n    audio_stream: audio::AudioStream,\n\n    renderer: Renderer,\n    window_width: u32,\n    window_height: u32,\n    window_ui_scale: f64,\n\n    minimized: bool,\n    next_frame: time::Instant,\n    last_frame_received: time::Instant,\n    resize_cooldown: Option<time::Instant>,\n\n    needs_refresh: Option<u64>,\n    refresh_cooldown: Option<time::Instant>,\n\n    cursor_modifiers: winit::keyboard::ModifiersState,\n    cursor_pos: Option<(f64, f64)>,\n\n    flash: Flash,\n    overlay: Option<Overlay>,\n\n    stats_timer: time::Instant,\n\n    _vk: Arc<vulkan::VkContext>,\n}\n\nstruct App {\n    client: client::Client,\n    args: Cli,\n    attachment_window: Option<AttachmentWindow>,\n    proxy: winit::event_loop::EventLoopProxy<AppEvent>,\n\n    end_session_on_exit: bool,\n}\n\npub enum AppEvent {\n    VideoStreamReady(Arc<vulkan::VkImage>, video::VideoStreamParams),\n    VideoFrameAvailable,\n    
AttachmentEvent(AttachmentEvent),\n    GamepadEvent(GamepadEvent),\n}\n\nimpl From<VideoStreamEvent> for AppEvent {\n    fn from(event: VideoStreamEvent) -> Self {\n        use VideoStreamEvent::*;\n\n        match event {\n            VideoStreamReady(tex, params) => AppEvent::VideoStreamReady(tex, params),\n            VideoFrameAvailable => AppEvent::VideoFrameAvailable,\n        }\n    }\n}\n\nimpl From<AttachmentEvent> for AppEvent {\n    fn from(value: AttachmentEvent) -> Self {\n        Self::AttachmentEvent(value)\n    }\n}\n\nimpl From<GamepadEvent> for AppEvent {\n    fn from(event: GamepadEvent) -> Self {\n        Self::GamepadEvent(event)\n    }\n}\n\nimpl std::fmt::Debug for AppEvent {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        match self {\n            AppEvent::VideoStreamReady(_, params) => write!(f, \"VideoStreamReady({params:?})\"),\n            AppEvent::VideoFrameAvailable => write!(f, \"VideoFrameAvailable\"),\n            AppEvent::AttachmentEvent(ev) => std::fmt::Debug::fmt(ev, f),\n            AppEvent::GamepadEvent(ev) => std::fmt::Debug::fmt(ev, f),\n        }\n    }\n}\n\nimpl winit::application::ApplicationHandler<AppEvent> for App {\n    fn resumed(&mut self, event_loop: &winit::event_loop::ActiveEventLoop) {\n        if self.attachment_window.is_none() {\n            let window = match init_window(&self.args, &self.client, event_loop, &self.proxy) {\n                Ok(w) => w,\n                Err(e) => {\n                    error!(\"failed to attach to session: {:#}\", e);\n                    event_loop.exit();\n                    return;\n                }\n            };\n\n            self.attachment_window = Some(window);\n        }\n    }\n\n    fn window_event(\n        &mut self,\n        event_loop: &winit::event_loop::ActiveEventLoop,\n        window_id: winit::window::WindowId,\n        event: winit::event::WindowEvent,\n    ) {\n        let Some(win) = &mut self.attachment_window 
else {\n            return;\n        };\n\n        if win.window.id() != window_id {\n            return;\n        }\n\n        if let Err(e) = win.renderer.handle_event(&event) {\n            error!(\"renderer error: {:#}\", e);\n            event_loop.exit();\n            return;\n        }\n\n        let res = win.handle_window_event(event);\n        win.schedule_next_frame(event_loop, res);\n    }\n\n    fn device_event(\n        &mut self,\n        _event_loop: &winit::event_loop::ActiveEventLoop,\n        _device_id: winit::event::DeviceId,\n        event: winit::event::DeviceEvent,\n    ) {\n        let Some(win) = &mut self.attachment_window else {\n            return;\n        };\n\n        let winit::event::DeviceEvent::MouseMotion { delta: (x, y) } = event else {\n            return;\n        };\n\n        if let Some((x, y)) = win.motion_vector_to_attachment_space(x, y) {\n            win.attachment.relative_pointer_motion(x, y)\n        }\n    }\n\n    fn user_event(&mut self, event_loop: &winit::event_loop::ActiveEventLoop, event: AppEvent) {\n        let Some(win) = &mut self.attachment_window else {\n            return;\n        };\n\n        let res = win.handle_app_event(event_loop, &self.client, event);\n        win.schedule_next_frame(event_loop, res);\n    }\n\n    fn about_to_wait(&mut self, event_loop: &winit::event_loop::ActiveEventLoop) {\n        let Some(win) = &mut self.attachment_window else {\n            return;\n        };\n\n        let res = win.idle(&self.client);\n        win.schedule_next_frame(event_loop, res);\n    }\n\n    fn exiting(&mut self, _event_loop: &winit::event_loop::ActiveEventLoop) {\n        if let Some(AttachmentWindow {\n            attachment,\n            session,\n            ..\n        }) = self.attachment_window.take()\n        {\n            debug!(\"detaching from session\");\n            match attachment.detach().block_on() {\n                Ok(()) | Err(client::ClientError::Detached) => (),\n         
       Err(err) => error!(?err, \"failed to detach cleanly\"),\n            }\n\n            if self.end_session_on_exit {\n                debug!(\"ending session\");\n\n                match self\n                    .client\n                    .end_session(session.id, DEFAULT_REQUEST_TIMEOUT)\n                    .block_on()\n                {\n                    Ok(()) => (),\n                    Err(client::ClientError::ServerError(err))\n                        if err.err_code() == protocol::error::ErrorCode::ErrorSessionNotFound => {}\n                    Err(err) => error!(?err, \"failed to end session\"),\n                }\n            }\n        }\n    }\n}\n\nimpl AttachmentWindow {\n    fn handle_window_event(&mut self, event: winit::event::WindowEvent) -> anyhow::Result<bool> {\n        trace!(?event, \"handling window event\");\n\n        use winit::event::*;\n        match event {\n            WindowEvent::RedrawRequested => {\n                self.video_stream.prepare_frame()?;\n                self.video_stream.mark_frame_rendered();\n\n                if !self.minimized && self.video_stream.is_ready() {\n                    unsafe {\n                        self.renderer.render(|ui| {\n                            self.flash.build(ui)?;\n                            if let Some(ref mut overlay) = self.overlay {\n                                overlay.build(ui)?;\n                            }\n\n                            Ok(())\n                        })?;\n                    };\n                }\n\n                self.next_frame = time::Instant::now() + MAX_FRAME_TIME;\n            }\n            WindowEvent::CloseRequested => return Ok(false),\n            WindowEvent::Resized(size) => {\n                if size.width == 0 || size.height == 0 {\n                    self.minimized = true;\n                } else {\n                    debug!(\"resize event: {}x{}\", size.width, size.height);\n                    if size.width != 
self.window_width || size.height != self.window_height {\n                        if let Some(ref mut overlay) = self.overlay {\n                            overlay.reposition();\n                        }\n\n                        // Trigger a stream resize, but debounce first.\n                        self.resize_cooldown = Some(time::Instant::now() + RESIZE_COOLDOWN);\n                    }\n\n                    self.minimized = false;\n                }\n            }\n            WindowEvent::ScaleFactorChanged { scale_factor, .. } => {\n                debug!(\"window scale factor changed to {}\", scale_factor);\n\n                // Winit sends us a Resized event, immediately after this\n                // one, with the new physical resolution.\n            }\n            WindowEvent::ModifiersChanged(modifiers) => {\n                self.cursor_modifiers = modifiers.state();\n            }\n            WindowEvent::KeyboardInput {\n                event:\n                    KeyEvent {\n                        physical_key: winit::keyboard::PhysicalKey::Code(code),\n                        logical_key,\n                        state,\n                        repeat,\n                        ..\n                    },\n                ..\n            } => {\n                if state == ElementState::Pressed\n                    && logical_key == winit::keyboard::Key::Character(\"d\".into())\n                    && self.cursor_modifiers.control_key()\n                {\n                    return Ok(false);\n                } else {\n                    let char = match logical_key {\n                        winit::keyboard::Key::Character(text) => text.chars().next(),\n                        _ => None,\n                    };\n\n                    let state = match state {\n                        _ if repeat => client::input::KeyState::Repeat,\n                        ElementState::Pressed => client::input::KeyState::Pressed,\n                        
ElementState::Released => client::input::KeyState::Released,\n                    };\n\n                    let key = winit_key_to_proto(code);\n                    if key == protocol::keyboard_input::Key::Unknown {\n                        debug!(\"unknown key: {:?}\", code);\n                    } else {\n                        self.attachment\n                            .keyboard_input(key, state, char.map_or(0, Into::into));\n                    }\n                }\n            }\n            WindowEvent::CursorMoved { position, .. } => {\n                let new_position = self.renderer.get_texture_aspect().and_then(|aspect| {\n                    // Calculate coordinates in [-1.0, 1.0];\n                    let (clip_x, clip_y) = (\n                        (position.x / self.window_width as f64) * 2.0 - 1.0,\n                        (position.y / self.window_height as f64) * 2.0 - 1.0,\n                    );\n\n                    // Stretch the space to account for letterboxing.\n                    let clip_x = clip_x * aspect.0;\n                    let clip_y = clip_y * aspect.1;\n\n                    // In the letterbox.\n                    if clip_x.abs() > 1.0 || clip_y.abs() > 1.0 {\n                        return None;\n                    }\n\n                    // Convert to texture coordinates.\n                    let x = (clip_x + 1.0) / 2.0;\n                    let y = (clip_y + 1.0) / 2.0;\n\n                    // Convert the position to physical coordinates in the remote display.\n                    let cursor_x = x * self.attachment_config.width as f64;\n                    let cursor_y = y * self.attachment_config.height as f64;\n\n                    Some((cursor_x, cursor_y))\n                });\n\n                if let Some((cursor_x, cursor_y)) = new_position {\n                    self.attachment.pointer_motion(cursor_x, cursor_y);\n\n                    if new_position.is_some() && self.cursor_pos.is_none() {\n             
           self.attachment.pointer_entered();\n                    } else if new_position.is_none() && self.cursor_pos.is_some() {\n                        self.attachment.pointer_left();\n                    }\n\n                    self.cursor_pos = new_position;\n                }\n            }\n            WindowEvent::CursorEntered { .. } => {\n                // Handled on the CursorMoved event.\n            }\n            WindowEvent::CursorLeft { .. } => {\n                if self.cursor_pos.take().is_some() {\n                    self.attachment.pointer_left()\n                }\n            }\n            WindowEvent::MouseInput { state, button, .. } => {\n                use protocol::pointer_input::*;\n\n                if self.cursor_pos.is_none() {\n                    return Ok(true);\n                }\n\n                let button = match button {\n                    winit::event::MouseButton::Left => Button::Left,\n                    winit::event::MouseButton::Right => Button::Right,\n                    winit::event::MouseButton::Middle => Button::Middle,\n                    winit::event::MouseButton::Back => Button::Back,\n                    winit::event::MouseButton::Forward => Button::Forward,\n                    winit::event::MouseButton::Other(id) => {\n                        debug!(\"skipping unknown mouse button: {}\", id);\n                        return Ok(true);\n                    }\n                };\n\n                let state = match state {\n                    ElementState::Pressed => ButtonState::Pressed,\n                    ElementState::Released => ButtonState::Released,\n                };\n\n                let (cursor_x, cursor_y) = self.cursor_pos.unwrap();\n                self.attachment\n                    .pointer_input(button, state, cursor_x, cursor_y);\n            }\n            WindowEvent::MouseWheel {\n                delta: MouseScrollDelta::LineDelta(x, y),\n                phase: 
TouchPhase::Moved,\n                ..\n            } => self.attachment.pointer_scroll(\n                client::input::ScrollType::Discrete,\n                x as f64,\n                y as f64,\n            ),\n            WindowEvent::MouseWheel {\n                delta: MouseScrollDelta::PixelDelta(vector),\n                phase: TouchPhase::Moved,\n                ..\n            } => {\n                if let Some((x, y)) = self.motion_vector_to_attachment_space(vector.x, vector.y) {\n                    self.attachment\n                        .pointer_scroll(client::input::ScrollType::Continuous, x, y);\n                }\n            }\n            _ => (),\n        }\n\n        Ok(true)\n    }\n\n    fn handle_app_event(\n        &mut self,\n        event_loop: &winit::event_loop::ActiveEventLoop,\n        client: &client::Client,\n        event: AppEvent,\n    ) -> anyhow::Result<bool> {\n        trace!(?event, \"handling event\");\n\n        use AttachmentEvent::*;\n        match event {\n            AppEvent::AttachmentEvent(ev) => match ev {\n                VideoStreamStart(stream_seq, params) => {\n                    self.attachment_config.video_stream_seq_offset =\n                        stream_seq.max(self.attachment_config.video_stream_seq_offset);\n                    self.video_stream.reset(\n                        stream_seq,\n                        params.width,\n                        params.height,\n                        params.codec,\n                    )?;\n                    self.needs_refresh = None;\n                }\n                VideoPacket(packet) => {\n                    self.last_frame_received = time::Instant::now();\n                    self.video_stream.recv_packet(packet)?;\n                }\n                DroppedVideoPacket(dropped) => {\n                    // Only request a keyframe once every ten seconds.\n                    if dropped.hierarchical_layer == 0 {\n                        
self.needs_refresh = Some(dropped.stream_seq);\n                    }\n                }\n                AudioStreamStart(stream_seq, params) => {\n                    self.attachment_config.audio_stream_seq_offset =\n                        stream_seq.max(self.attachment_config.audio_stream_seq_offset);\n                    self.audio_stream.reset(\n                        stream_seq,\n                        params.sample_rate,\n                        params.channels.len() as u32,\n                    )?;\n                }\n                AudioPacket(packet) => {\n                    self.audio_stream.recv_packet(packet)?;\n                }\n                UpdateCursor {\n                    icon,\n                    image,\n                    hotspot_x,\n                    hotspot_y,\n                } => {\n                    if let Some(image) = image {\n                        if let Ok(cursor) = load_cursor_image(&image, hotspot_x, hotspot_y)\n                            .map(|src| event_loop.create_custom_cursor(src))\n                        {\n                            self.window.set_cursor(cursor);\n                            self.window.set_cursor_visible(true);\n                        } else {\n                            error!(image_len = image.len(), \"custom cursor image update failed\");\n                        }\n                    } else if icon == protocol::update_cursor::CursorIcon::None {\n                        self.window.set_cursor_visible(false);\n                    } else {\n                        self.window.set_cursor(cursor_icon_from_proto(icon));\n                        self.window.set_cursor_visible(true);\n                    }\n                }\n                LockPointer(x, y) => {\n                    debug!(x, y, \"cursor locked\");\n\n                    // On most platforms, we have to lock the cursor before we\n                    // warp it. 
On mac, it's the other way around.\n                    #[cfg(not(target_vendor = \"apple\"))]\n                    self.window\n                        .set_cursor_grab(winit::window::CursorGrabMode::Locked)?;\n\n                    if let Some(aspect) = self.renderer.get_texture_aspect() {\n                        let width = self.attachment_config.width;\n                        let height = self.attachment_config.height;\n\n                        // Map vector to [-0.5, 0.5].\n                        let x = (x / width as f64) - 0.5;\n                        let y = (y / height as f64) - 0.5;\n\n                        // Squish the space to account for letterboxing.\n                        let x = x / aspect.0;\n                        let y = y / aspect.1;\n\n                        // Map to the screen size.\n                        let x = (x + 0.5) * self.window_width as f64;\n                        let y = (y + 0.5) * self.window_height as f64;\n\n                        let pos: winit::dpi::PhysicalPosition<f64> = (x, y).into();\n                        self.window.set_cursor_position(pos)?;\n                    }\n\n                    #[cfg(target_vendor = \"apple\")]\n                    self.window\n                        .set_cursor_grab(winit::window::CursorGrabMode::Locked)?;\n                }\n                ReleasePointer => {\n                    self.window\n                        .set_cursor_grab(winit::window::CursorGrabMode::None)?;\n                }\n                DisplayParamsChanged {\n                    params,\n                    reattach_required,\n                } => {\n                    if reattach_required {\n                        self.attachment_config.width = params.width;\n                        self.attachment_config.height = params.height;\n\n                        // TODO: this blocks the app, which is not ideal.\n                        // We could spawn a thread for this, or reuse one.\n                   
     debug!(\"reattaching to session after resize\");\n                        self.attachment = client\n                            .attach_session(\n                                self.session.id,\n                                self.attachment_config.clone(),\n                                self.delegate.clone(),\n                                DEFAULT_REQUEST_TIMEOUT,\n                            )\n                            .block_on()?;\n                    }\n\n                    self.session.display_params = params;\n                }\n                AttachmentEnded => {\n                    info!(\"attachment ended by server\");\n\n                    return Ok(false);\n                }\n            },\n            AppEvent::VideoStreamReady(texture, params) => {\n                self.renderer.bind_video_texture(texture, params)?;\n            }\n            AppEvent::VideoFrameAvailable => {\n                if self.video_stream.prepare_frame()?.is_some() {\n                    self.window.request_redraw();\n                }\n            }\n            AppEvent::GamepadEvent(gev) => match gev {\n                GamepadEvent::Available(pad) => self.attachment.gamepad_available(pad),\n                GamepadEvent::Unavailable(id) => self.attachment.gamepad_unavailable(id),\n                GamepadEvent::Input(id, button, state) => {\n                    self.attachment.gamepad_input(id, button, state)\n                }\n                GamepadEvent::Motion(id, axis, value) => {\n                    self.attachment.gamepad_motion(id, axis, value)\n                }\n            },\n        }\n\n        Ok(true)\n    }\n\n    fn idle(&mut self, client: &client::Client) -> anyhow::Result<bool> {\n        if self.next_frame.elapsed() > time::Duration::ZERO {\n            self.window.request_redraw();\n        }\n\n        if self.stats_timer.elapsed() > time::Duration::from_millis(100) {\n            STATS.set_connection_rtt(client.stats().rtt)\n     
   }\n\n        let last_frame = self.last_frame_received.elapsed();\n        if last_frame > time::Duration::from_secs(1) {\n            if last_frame > DEFAULT_REQUEST_TIMEOUT {\n                // TODO: this fires when we've tabbed away.\n                bail!(\"timed out waiting for video frames\");\n            } else {\n                self.flash.set_message(\"waiting for server...\");\n            }\n        }\n\n        // Debounced processing of the resize event.\n        if self.resize_cooldown.is_some()\n            && self.resize_cooldown.unwrap().elapsed() > time::Duration::ZERO\n        {\n            let size = self.window.inner_size();\n            let scale_factor = self.window.scale_factor();\n\n            if size.width != self.window_width\n                || size.height != self.window_height\n                || scale_factor != self.window_ui_scale\n            {\n                debug!(\n                    width = size.width,\n                    height = size.height,\n                    scale_factor,\n                    \"window resized\"\n                );\n\n                self.window_width = size.width;\n                self.window_height = size.height;\n                self.window_ui_scale = scale_factor;\n\n                let desired_ui_scale = determine_ui_scale(\n                    self.configured_ui_scale\n                        .unwrap_or(self.window.scale_factor()),\n                );\n\n                let (desired_width, desired_height) = determine_resolution(\n                    self.configured_resolution,\n                    self.window_width,\n                    self.window_height,\n                );\n\n                let desired_params = client::display_params::DisplayParams {\n                    width: desired_width,\n                    height: desired_height,\n                    ui_scale: desired_ui_scale,\n                    framerate: self.configured_framerate,\n                };\n\n                // 
Update the session to match our desired resolution or\n                // scale. Note that this is skipped if there is no\n                // current attachment (and `current_streaming_res` is\n                // None).\n                if desired_params != self.session.display_params {\n                    debug!(\n                        \"resizing session to {}x{}@{} (scale: {})\",\n                        desired_width, desired_height, self.configured_framerate, desired_ui_scale,\n                    );\n\n                    self.flash.set_message(\"resizing...\");\n\n                    // TODO: this blocks the app.\n                    client\n                        .update_session_display_params(\n                            self.session.id,\n                            desired_params,\n                            DEFAULT_REQUEST_TIMEOUT,\n                        )\n                        .block_on()?;\n                }\n            }\n\n            self.resize_cooldown = None;\n        }\n\n        // Request a video refresh if we need one, but only every ten seconds.\n        if self.needs_refresh.is_some()\n            && self\n                .refresh_cooldown\n                .is_none_or(|t| t.elapsed() > time::Duration::from_secs(10))\n        {\n            let stream_seq = self.needs_refresh.unwrap();\n\n            debug!(stream_seq, \"requesting video refresh\");\n            self.attachment.request_video_refresh(stream_seq);\n            self.refresh_cooldown = Some(time::Instant::now());\n            self.needs_refresh = None;\n        }\n\n        Ok(true)\n    }\n\n    fn schedule_next_frame(\n        &mut self,\n        event_loop: &winit::event_loop::ActiveEventLoop,\n        res: anyhow::Result<bool>,\n    ) {\n        match res {\n            Ok(true) => {\n                event_loop.set_control_flow(ControlFlow::WaitUntil(self.next_frame));\n            }\n            Ok(false) => event_loop.exit(),\n            Err(e) => {\n           
     error!(\"{:#}\", e);\n                event_loop.exit()\n            }\n        }\n    }\n\n    fn motion_vector_to_attachment_space(&self, x: f64, y: f64) -> Option<(f64, f64)> {\n        let (aspect_x, aspect_y) = self.renderer.get_texture_aspect()?;\n\n        // Map vector to [0, 1]. (It can also be negative.)\n        let (x, y) = (\n            (x / self.window_width as f64),\n            (y / self.window_height as f64),\n        );\n\n        // Stretch the space to account for letterboxing. For\n        // example, if the video texture only takes up one third\n        // of the screen vertically, and we scroll up one third\n        // of the window height, the resulting vector should be [0,\n        // -1.0].\n        let x = x * aspect_x;\n        let y = y * aspect_y;\n\n        Some((\n            x * self.attachment_config.width as f64,\n            y * self.attachment_config.height as f64,\n        ))\n    }\n}\n\npub fn main() -> anyhow::Result<()> {\n    init_logging()?;\n\n    let args = Cli::parse();\n    let cmds: u8 = vec![\n        args.list_apps,\n        args.list,\n        args.kill,\n        args.launch,\n        args.resume,\n    ]\n    .into_iter()\n    .map(|b| b as u8)\n    .sum();\n    if cmds > 1 {\n        bail!(\"only one of --launch, --resume, --list, or --kill may be specified\");\n    } else if !(args.list || args.list_apps) && args.app.is_none() {\n        bail!(\"an app name or session ID must be specified\");\n    } else if args.list_apps && args.app.is_some() {\n        bail!(\"an app name or session ID may not be specified alongside --list-apps\")\n    }\n\n    debug!(\"establishing connection to {:}\", &args.host);\n    let client = client::Client::new(&args.host, \"mmclient\", DEFAULT_CONNECT_TIMEOUT).block_on()?;\n\n    if args.list_apps {\n        return cmd_list_apps(&client);\n    } else if args.list {\n        return cmd_list_sessions(&args, &client);\n    } else if args.kill {\n        return cmd_kill(&args, 
&client);\n    }\n\n    let event_loop = winit::event_loop::EventLoop::with_user_event().build()?;\n    let proxy = event_loop.create_proxy();\n\n    let end_session_on_exit = args.kill_on_exit;\n    let mut app = App {\n        client,\n        args,\n        attachment_window: None,\n        proxy,\n\n        end_session_on_exit,\n    };\n\n    event_loop.run_app(&mut app)?;\n\n    Ok(())\n}\n\nfn init_window(\n    args: &Cli,\n    client: &client::Client,\n    event_loop: &winit::event_loop::ActiveEventLoop,\n    proxy: &winit::event_loop::EventLoopProxy<AppEvent>,\n) -> anyhow::Result<AttachmentWindow> {\n    let sessions = client.list_sessions(DEFAULT_REQUEST_TIMEOUT).block_on()?;\n    let target = args.app.clone().unwrap();\n    let matched = filter_sessions(sessions, args.app.as_ref().unwrap());\n\n    if !args.launch && matched.len() > 1 {\n        bail!(\n            \"multiple sessions found matching {:?}, specify a session ID to attach or use \\\n             --launch to create a new one.\",\n            target,\n        );\n    } else if args.resume && matched.is_empty() {\n        bail!(\"no session found matching {:?}\", target);\n    }\n\n    let configured_codec = match args.codec.as_deref() {\n        Some(\"h264\") => client::codec::VideoCodec::H264,\n        Some(\"h265\") | None => client::codec::VideoCodec::H265,\n        Some(\"av1\") => client::codec::VideoCodec::Av1,\n        Some(v) => bail!(\"invalid codec: {:?}\", v),\n    };\n\n    let configured_profile = if args.hdr {\n        protocol::VideoProfile::Hdr10\n    } else {\n        protocol::VideoProfile::Hd\n    };\n\n    let session = if args.launch || matched.is_empty() {\n        None\n    } else {\n        Some(matched[0].clone())\n    };\n\n    let window_attr = if args.fullscreen {\n        window::Window::default_attributes()\n            .with_fullscreen(Some(window::Fullscreen::Borderless(None)))\n    } else {\n        window::Window::default_attributes()\n    };\n\n    let 
window = Arc::new(event_loop.create_window(window_attr)?);\n    let vk = unsafe {\n        Arc::new(vulkan::VkContext::new(\n            window.clone(),\n            cfg!(debug_assertions),\n        )?)\n    };\n\n    let renderer = Renderer::new(vk.clone(), window.clone(), args.hdr)?;\n\n    let window_size = window.inner_size();\n    let window_ui_scale = window.scale_factor();\n\n    let (width, height) =\n        determine_resolution(args.resolution, window_size.width, window_size.height);\n\n    let desired_params = client::display_params::DisplayParams {\n        width,\n        height,\n        framerate: args.framerate,\n        ui_scale: determine_ui_scale(args.ui_scale.unwrap_or(window_ui_scale)),\n    };\n\n    let initial_gamepads = spawn_gamepad_monitor(proxy.clone())?;\n\n    let session_id = if let Some(session) = session {\n        if session.display_params != desired_params {\n            debug!(\"updating session params to {:?}\", desired_params);\n            client\n                .update_session_display_params(session.id, desired_params, DEFAULT_REQUEST_TIMEOUT)\n                .block_on()?;\n        }\n\n        session.id\n    } else {\n        let target = args.app.as_ref().unwrap();\n        let target = target.rsplit(\"/\").next().unwrap();\n\n        info!(\"launching a new session for for app {:?}\", target);\n\n        client\n            .launch_session(\n                target.into(),\n                desired_params.clone(),\n                initial_gamepads.clone(),\n                DEFAULT_REQUEST_TIMEOUT,\n            )\n            .block_on()?\n            .id\n    };\n\n    // Refetch the session params.\n    let session = client\n        .list_sessions(DEFAULT_REQUEST_TIMEOUT)\n        .block_on()?\n        .into_iter()\n        .find(|s| s.id == session_id)\n        .ok_or(anyhow!(\"new session not found in session list\"))?;\n\n    let now = time::Instant::now();\n\n    let mut flash = Flash::new();\n    
flash.set_message(\"connecting...\");\n\n    let overlay = if args.overlay {\n        Some(Overlay::new(args.framerate))\n    } else {\n        None\n    };\n\n    let delegate = Arc::new(AttachmentProxy::new(proxy.clone()));\n\n    let audio_stream = audio::AudioStream::new()?;\n    let video_stream = video::VideoStream::new(vk.clone(), proxy.clone());\n    spawn_gamepad_monitor(proxy.clone())?;\n\n    let attachment_config = client::AttachmentConfig {\n        width: session.display_params.width,\n        height: session.display_params.height,\n        video_codec: Some(configured_codec),\n        video_profile: Some(configured_profile),\n        quality_preset: Some(args.preset + 1),\n        audio_codec: None,\n        sample_rate: None,\n        channels: Vec::new(),\n        video_stream_seq_offset: 0,\n        audio_stream_seq_offset: 0,\n    };\n\n    debug!(session_id = session.id, \"attaching to session\");\n    let attachment = client\n        .attach_session(\n            session.id,\n            attachment_config.clone(),\n            delegate.clone(),\n            DEFAULT_REQUEST_TIMEOUT,\n        )\n        .block_on()?;\n\n    Ok(AttachmentWindow {\n        configured_resolution: args.resolution,\n        configured_framerate: args.framerate,\n        configured_ui_scale: args.ui_scale,\n\n        window,\n        attachment,\n        attachment_config,\n        delegate,\n\n        session,\n\n        video_stream,\n        audio_stream,\n\n        renderer,\n        window_width: window_size.width,\n        window_height: window_size.height,\n        window_ui_scale,\n\n        minimized: false,\n        next_frame: now + MAX_FRAME_TIME,\n        last_frame_received: now,\n        resize_cooldown: None,\n\n        needs_refresh: None,\n        refresh_cooldown: None,\n\n        cursor_modifiers: winit::keyboard::ModifiersState::default(),\n        cursor_pos: None,\n\n        flash,\n        overlay,\n\n        stats_timer: now,\n\n        _vk: 
vk,\n    })\n}\n\nfn init_logging() -> anyhow::Result<()> {\n    if cfg!(feature = \"tracy\") {\n        use tracing_subscriber::layer::SubscriberExt;\n\n        let filter = tracing_subscriber::EnvFilter::builder()\n            .with_default_directive(tracing::level_filters::LevelFilter::INFO.into())\n            .from_env()?\n            .add_directive(\"mmclient=trace\".parse()?)\n            .add_directive(\"mm_client=trace\".parse()?)\n            .add_directive(\"mm_client_common=trace\".parse()?);\n\n        tracing::subscriber::set_global_default(\n            tracing_subscriber::registry()\n                .with(tracing_tracy::TracyLayer::default().with_filter(filter)),\n        )\n        .expect(\"setup tracy layer\");\n    } else if let Ok(env_filter) = tracing_subscriber::EnvFilter::try_from_default_env() {\n        tracing_subscriber::fmt().with_env_filter(env_filter).init();\n    } else {\n        let filter = tracing_subscriber::EnvFilter::builder()\n            .with_default_directive(tracing::level_filters::LevelFilter::INFO.into())\n            .from_env()?\n            .add_directive(\"mmclient=info\".parse()?)\n            .add_directive(\"mm_client=info\".parse()?)\n            .add_directive(\"mm_client_common=info\".parse()?);\n        tracing_subscriber::fmt().with_env_filter(filter).init();\n    }\n\n    // Squash ffmpeg logs.\n    unsafe {\n        ffmpeg_sys::av_log_set_level(ffmpeg_sys::AV_LOG_QUIET);\n        // TODO: the callback has to be variadic, which means using nightly\n        // rust.\n        // ffmpeg_sys::av_log_set_callback(Some(ffmpeg_log_callback))\n    }\n\n    Ok(())\n}\n\nfn determine_ui_scale(scale_factor: f64) -> client::pixel_scale::PixelScale {\n    let scale = match scale_factor {\n        x if x < 1.0 => client::pixel_scale::PixelScale::ONE,\n        _ => {\n            // Multiplying by 6/6 captures most possible fractional scales.\n            let numerator = (scale_factor * 6.0).round() as u32;\n            
let denominator = 6;\n            if numerator % denominator == 0 {\n                client::pixel_scale::PixelScale::new(numerator / denominator, 1)\n            } else {\n                client::pixel_scale::PixelScale::new(numerator, denominator)\n            }\n        }\n    };\n\n    if scale.is_fractional() {\n        let rounded = scale.round_up();\n        warn!(\n            requested = %scale,\n            using = %rounded,\n            \"fractional scale not supported, rounding up\"\n        );\n\n        return rounded;\n    }\n\n    scale\n}\n\nfn determine_resolution(resolution: Resolution, width: u32, height: u32) -> (u32, u32) {\n    match resolution {\n        Resolution::Auto => (width.next_multiple_of(2), height.next_multiple_of(2)),\n        Resolution::Height(h) => {\n            let h = std::cmp::min(h, height).next_multiple_of(2);\n            let w = (h * width / height).next_multiple_of(2);\n            (w, h)\n        }\n        Resolution::Custom(w, h) => (w, h),\n    }\n}\n\nfn filter_sessions(sessions: Vec<client::Session>, app: &str) -> Vec<client::Session> {\n    if let Ok(id) = app.parse::<u64>() {\n        return match sessions.into_iter().find(|s| s.id == id) {\n            Some(s) => vec![s],\n            None => vec![],\n        };\n    }\n\n    sessions\n        .into_iter()\n        .filter(|s| s.application_id == app)\n        .collect()\n}\n\nfn cmd_list_apps(client: &client::Client) -> anyhow::Result<()> {\n    let apps = client\n        .list_applications(DEFAULT_REQUEST_TIMEOUT)\n        .block_on()?;\n    if apps.is_empty() {\n        println!(\"No launchable applications found.\");\n        return Ok(());\n    }\n\n    let mut apps = apps\n        .into_iter()\n        .map(|app| {\n            let mut name = String::new();\n            for dir in &app.folder {\n                name.push_str(dir);\n                name.push('/');\n            }\n            name.push_str(&app.id);\n\n            (name, 
app.description)\n        })\n        .collect::<Vec<_>>();\n    apps.sort();\n\n    let mut tw = tabwriter::TabWriter::new(std::io::stdout()).padding(4);\n\n    use std::io::Write as _;\n    writeln!(&mut tw, \"Name\\tDescription\")?;\n    writeln!(&mut tw, \"----\\t-----------\")?;\n\n    for (name, desc) in apps {\n        if desc.len() <= 80 {\n            writeln!(&mut tw, \"{}\\t{}\", name, desc)?;\n        } else {\n            writeln!(&mut tw, \"{}\\t{}...\", name, &desc[..77])?;\n        }\n    }\n\n    tw.flush()?;\n    Ok(())\n}\n\nfn cmd_list_sessions(args: &Cli, client: &client::Client) -> anyhow::Result<()> {\n    let sessions = client.list_sessions(DEFAULT_REQUEST_TIMEOUT).block_on()?;\n    let sessions = if let Some(target) = args.app.as_ref() {\n        filter_sessions(sessions, target)\n    } else {\n        sessions\n    };\n\n    if sessions.is_empty() {\n        println!(\"No (matching) sessions found.\");\n        return Ok(());\n    }\n\n    let now = time::SystemTime::now();\n    let mut tw = tabwriter::TabWriter::new(std::io::stdout()).padding(4);\n\n    use std::io::Write as _;\n    writeln!(&mut tw, \"Session ID\\tApplication Name\\tRuntime\")?;\n    writeln!(&mut tw, \"----------\\t----------------\\t-------\")?;\n\n    for session in sessions {\n        let runtime = {\n            // Round to seconds.\n            let secs = now.duration_since(session.start)?.as_secs();\n            humantime::format_duration(time::Duration::from_secs(secs)).to_string()\n        };\n\n        writeln!(\n            &mut tw,\n            \"{}\\t{}\\t{}\",\n            session.id, session.application_id, runtime,\n        )?;\n    }\n\n    tw.flush()?;\n    Ok(())\n}\n\nfn cmd_kill(args: &Cli, client: &client::Client) -> anyhow::Result<()> {\n    let target = args.app.as_ref().unwrap();\n    let sessions = filter_sessions(\n        client.list_sessions(DEFAULT_REQUEST_TIMEOUT).block_on()?,\n        target,\n    );\n\n    if sessions.is_empty() {\n       
 println!(\"No (matching) sessions found.\");\n        return Ok(());\n    } else if sessions.len() > 1 {\n        bail!(\"Multiple sessions matched!\");\n    }\n\n    client\n        .end_session(sessions[0].id, DEFAULT_REQUEST_TIMEOUT)\n        .block_on()?;\n    Ok(())\n}\n"
  },
  {
    "path": "mm-client/src/cursor.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse mm_protocol as protocol;\nuse winit::window::{CursorIcon, CustomCursor, CustomCursorSource};\n\npub fn load_cursor_image(image: &[u8], hs_x: u32, hs_y: u32) -> anyhow::Result<CustomCursorSource> {\n    let cursor = image::load_from_memory_with_format(image, image::ImageFormat::Png)?;\n\n    let w = cursor.width().try_into()?;\n    let h = cursor.height().try_into()?;\n    let hs_x = hs_x.try_into()?;\n    let hs_y = hs_y.try_into()?;\n\n    Ok(CustomCursor::from_rgba(\n        cursor.to_rgba8().into_raw(),\n        w,\n        h,\n        hs_x,\n        hs_y,\n    )?)\n}\n\npub fn cursor_icon_from_proto(icon: protocol::update_cursor::CursorIcon) -> CursorIcon {\n    match icon {\n        protocol::update_cursor::CursorIcon::ContextMenu => CursorIcon::ContextMenu,\n        protocol::update_cursor::CursorIcon::Help => CursorIcon::Help,\n        protocol::update_cursor::CursorIcon::Pointer => CursorIcon::Pointer,\n        protocol::update_cursor::CursorIcon::Progress => CursorIcon::Progress,\n        protocol::update_cursor::CursorIcon::Wait => CursorIcon::Wait,\n        protocol::update_cursor::CursorIcon::Cell => CursorIcon::Cell,\n        protocol::update_cursor::CursorIcon::Crosshair => CursorIcon::Crosshair,\n        protocol::update_cursor::CursorIcon::Text => CursorIcon::Text,\n        protocol::update_cursor::CursorIcon::VerticalText => CursorIcon::VerticalText,\n        protocol::update_cursor::CursorIcon::Alias => CursorIcon::Alias,\n        protocol::update_cursor::CursorIcon::Copy => CursorIcon::Copy,\n        protocol::update_cursor::CursorIcon::Move => CursorIcon::Move,\n        protocol::update_cursor::CursorIcon::NoDrop => CursorIcon::NoDrop,\n        protocol::update_cursor::CursorIcon::NotAllowed => CursorIcon::NotAllowed,\n        protocol::update_cursor::CursorIcon::Grab => CursorIcon::Grab,\n        
protocol::update_cursor::CursorIcon::Grabbing => CursorIcon::Grabbing,\n        protocol::update_cursor::CursorIcon::EResize => CursorIcon::EResize,\n        protocol::update_cursor::CursorIcon::NResize => CursorIcon::NResize,\n        protocol::update_cursor::CursorIcon::NeResize => CursorIcon::NeResize,\n        protocol::update_cursor::CursorIcon::NwResize => CursorIcon::NwResize,\n        protocol::update_cursor::CursorIcon::SResize => CursorIcon::SResize,\n        protocol::update_cursor::CursorIcon::SeResize => CursorIcon::SeResize,\n        protocol::update_cursor::CursorIcon::SwResize => CursorIcon::SwResize,\n        protocol::update_cursor::CursorIcon::WResize => CursorIcon::WResize,\n        protocol::update_cursor::CursorIcon::EwResize => CursorIcon::EwResize,\n        protocol::update_cursor::CursorIcon::NsResize => CursorIcon::NsResize,\n        protocol::update_cursor::CursorIcon::NeswResize => CursorIcon::NeswResize,\n        protocol::update_cursor::CursorIcon::NwseResize => CursorIcon::NwseResize,\n        protocol::update_cursor::CursorIcon::ColResize => CursorIcon::ColResize,\n        protocol::update_cursor::CursorIcon::RowResize => CursorIcon::RowResize,\n        protocol::update_cursor::CursorIcon::AllScroll => CursorIcon::AllScroll,\n        protocol::update_cursor::CursorIcon::ZoomIn => CursorIcon::ZoomIn,\n        protocol::update_cursor::CursorIcon::ZoomOut => CursorIcon::ZoomOut,\n        _ => CursorIcon::Default,\n    }\n}\n"
  },
  {
    "path": "mm-client/src/delegate.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::sync::Arc;\n\nuse mm_client_common as client;\nuse tracing::error;\n\n// An implementation of client-common's AttachmentDelegate that converts\n// callbacks into winit events.\n#[derive(Debug)]\npub struct AttachmentProxy<T: From<AttachmentEvent> + std::fmt::Debug + Send + 'static>(\n    winit::event_loop::EventLoopProxy<T>,\n);\n\nimpl<T: From<AttachmentEvent> + std::fmt::Debug + Send + 'static> AttachmentProxy<T> {\n    pub fn new(proxy: winit::event_loop::EventLoopProxy<T>) -> Self {\n        Self(proxy)\n    }\n\n    fn proxy(&self, ev: AttachmentEvent) {\n        let _ = self.0.send_event(ev.into());\n    }\n}\n\npub enum AttachmentEvent {\n    VideoStreamStart(u64, client::VideoStreamParams),\n    VideoPacket(Arc<client::Packet>),\n    DroppedVideoPacket(client::DroppedPacket),\n    AudioStreamStart(u64, client::AudioStreamParams),\n    AudioPacket(Arc<client::Packet>),\n    UpdateCursor {\n        icon: client::input::CursorIcon,\n        image: Option<Vec<u8>>,\n        hotspot_x: u32,\n        hotspot_y: u32,\n    },\n    LockPointer(f64, f64),\n    ReleasePointer,\n    DisplayParamsChanged {\n        params: client::display_params::DisplayParams,\n        reattach_required: bool,\n    },\n    AttachmentEnded,\n}\n\nimpl std::fmt::Debug for AttachmentEvent {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        match self {\n            AttachmentEvent::VideoStreamStart(stream_seq, _) => {\n                write!(f, \"VideoStreamStart({})\", stream_seq)\n            }\n            AttachmentEvent::VideoPacket(packet) => {\n                write!(f, \"VideoPacket({}, {})\", packet.stream_seq(), packet.seq())\n            }\n            AttachmentEvent::DroppedVideoPacket(dropped) => {\n                write!(\n                    f,\n                    \"DroppedVideoPacket({}, {}, layer={})\",\n                    
dropped.stream_seq, dropped.seq, dropped.hierarchical_layer\n                )\n            }\n            AttachmentEvent::AudioStreamStart(stream_seq, _) => {\n                write!(f, \"AudioStreamStart({})\", stream_seq)\n            }\n            AttachmentEvent::AudioPacket(packet) => {\n                write!(f, \"AudioPacket({}, {})\", packet.stream_seq(), packet.seq())\n            }\n            AttachmentEvent::UpdateCursor { icon, image, .. } => {\n                let len = image.as_ref().map(|img| img.len()).unwrap_or_default();\n                write!(f, \"UpdateCursor({icon:?} image_len={len})\",)\n            }\n            AttachmentEvent::LockPointer(x, y) => {\n                write!(f, \"LockPointer({}, {})\", x, y)\n            }\n            AttachmentEvent::ReleasePointer => {\n                write!(f, \"ReleasePointer()\")\n            }\n            AttachmentEvent::DisplayParamsChanged {\n                reattach_required, ..\n            } => {\n                write!(f, \"DisplayParamsChanged(reattach={})\", reattach_required)\n            }\n            AttachmentEvent::AttachmentEnded => {\n                write!(f, \"AttachmentEnded\")\n            }\n        }\n    }\n}\n\nimpl<T: From<AttachmentEvent> + std::fmt::Debug + Send + 'static> client::AttachmentDelegate\n    for AttachmentProxy<T>\n{\n    fn video_stream_start(&self, stream_seq: u64, params: client::VideoStreamParams) {\n        self.proxy(AttachmentEvent::VideoStreamStart(stream_seq, params))\n    }\n\n    fn video_packet(&self, packet: Arc<client::Packet>) {\n        self.proxy(AttachmentEvent::VideoPacket(packet))\n    }\n\n    fn dropped_video_packet(&self, dropped: client::DroppedPacket) {\n        self.proxy(AttachmentEvent::DroppedVideoPacket(dropped))\n    }\n\n    fn audio_stream_start(&self, stream_seq: u64, params: client::AudioStreamParams) {\n        self.proxy(AttachmentEvent::AudioStreamStart(stream_seq, params))\n    }\n\n    fn audio_packet(&self, 
packet: Arc<client::Packet>) {\n        self.proxy(AttachmentEvent::AudioPacket(packet))\n    }\n\n    fn update_cursor(\n        &self,\n        icon: client::input::CursorIcon,\n        image: Option<Vec<u8>>,\n        hotspot_x: u32,\n        hotspot_y: u32,\n    ) {\n        self.proxy(AttachmentEvent::UpdateCursor {\n            icon,\n            image,\n            hotspot_x,\n            hotspot_y,\n        })\n    }\n\n    fn lock_pointer(&self, x: f64, y: f64) {\n        self.proxy(AttachmentEvent::LockPointer(x, y))\n    }\n\n    fn release_pointer(&self) {\n        self.proxy(AttachmentEvent::ReleasePointer)\n    }\n\n    fn display_params_changed(\n        &self,\n        params: client::display_params::DisplayParams,\n        reattach_required: bool,\n    ) {\n        self.proxy(AttachmentEvent::DisplayParamsChanged {\n            params,\n            reattach_required,\n        })\n    }\n\n    fn error(&self, err: client::ClientError) {\n        error!(\"error: {err:?}\");\n        self.proxy(AttachmentEvent::AttachmentEnded)\n    }\n\n    fn attachment_ended(&self) {\n        self.proxy(AttachmentEvent::AttachmentEnded)\n    }\n}\n"
  },
  {
    "path": "mm-client/src/flash.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::time;\n\nconst FLASH_DURATION: time::Duration = time::Duration::from_millis(1350);\nconst FADE_OUT_AFTER: time::Duration = time::Duration::from_millis(1000);\n\npub struct Flash {\n    message: Option<(String, time::Instant)>,\n}\n\nimpl Flash {\n    pub fn new() -> Self {\n        Self { message: None }\n    }\n\n    pub fn set_message(&mut self, s: &str) {\n        self.message = Some((s.to_owned(), time::Instant::now()));\n    }\n\n    pub fn build(&mut self, ui: &imgui::Ui) -> anyhow::Result<()> {\n        if self.message.is_none() {\n            return Ok(());\n        }\n\n        let start = self.message.as_ref().unwrap().1;\n        if start.elapsed() > FLASH_DURATION {\n            self.message = None;\n            return Ok(());\n        }\n\n        let alpha = if start.elapsed() > FADE_OUT_AFTER {\n            let remaining = FLASH_DURATION - start.elapsed();\n            remaining.as_secs_f32() / (FLASH_DURATION - FADE_OUT_AFTER).as_secs_f32()\n        } else {\n            1.0\n        };\n\n        // Exponentially ease the alpha.\n        let alpha = alpha * alpha;\n\n        let _style_alpha = ui.push_style_var(imgui::StyleVar::Alpha(alpha));\n        let _style_border = ui.push_style_var(imgui::StyleVar::WindowBorderSize(0.0));\n\n        let [_width, height] = ui.io().display_size;\n\n        if let Some(_window) = ui\n            .window(\"flash\")\n            .position([0.0, height], imgui::Condition::Always)\n            .position_pivot([0.0, 1.0])\n            .no_decoration()\n            .no_nav()\n            .movable(false)\n            .always_auto_resize(true)\n            .bg_alpha(0.5 * alpha)\n            .begin()\n        {\n            ui.set_window_font_scale(2.0);\n            ui.text(&self.message.as_ref().unwrap().0);\n        }\n\n        Ok(())\n    }\n}\n\nimpl Default for Flash {\n    fn default() -> Self {\n    
    Self::new()\n    }\n}\n"
  },
  {
    "path": "mm-client/src/font.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse font_kit::{\n    family_name::FamilyName,\n    font::Font,\n    properties::{Properties, Weight},\n    source::SystemSource,\n};\nuse tracing::debug;\n\npub fn load_ui_font() -> anyhow::Result<Font> {\n    let font = SystemSource::new()\n        .select_best_match(\n            &[FamilyName::Monospace, FamilyName::SansSerif],\n            Properties::new().weight(Weight::THIN),\n        )?\n        .load()?;\n\n    debug!(\"font: {:?}\", font);\n\n    Ok(font)\n}\n\n// #[cfg(target_os = \"macos\")]\n// pub fn load_ui_font() -> anyhow::Result<Font> {\n//     let ctf = core_text::font::new_ui_font_for_language();\n\n//     let font = unsafe { Font::from_native_font(ctf) };\n//     Ok(font)\n// }\n"
  },
  {
    "path": "mm-client/src/gamepad.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{collections::HashMap, time};\n\nuse anyhow::{anyhow, bail};\nuse gilrs::{Event, EventType};\nuse mm_client_common::input::{\n    Gamepad, GamepadAxis, GamepadButton, GamepadButtonState, GamepadLayout,\n};\nuse tracing::{debug, error, trace};\n\n#[derive(Debug, Clone)]\npub enum GamepadEvent {\n    Available(Gamepad),\n    Unavailable(u64),\n    Input(u64, GamepadButton, GamepadButtonState),\n    Motion(u64, GamepadAxis, f64),\n}\n\n#[derive(Debug, Default, Clone, Copy)]\nstruct RemoteGamepad {\n    id: u64,\n    dpad: DpadState,\n}\n\n// Some gamepads treat the dpad as an axis, but we treat it as a\n// bunch of buttons. Therefore, it requires a bit of special handling.\n#[derive(Debug, Default, Clone, Copy)]\nstruct DpadState {\n    up: bool,\n    down: bool,\n    left: bool,\n    right: bool,\n}\n\nimpl RemoteGamepad {\n    fn update_dpad<T>(\n        &mut self,\n        axis: gilrs::Axis,\n        value: f32,\n        proxy: &winit::event_loop::EventLoopProxy<T>,\n    ) -> Result<(), winit::event_loop::EventLoopClosed<T>>\n    where\n        T: From<GamepadEvent> + Send,\n    {\n        let set_pressed = |state: &mut bool, button| {\n            if !*state {\n                proxy.send_event(\n                    GamepadEvent::Input(self.id, button, GamepadButtonState::Pressed).into(),\n                )?;\n            }\n\n            *state = true;\n            Ok(())\n        };\n\n        let set_released = |state: &mut bool, button| {\n            if *state {\n                proxy.send_event(\n                    GamepadEvent::Input(self.id, button, GamepadButtonState::Released).into(),\n                )?;\n            }\n\n            *state = false;\n            Ok(())\n        };\n\n        match axis {\n            gilrs::Axis::DPadX if value == 0.0 => {\n                set_released(&mut self.dpad.left, GamepadButton::DpadLeft)?;\n         
       set_released(&mut self.dpad.right, GamepadButton::DpadRight)?;\n            }\n            gilrs::Axis::DPadX if value < 0.0 => {\n                set_pressed(&mut self.dpad.left, GamepadButton::DpadLeft)?;\n                set_released(&mut self.dpad.right, GamepadButton::DpadRight)?;\n            }\n            gilrs::Axis::DPadX if value > 0.0 => {\n                set_released(&mut self.dpad.left, GamepadButton::DpadLeft)?;\n                set_pressed(&mut self.dpad.right, GamepadButton::DpadRight)?;\n            }\n            gilrs::Axis::DPadY if value == 0.0 => {\n                set_released(&mut self.dpad.up, GamepadButton::DpadUp)?;\n                set_released(&mut self.dpad.down, GamepadButton::DpadDown)?;\n            }\n            gilrs::Axis::DPadY if value < 0.0 => {\n                set_pressed(&mut self.dpad.up, GamepadButton::DpadUp)?;\n                set_released(&mut self.dpad.down, GamepadButton::DpadDown)?;\n            }\n            gilrs::Axis::DPadY if value > 0.0 => {\n                set_released(&mut self.dpad.up, GamepadButton::DpadUp)?;\n                set_pressed(&mut self.dpad.down, GamepadButton::DpadDown)?;\n            }\n            _ => unreachable!(),\n        };\n\n        Ok(())\n    }\n}\n\n/// Spawns a thread to watch for gamepad events. 
Returns the initial list of\n/// available gamepads.\npub fn spawn_gamepad_monitor<T>(\n    proxy: winit::event_loop::EventLoopProxy<T>,\n) -> anyhow::Result<Vec<Gamepad>>\nwhere\n    T: From<GamepadEvent> + Send,\n{\n    let mut gilrs =\n        gilrs::Gilrs::new().map_err(|e| anyhow!(\"failed to create gilrs context: {e:?}\"))?;\n\n    let (initial_tx, initial_rx) = oneshot::channel();\n\n    std::thread::spawn(move || {\n        let mut remote_gamepads = HashMap::new();\n        let mut initial = Vec::new();\n\n        for (id, pad) in gilrs.gamepads() {\n            let protocol_id = gamepad_id(pad.uuid());\n            let layout = layout(pad);\n\n            remote_gamepads.insert(\n                id,\n                RemoteGamepad {\n                    id: protocol_id,\n                    ..Default::default()\n                },\n            );\n\n            initial.push(Gamepad {\n                id: protocol_id,\n                layout,\n            });\n        }\n\n        if initial_tx.send(initial).is_err() {\n            return;\n        }\n\n        loop {\n            let Some(Event { id, event: ev, .. 
}) = gilrs.next_event_blocking(None) else {\n                continue;\n            };\n\n            trace!(?id, ?ev, \"gamepad event\");\n\n            if let EventType::Disconnected = ev {\n                if let Some(pad) = remote_gamepads.remove(&id) {\n                    if proxy\n                        .send_event(GamepadEvent::Unavailable(pad.id).into())\n                        .is_err()\n                    {\n                        break;\n                    }\n                }\n\n                continue;\n            };\n\n            if let EventType::Connected = ev {\n                let Some(pad) = gilrs.connected_gamepad(id) else {\n                    error!(?ev, \"no gamepad matching event\");\n                    continue;\n                };\n\n                let protocol_id = gamepad_id(pad.uuid());\n                remote_gamepads.insert(\n                    id,\n                    RemoteGamepad {\n                        id: protocol_id,\n                        ..Default::default()\n                    },\n                );\n\n                if proxy\n                    .send_event(\n                        GamepadEvent::Available(Gamepad {\n                            id: protocol_id,\n                            layout: layout(pad),\n                        })\n                        .into(),\n                    )\n                    .is_err()\n                {\n                    break;\n                }\n\n                continue;\n            }\n\n            let pad = remote_gamepads.get_mut(&id).unwrap();\n            if handle_gilrs_event(&proxy, pad, ev).is_err() {\n                break;\n            };\n        }\n    });\n\n    match initial_rx.recv_timeout(time::Duration::from_secs(1)) {\n        Ok(initial) => Ok(initial),\n        Err(_) => bail!(\"gamepad monitor thread panicked\"),\n    }\n}\n\nfn handle_gilrs_event<T>(\n    proxy: &winit::event_loop::EventLoopProxy<T>,\n    pad: &mut RemoteGamepad,\n    
ev: gilrs::EventType,\n) -> Result<(), winit::event_loop::EventLoopClosed<T>>\nwhere\n    T: From<GamepadEvent> + Send,\n{\n    let gev = match ev {\n        EventType::ButtonPressed(button, _) => {\n            input_event(pad.id, button, GamepadButtonState::Pressed)\n        }\n        EventType::ButtonReleased(button, _) => {\n            input_event(pad.id, button, GamepadButtonState::Released)\n        }\n        EventType::AxisChanged(axis, mut value, _) => {\n            // Some gamepads treat the dpad as an axis. The protocol\n            // treats it as a bunch of buttons.\n            if matches!(axis, gilrs::Axis::DPadX | gilrs::Axis::DPadY) {\n                pad.update_dpad(axis, value, proxy)?;\n                return Ok(());\n            }\n\n            let Some(axis) = girls_axis_to_proto(axis) else {\n                debug!(?ev, \"skipping unknown axis event\");\n                return Ok(());\n            };\n\n            // Gilrs treats 1.0 as up.\n            if matches!(axis, GamepadAxis::LeftY | GamepadAxis::RightY) {\n                value *= -1.0;\n            }\n\n            Some(GamepadEvent::Motion(pad.id, axis, value as _))\n        }\n        EventType::ButtonChanged(button, value, _) => {\n            // Not sure why gilrs doesn't consider this an axis.\n            match button {\n                gilrs::Button::LeftTrigger2 => Some(GamepadEvent::Motion(\n                    pad.id,\n                    GamepadAxis::LeftTrigger,\n                    value.max(0.0) as _,\n                )),\n                gilrs::Button::RightTrigger2 => Some(GamepadEvent::Motion(\n                    pad.id,\n                    GamepadAxis::RightTrigger,\n                    value.max(0.0) as _,\n                )),\n                _ => None,\n            }\n        }\n        EventType::Dropped => None,\n        // TODO: do we need these?\n        EventType::ButtonRepeated(_, _) => None,\n        // Handled above.\n        EventType::Connected 
| EventType::Disconnected => unreachable!(),\n    };\n\n    if let Some(ev) = gev {\n        proxy.send_event(ev.into())?;\n    } else {\n        debug!(?ev, \"ignoring gamepad event\")\n    }\n\n    Ok(())\n}\n\nfn input_event(\n    protocol_id: u64,\n    button: gilrs::Button,\n    state: GamepadButtonState,\n) -> Option<GamepadEvent> {\n    gilrs_button_to_proto(button).map(|button| GamepadEvent::Input(protocol_id, button, state))\n}\n\nfn gamepad_id(uuid: [u8; 16]) -> u64 {\n    // Truncating a UUID is squicky, but serves our purposes fine.\n    let (_, last_64) = uuid.split_at(8);\n    let last_64: [u8; 8] = last_64.try_into().unwrap();\n    u64::from_ne_bytes(last_64)\n}\n\nfn layout(pad: gilrs::Gamepad) -> GamepadLayout {\n    match pad.vendor_id() {\n        Some(0x54c) => GamepadLayout::SonyDualshock,\n        _ => GamepadLayout::GenericDualStick,\n    }\n}\n\nfn girls_axis_to_proto(axis: gilrs::Axis) -> Option<GamepadAxis> {\n    let axis = match axis {\n        gilrs::Axis::LeftStickX => GamepadAxis::LeftX,\n        gilrs::Axis::LeftStickY => GamepadAxis::LeftY,\n        gilrs::Axis::RightStickX => GamepadAxis::RightX,\n        gilrs::Axis::RightStickY => GamepadAxis::RightY,\n        gilrs::Axis::LeftZ => GamepadAxis::RightTrigger,\n        gilrs::Axis::RightZ => GamepadAxis::RightTrigger,\n        _ => return None,\n    };\n\n    Some(axis)\n}\n\nfn gilrs_button_to_proto(button: gilrs::Button) -> Option<GamepadButton> {\n    let button = match button {\n        gilrs::Button::South => GamepadButton::South,\n        gilrs::Button::East => GamepadButton::East,\n        gilrs::Button::North => GamepadButton::North,\n        gilrs::Button::West => GamepadButton::West,\n        gilrs::Button::C => GamepadButton::C,\n        gilrs::Button::Z => GamepadButton::Z,\n        gilrs::Button::LeftTrigger => GamepadButton::ShoulderLeft,\n        gilrs::Button::LeftTrigger2 => GamepadButton::TriggerLeft,\n        gilrs::Button::RightTrigger => 
GamepadButton::ShoulderRight,\n        gilrs::Button::RightTrigger2 => GamepadButton::TriggerRight,\n        gilrs::Button::Select => GamepadButton::Select,\n        gilrs::Button::Start => GamepadButton::Start,\n        gilrs::Button::Mode => GamepadButton::Logo,\n        gilrs::Button::LeftThumb => GamepadButton::JoystickLeft,\n        gilrs::Button::RightThumb => GamepadButton::JoystickRight,\n        gilrs::Button::DPadUp => GamepadButton::DpadUp,\n        gilrs::Button::DPadDown => GamepadButton::DpadDown,\n        gilrs::Button::DPadLeft => GamepadButton::DpadLeft,\n        gilrs::Button::DPadRight => GamepadButton::DpadRight,\n        _ => return None,\n    };\n\n    Some(button)\n}\n"
  },
  {
    "path": "mm-client/src/keys.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse mm_protocol::keyboard_input::Key;\nuse winit::keyboard::KeyCode;\n\npub fn winit_key_to_proto(key: KeyCode) -> Key {\n    match key {\n        KeyCode::Backquote => Key::Backquote,\n        KeyCode::Backslash => Key::Backslash,\n        KeyCode::BracketLeft => Key::BracketLeft,\n        KeyCode::BracketRight => Key::BracketRight,\n        KeyCode::Comma => Key::Comma,\n        KeyCode::Digit0 => Key::Digit0,\n        KeyCode::Digit1 => Key::Digit1,\n        KeyCode::Digit2 => Key::Digit2,\n        KeyCode::Digit3 => Key::Digit3,\n        KeyCode::Digit4 => Key::Digit4,\n        KeyCode::Digit5 => Key::Digit5,\n        KeyCode::Digit6 => Key::Digit6,\n        KeyCode::Digit7 => Key::Digit7,\n        KeyCode::Digit8 => Key::Digit8,\n        KeyCode::Digit9 => Key::Digit9,\n        KeyCode::Equal => Key::Equal,\n        KeyCode::IntlBackslash => Key::IntlBackslash,\n        KeyCode::IntlRo => Key::IntlRo,\n        KeyCode::IntlYen => Key::IntlYen,\n        KeyCode::KeyA => Key::A,\n        KeyCode::KeyB => Key::B,\n        KeyCode::KeyC => Key::C,\n        KeyCode::KeyD => Key::D,\n        KeyCode::KeyE => Key::E,\n        KeyCode::KeyF => Key::F,\n        KeyCode::KeyG => Key::G,\n        KeyCode::KeyH => Key::H,\n        KeyCode::KeyI => Key::I,\n        KeyCode::KeyJ => Key::J,\n        KeyCode::KeyK => Key::K,\n        KeyCode::KeyL => Key::L,\n        KeyCode::KeyM => Key::M,\n        KeyCode::KeyN => Key::N,\n        KeyCode::KeyO => Key::O,\n        KeyCode::KeyP => Key::P,\n        KeyCode::KeyQ => Key::Q,\n        KeyCode::KeyR => Key::R,\n        KeyCode::KeyS => Key::S,\n        KeyCode::KeyT => Key::T,\n        KeyCode::KeyU => Key::U,\n        KeyCode::KeyV => Key::V,\n        KeyCode::KeyW => Key::W,\n        KeyCode::KeyX => Key::X,\n        KeyCode::KeyY => Key::Y,\n        KeyCode::KeyZ => Key::Z,\n        KeyCode::Minus => Key::Minus,\n        
KeyCode::Period => Key::Period,\n        KeyCode::Quote => Key::Quote,\n        KeyCode::Semicolon => Key::Semicolon,\n        KeyCode::Slash => Key::Slash,\n        KeyCode::AltLeft => Key::AltLeft,\n        KeyCode::AltRight => Key::AltRight,\n        KeyCode::Backspace => Key::Backspace,\n        KeyCode::CapsLock => Key::CapsLock,\n        KeyCode::ContextMenu => Key::ContextMenu,\n        KeyCode::ControlLeft => Key::ControlLeft,\n        KeyCode::ControlRight => Key::ControlRight,\n        KeyCode::Enter => Key::Enter,\n        KeyCode::SuperLeft => Key::MetaLeft,\n        KeyCode::SuperRight => Key::MetaRight,\n        KeyCode::ShiftLeft => Key::ShiftLeft,\n        KeyCode::ShiftRight => Key::ShiftRight,\n        KeyCode::Space => Key::Space,\n        KeyCode::Tab => Key::Tab,\n        KeyCode::Convert => Key::Convert,\n        KeyCode::KanaMode => Key::KanaMode,\n        KeyCode::Lang1 => Key::Lang1,\n        KeyCode::Lang2 => Key::Lang2,\n        KeyCode::Lang3 => Key::Lang3,\n        KeyCode::Lang4 => Key::Lang4,\n        KeyCode::Lang5 => Key::Lang5,\n        KeyCode::NonConvert => Key::NonConvert,\n        KeyCode::Delete => Key::Delete,\n        KeyCode::End => Key::End,\n        KeyCode::Help => Key::Help,\n        KeyCode::Home => Key::Home,\n        KeyCode::Insert => Key::Insert,\n        KeyCode::PageDown => Key::PageDown,\n        KeyCode::PageUp => Key::PageUp,\n        KeyCode::ArrowDown => Key::ArrowDown,\n        KeyCode::ArrowLeft => Key::ArrowLeft,\n        KeyCode::ArrowRight => Key::ArrowRight,\n        KeyCode::ArrowUp => Key::ArrowUp,\n        KeyCode::NumLock => Key::NumLock,\n        KeyCode::Numpad0 => Key::Numpad0,\n        KeyCode::Numpad1 => Key::Numpad1,\n        KeyCode::Numpad2 => Key::Numpad2,\n        KeyCode::Numpad3 => Key::Numpad3,\n        KeyCode::Numpad4 => Key::Numpad4,\n        KeyCode::Numpad5 => Key::Numpad5,\n        KeyCode::Numpad6 => Key::Numpad6,\n        KeyCode::Numpad7 => Key::Numpad7,\n        
KeyCode::Numpad8 => Key::Numpad8,\n        KeyCode::Numpad9 => Key::Numpad9,\n        KeyCode::NumpadAdd => Key::NumpadAdd,\n        KeyCode::NumpadBackspace => Key::NumpadBackspace,\n        KeyCode::NumpadClear => Key::NumpadClear,\n        KeyCode::NumpadClearEntry => Key::NumpadClearEntry,\n        KeyCode::NumpadComma => Key::NumpadComma,\n        KeyCode::NumpadDecimal => Key::NumpadDecimal,\n        KeyCode::NumpadDivide => Key::NumpadDivide,\n        KeyCode::NumpadEnter => Key::NumpadEnter,\n        KeyCode::NumpadEqual => Key::NumpadEqual,\n        KeyCode::NumpadHash => Key::NumpadHash,\n        KeyCode::NumpadMemoryAdd => Key::NumpadMemoryAdd,\n        KeyCode::NumpadMemoryClear => Key::NumpadMemoryClear,\n        KeyCode::NumpadMemoryRecall => Key::NumpadMemoryRecall,\n        KeyCode::NumpadMemoryStore => Key::NumpadMemoryStore,\n        KeyCode::NumpadMultiply => Key::NumpadMultiply,\n        KeyCode::NumpadParenLeft => Key::NumpadParenLeft,\n        KeyCode::NumpadParenRight => Key::NumpadParenRight,\n        KeyCode::NumpadSubtract => Key::NumpadSubtract,\n        KeyCode::Escape => Key::Escape,\n        KeyCode::F1 => Key::F1,\n        KeyCode::F2 => Key::F2,\n        KeyCode::F3 => Key::F3,\n        KeyCode::F4 => Key::F4,\n        KeyCode::F5 => Key::F5,\n        KeyCode::F6 => Key::F6,\n        KeyCode::F7 => Key::F7,\n        KeyCode::F8 => Key::F8,\n        KeyCode::F9 => Key::F9,\n        KeyCode::F10 => Key::F10,\n        KeyCode::F11 => Key::F11,\n        KeyCode::F12 => Key::F12,\n        KeyCode::Fn => Key::Fn,\n        KeyCode::FnLock => Key::FnLock,\n        KeyCode::PrintScreen => Key::PrintScreen,\n        KeyCode::ScrollLock => Key::ScrollLock,\n        KeyCode::Pause => Key::Pause,\n        KeyCode::Hiragana => Key::Hiragana,\n        KeyCode::Katakana => Key::Katakana,\n        _ => Key::Unknown,\n    }\n}\n"
  },
  {
    "path": "mm-client/src/lib.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\npub mod audio;\npub mod cursor;\npub mod delegate;\npub mod flash;\npub mod font;\npub mod gamepad;\npub mod keys;\npub mod overlay;\npub mod render;\npub mod stats;\npub mod video;\npub mod vulkan;\n"
  },
  {
    "path": "mm-client/src/overlay.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{collections::VecDeque, vec};\n\nuse mm_protocol as protocol;\n\nuse crate::stats::STATS;\n\npub struct Overlay {\n    streaming_width: u32,\n    streaming_height: u32,\n    codec: protocol::VideoCodec,\n\n    video_latency_measurements: VecDeque<f32>,\n\n    reposition: bool,\n}\n\nimpl Overlay {\n    pub fn new(fps: u32) -> Self {\n        Self {\n            streaming_width: 0,\n            streaming_height: 0,\n            codec: protocol::VideoCodec::H264,\n\n            video_latency_measurements: VecDeque::from(vec![0.0; 10 * fps as usize]),\n\n            reposition: true,\n        }\n    }\n\n    pub fn reposition(&mut self) {\n        self.reposition = true;\n    }\n\n    pub fn update_params(&mut self, params: &protocol::Attached) {\n        self.streaming_width = params.streaming_resolution.as_ref().unwrap().width;\n        self.streaming_height = params.streaming_resolution.as_ref().unwrap().height;\n        self.codec = params.video_codec();\n    }\n\n    pub fn build(&mut self, ui: &imgui::Ui) -> anyhow::Result<()> {\n        // Record a latency measurement.\n        let latency = STATS.video_latency();\n        self.video_latency_measurements.rotate_left(1);\n        *self.video_latency_measurements.back_mut().unwrap() = latency;\n\n        let [width, height] = ui.io().display_size;\n        let [scale_x, scale_y] = ui.io().display_framebuffer_scale;\n\n        let condition = if self.reposition {\n            self.reposition = false;\n            imgui::Condition::Always\n        } else {\n            imgui::Condition::Once\n        };\n\n        let _padding = ui.push_style_var(imgui::StyleVar::WindowPadding([8.0, 8.0]));\n        let _rounding = ui.push_style_var(imgui::StyleVar::WindowRounding(4.0));\n        let _frame_rounding = ui.push_style_var(imgui::StyleVar::FrameRounding(4.0));\n\n        if let Some(_window) = ui\n           
 .window(\"overlay\")\n            .position([width - 16.0, 16.0], condition)\n            .position_pivot([1.0, 0.0])\n            .title_bar(false)\n            .scroll_bar(false)\n            .no_nav()\n            .movable(true)\n            .resizable(true)\n            .bg_alpha(0.8)\n            .begin()\n        {\n            ui.set_window_font_scale(1.5);\n\n            let _stretch = ui.push_item_width(-1.0);\n            if let Some(_table) =\n                ui.begin_table_with_flags(\"stats\", 2, imgui::TableFlags::SIZING_FIXED_FIT)\n            {\n                stat_row(\n                    ui,\n                    \"streaming res:\",\n                    format!(\"{}x{}\", self.streaming_width, self.streaming_height),\n                );\n\n                stat_row(\n                    ui,\n                    \"render res:\",\n                    format!(\"{}x{}\", width * scale_x, height * scale_y),\n                );\n\n                stat_row(\n                    ui,\n                    \"codec:\",\n                    match self.codec {\n                        protocol::VideoCodec::H264 => \"H.264\",\n                        protocol::VideoCodec::H265 => \"H.265\",\n                        protocol::VideoCodec::Av1 => \"AV1\",\n                        _ => \"unknown\",\n                    },\n                );\n\n                stat_row(\n                    ui,\n                    \"bitrate:\",\n                    format!(\"{:.1} mbps\", STATS.video_bitrate() / 1_000_000.0),\n                );\n            }\n\n            let [width, height] = ui.window_size();\n            let cursor_pos = ui.cursor_pos();\n\n            let measurements = self.video_latency_measurements.make_contiguous();\n            let max_latency = measurements.iter().copied().reduce(f32::max).unwrap();\n            let scale = (max_latency.round() as u32).next_multiple_of(10) * 2;\n\n            ui.plot_lines(\"\", measurements)\n                
.scale_min(0.0)\n                .scale_max(scale as f32)\n                .graph_size([width - 16.0, 50.0_f32.max(height - cursor_pos[1] - 8.0)])\n                .overlay_text(format!(\"latency: {:.1} ms\", latency).as_str())\n                .build();\n        }\n\n        Ok(())\n    }\n}\n\nfn stat_row(ui: &imgui::Ui, label: impl AsRef<str>, value: impl AsRef<str>) {\n    ui.table_next_row();\n    ui.table_next_column();\n    let cursor_pos = ui.cursor_pos();\n    let pos_x = cursor_pos[0] + ui.column_width(0) - ui.calc_text_size(&label)[0];\n    if pos_x > cursor_pos[0] {\n        ui.set_cursor_pos([pos_x, cursor_pos[1]]);\n    }\n\n    ui.text_colored([0.6, 0.6, 0.6, 1.0], label);\n    ui.table_next_column();\n    ui.text(value);\n}\n"
  },
  {
    "path": "mm-client/src/render.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\n#![allow(clippy::missing_safety_doc)]\n\nuse std::sync::Arc;\nuse std::time;\n\nuse anyhow::{anyhow, Context, Result};\nuse ash::vk;\nuse cstr::cstr;\nuse imgui_rs_vulkan_renderer as imgui_vulkan;\nuse tracing::debug;\nuse tracing::instrument;\nuse tracing::trace;\nuse tracing::trace_span;\nuse tracing::warn;\nuse tracy_client::span_location;\n\nuse crate::font;\nuse crate::video::*;\nuse crate::vulkan::*;\n\nconst FONT_SIZE: f32 = 8.0;\n\n// Matches the definition in render.slang.\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n#[repr(u32)]\nenum TextureColorSpace {\n    Bt709 = 0,\n    Bt2020Pq = 1,\n}\n\nimpl From<crate::video::ColorSpace> for TextureColorSpace {\n    fn from(cs: crate::video::ColorSpace) -> Self {\n        match cs {\n            crate::video::ColorSpace::Bt709 => TextureColorSpace::Bt709,\n            crate::video::ColorSpace::Bt2020Pq => TextureColorSpace::Bt2020Pq,\n        }\n    }\n}\n\n#[derive(Copy, Clone, Debug)]\n#[repr(C)]\nstruct PushConstants {\n    aspect: glam::Vec2,\n    texture_color_space: TextureColorSpace,\n    output_color_space: vk::ColorSpaceKHR,\n}\n\npub struct Renderer {\n    width: u32,\n    height: u32,\n    scale_factor: f64,\n    hdr_mode: bool,\n\n    imgui: imgui::Context,\n    imgui_platform: imgui_winit_support::WinitPlatform,\n    imgui_font: font_kit::font::Font,\n    imgui_fontid_big: imgui::FontId,\n    imgui_time: time::Instant,\n\n    swapchain: Option<Swapchain>,\n    swapchain_dirty: bool,\n\n    new_video_texture: Option<(Arc<VkImage>, VideoStreamParams)>,\n\n    vk: Arc<VkContext>,\n    window: Arc<winit::window::Window>,\n}\n\nstruct VideoTexture {\n    image: Arc<VkImage>,\n    view: vk::ImageView,\n    color_space: TextureColorSpace,\n}\n\nstruct Swapchain {\n    swapchain: vk::SwapchainKHR,\n    frames: Vec<InFlightFrame>,\n    present_images: Vec<SwapImage>,\n    current_frame: usize,\n\n    
sampler_conversion: vk::SamplerYcbcrConversion,\n    sampler: vk::Sampler,\n    bound_video_texture: Option<VideoTexture>,\n\n    /// The normalized relationship between the output and the video texture,\n    /// after scaling. For example, a 500x500 video texture in a 1000x500\n    /// swapchain would have the aspect (2.0, 1.0), as would a 250x250 texture.\n    aspect: (f64, f64),\n    surface_format: vk::SurfaceFormatKHR,\n    descriptor_set_layout: vk::DescriptorSetLayout,\n    descriptor_pool: vk::DescriptorPool,\n    pipeline_layout: vk::PipelineLayout,\n    pipeline: vk::Pipeline,\n\n    imgui_renderer: imgui_vulkan::Renderer,\n}\n\nstruct InFlightFrame {\n    render_cb: vk::CommandBuffer,\n    render_fence: vk::Fence,\n    image_acquired_sema: vk::Semaphore,\n    render_complete_sema: vk::Semaphore,\n    descriptor_set: vk::DescriptorSet,\n    ts_pool: VkTimestampQueryPool,\n    tracy_span: Option<tracy_client::GpuSpan>,\n}\n\nstruct SwapImage {\n    image: vk::Image,\n    view: vk::ImageView,\n}\n\nimpl Renderer {\n    pub fn new(\n        vk: Arc<VkContext>,\n        window: Arc<winit::window::Window>,\n        hdr_mode: bool,\n    ) -> Result<Self> {\n        let window_size = window.inner_size();\n        let scale_factor = window.scale_factor();\n\n        let mut imgui = imgui::Context::create();\n        imgui.set_ini_filename(None);\n\n        let mut imgui_platform = imgui_winit_support::WinitPlatform::new(&mut imgui);\n        imgui_platform.attach_window(\n            imgui.io_mut(),\n            &window,\n            imgui_winit_support::HiDpiMode::Default,\n        );\n\n        let imgui_font = font::load_ui_font()?;\n        let imgui_fontid_big = import_imgui_font(&mut imgui, &imgui_font, FONT_SIZE, scale_factor)?;\n\n        let mut renderer = Self {\n            width: window_size.width,\n            height: window_size.height,\n            scale_factor,\n            hdr_mode,\n            window,\n            imgui,\n            
imgui_platform,\n            imgui_font,\n            imgui_fontid_big,\n            imgui_time: time::Instant::now(),\n            swapchain: None,\n            swapchain_dirty: false,\n            new_video_texture: None,\n            vk,\n        };\n\n        unsafe { renderer.recreate_swapchain()? };\n\n        Ok(renderer)\n    }\n\n    #[instrument(skip_all, level = \"trace\")]\n    unsafe fn recreate_swapchain(&mut self) -> Result<()> {\n        let start = time::Instant::now();\n        let device = &self.vk.device;\n\n        let surface_format = select_surface_format(self.vk.clone(), self.hdr_mode)?;\n\n        let surface_capabilities = self\n            .vk\n            .surface_loader\n            .get_physical_device_surface_capabilities(self.vk.pdevice, self.vk.surface)\n            .unwrap();\n        let mut desired_image_count = surface_capabilities.min_image_count + 1;\n        if surface_capabilities.max_image_count > 0\n            && desired_image_count > surface_capabilities.max_image_count\n        {\n            desired_image_count = surface_capabilities.max_image_count;\n        }\n\n        let surface_resolution = match surface_capabilities.current_extent.width {\n            std::u32::MAX => vk::Extent2D {\n                width: self.width,\n                height: self.height,\n            },\n            _ => surface_capabilities.current_extent,\n        };\n\n        let pre_transform = if surface_capabilities\n            .supported_transforms\n            .contains(vk::SurfaceTransformFlagsKHR::IDENTITY)\n        {\n            vk::SurfaceTransformFlagsKHR::IDENTITY\n        } else {\n            surface_capabilities.current_transform\n        };\n\n        let present_modes = self\n            .vk\n            .surface_loader\n            .get_physical_device_surface_present_modes(self.vk.pdevice, self.vk.surface)\n            .unwrap();\n\n        let mut present_modes = present_modes.clone();\n        
present_modes.sort_by_key(|&mode| match mode {\n            vk::PresentModeKHR::MAILBOX => 0,\n            vk::PresentModeKHR::FIFO => 1,\n            vk::PresentModeKHR::IMMEDIATE => 2,\n            _ => 4,\n        });\n\n        let present_mode = present_modes.first().unwrap();\n        if *present_mode != vk::PresentModeKHR::MAILBOX {\n            warn!(\n                \"present mode MAILBOX not available, using {:?} (available: {:?})\",\n                present_mode, present_modes\n            );\n        }\n\n        let mut swapchain_create_info = vk::SwapchainCreateInfoKHR::default()\n            .surface(self.vk.surface)\n            .min_image_count(desired_image_count)\n            .image_color_space(surface_format.color_space)\n            .image_format(surface_format.format)\n            .image_extent(surface_resolution)\n            .image_usage(vk::ImageUsageFlags::COLOR_ATTACHMENT)\n            .image_sharing_mode(vk::SharingMode::EXCLUSIVE)\n            .pre_transform(pre_transform)\n            .composite_alpha(vk::CompositeAlphaFlagsKHR::OPAQUE)\n            .present_mode(*present_mode)\n            .clipped(true)\n            .image_array_layers(1);\n\n        if let Some(old_swapchain) = self.swapchain.as_ref() {\n            swapchain_create_info = swapchain_create_info.old_swapchain(old_swapchain.swapchain);\n        }\n\n        let swapchain = self\n            .vk\n            .swapchain_loader\n            .create_swapchain(&swapchain_create_info, None)?;\n        let swapchain_images = self.vk.swapchain_loader.get_swapchain_images(swapchain)?;\n\n        // TODO: rather than recreate the swapchain if the video texture\n        // changes, we can just recreate the pipeline. 
This is tricky because\n        // we create a descriptor set for each SwapFrame, which refers to the\n        // layout, which includes the immutable sampler.\n\n        // We need to create a sampler, even if we don't have a video stream yet\n        // and don't know what the fields should be.\n        let (video_texture_format, video_params) = match self.new_video_texture.as_ref() {\n            Some((tex, params)) => (tex.format, *params),\n            None => (\n                vk::Format::G8_B8_R8_3PLANE_420_UNORM,\n                VideoStreamParams::default(),\n            ),\n        };\n\n        let sampler_conversion =\n            create_ycbcr_sampler_conversion(device, video_texture_format, &video_params)?;\n\n        let sampler = {\n            let mut conversion_info =\n                vk::SamplerYcbcrConversionInfo::default().conversion(sampler_conversion);\n\n            let create_info = vk::SamplerCreateInfo::default()\n                .mag_filter(vk::Filter::LINEAR)\n                .min_filter(vk::Filter::LINEAR)\n                .compare_enable(true)\n                .address_mode_u(vk::SamplerAddressMode::CLAMP_TO_EDGE)\n                .address_mode_v(vk::SamplerAddressMode::CLAMP_TO_EDGE)\n                .address_mode_w(vk::SamplerAddressMode::CLAMP_TO_EDGE)\n                .push_next(&mut conversion_info);\n\n            unsafe { device.create_sampler(&create_info, None)? 
}\n        };\n\n        let bound_video_texture = if let Some((tex, params)) = self.new_video_texture.as_ref() {\n            let view = create_image_view(\n                &self.vk.device,\n                tex.image,\n                tex.format,\n                Some(sampler_conversion),\n            )?;\n\n            // Increment the reference count on the texture.\n            Some(VideoTexture {\n                image: tex.clone(),\n                view,\n                color_space: params.color_space.into(),\n            })\n        } else {\n            None\n        };\n\n        let aspect = if let Some(tex) = bound_video_texture.as_ref() {\n            calculate_aspect(self.width, self.height, tex.image.width, tex.image.height)\n        } else {\n            (1.0, 1.0)\n        };\n\n        let descriptor_set_layout = {\n            // We're required to use an immutable sampler for YCbCr conversion\n            // by the vulkan spec.\n            let samplers = [sampler];\n            let binding = vk::DescriptorSetLayoutBinding::default()\n                .binding(0)\n                .descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n                .descriptor_count(1)\n                .stage_flags(vk::ShaderStageFlags::FRAGMENT)\n                .immutable_samplers(&samplers);\n\n            unsafe {\n                device.create_descriptor_set_layout(\n                    &vk::DescriptorSetLayoutCreateInfo::default().bindings(&[binding]),\n                    None,\n                )?\n            }\n        };\n\n        let descriptor_pool = {\n            let binding_multiplier = get_ycbcr_conversion_properties(\n                self.vk.pdevice,\n                &self.vk.instance,\n                video_texture_format,\n            )?\n            .combined_image_sampler_descriptor_count;\n            let sampler_size = [vk::DescriptorPoolSize::default()\n                .ty(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n               
 .descriptor_count(swapchain_images.len() as u32 * binding_multiplier)];\n\n            let info = vk::DescriptorPoolCreateInfo::default()\n                .pool_sizes(&sampler_size)\n                .max_sets(swapchain_images.len() as u32);\n\n            unsafe { device.create_descriptor_pool(&info, None)? }\n        };\n\n        let pipeline_layout = {\n            let pc_ranges = [vk::PushConstantRange::default()\n                .stage_flags(vk::ShaderStageFlags::VERTEX | vk::ShaderStageFlags::FRAGMENT)\n                .offset(0)\n                .size(std::mem::size_of::<PushConstants>() as u32)];\n            let set_layouts = [descriptor_set_layout];\n            let create_info = vk::PipelineLayoutCreateInfo::default()\n                .set_layouts(&set_layouts)\n                .push_constant_ranges(&pc_ranges);\n\n            unsafe { device.create_pipeline_layout(&create_info, None)? }\n        };\n\n        let pipeline = {\n            let vert_bytes = include_bytes!(concat!(env!(\"OUT_DIR\"), \"/shaders/vert.spv\"));\n            let frag_bytes = include_bytes!(concat!(env!(\"OUT_DIR\"), \"/shaders/frag.spv\"));\n            let vert_shader = load_shader(device, vert_bytes).context(\"loading vert.spv\")?;\n            let frag_shader = load_shader(device, frag_bytes).context(\"loading frag.spv\")?;\n\n            let vert_stage = vk::PipelineShaderStageCreateInfo::default()\n                .stage(vk::ShaderStageFlags::VERTEX)\n                .module(vert_shader)\n                .name(cstr!(\"main\"));\n\n            let frag_stage = vk::PipelineShaderStageCreateInfo::default()\n                .stage(vk::ShaderStageFlags::FRAGMENT)\n                .module(frag_shader)\n                .name(cstr!(\"main\"));\n\n            let vertex_input_state = vk::PipelineVertexInputStateCreateInfo::default();\n\n            let input_assembly_state = vk::PipelineInputAssemblyStateCreateInfo::default()\n                
.topology(vk::PrimitiveTopology::TRIANGLE_STRIP)\n                .primitive_restart_enable(false);\n\n            let viewport = [vk::Viewport::default()\n                .x(0.0)\n                .y(0.0)\n                .width(self.width as f32)\n                .height(self.height as f32)\n                .min_depth(0.0)\n                .max_depth(1.0)];\n\n            let scissor = [vk::Rect2D::default().extent(vk::Extent2D {\n                width: self.width,\n                height: self.height,\n            })];\n\n            let viewport_state = vk::PipelineViewportStateCreateInfo::default()\n                .viewports(&viewport)\n                .scissors(&scissor);\n\n            let rasterization_state = vk::PipelineRasterizationStateCreateInfo::default()\n                .depth_clamp_enable(false)\n                .rasterizer_discard_enable(false)\n                .polygon_mode(vk::PolygonMode::FILL)\n                .line_width(1.0)\n                .depth_bias_enable(false)\n                // Per https://www.saschawillems.de/blog/2016/08/13/vulkan-tutorial-on-rendering-a-fullscreen-quad-without-buffers\n                .cull_mode(vk::CullModeFlags::FRONT)\n                .front_face(vk::FrontFace::COUNTER_CLOCKWISE);\n\n            let multisample_state = vk::PipelineMultisampleStateCreateInfo::default()\n                .sample_shading_enable(false)\n                .rasterization_samples(vk::SampleCountFlags::TYPE_1);\n\n            let attachment = [vk::PipelineColorBlendAttachmentState::default()\n                .color_write_mask(vk::ColorComponentFlags::RGBA)\n                .blend_enable(true)\n                .src_color_blend_factor(vk::BlendFactor::SRC_ALPHA)\n                .dst_color_blend_factor(vk::BlendFactor::ONE_MINUS_SRC_ALPHA)\n                .color_blend_op(vk::BlendOp::ADD)\n                .src_alpha_blend_factor(vk::BlendFactor::ONE)\n                .dst_alpha_blend_factor(vk::BlendFactor::ZERO)\n                
.alpha_blend_op(vk::BlendOp::ADD)];\n\n            let color_blend_state = vk::PipelineColorBlendStateCreateInfo::default()\n                .logic_op_enable(false)\n                .attachments(&attachment);\n\n            let formats = [surface_format.format];\n            let mut pipeline_rendering =\n                vk::PipelineRenderingCreateInfo::default().color_attachment_formats(&formats);\n\n            let stages = [vert_stage, frag_stage];\n            let create_info = vk::GraphicsPipelineCreateInfo::default()\n                .stages(&stages)\n                .vertex_input_state(&vertex_input_state)\n                .input_assembly_state(&input_assembly_state)\n                .viewport_state(&viewport_state)\n                .rasterization_state(&rasterization_state)\n                .multisample_state(&multisample_state)\n                .color_blend_state(&color_blend_state)\n                .layout(pipeline_layout)\n                .push_next(&mut pipeline_rendering);\n\n            unsafe {\n                let pipeline = match device.create_graphics_pipelines(\n                    vk::PipelineCache::null(),\n                    &[create_info],\n                    None,\n                ) {\n                    Ok(pipelines) => Ok(pipelines[0]),\n                    Err((_, e)) => Err(e),\n                }?;\n\n                device.destroy_shader_module(vert_shader, None);\n                device.destroy_shader_module(frag_shader, None);\n                pipeline\n            }\n        };\n\n        let create_frame = || -> Result<InFlightFrame> {\n            let render_cb = {\n                let create_info = vk::CommandBufferAllocateInfo::default()\n                    .level(vk::CommandBufferLevel::PRIMARY)\n                    .command_pool(self.vk.present_queue.command_pool)\n                    .command_buffer_count(1);\n\n                let cbs = device\n                    .allocate_command_buffers(&create_info)\n                   
 .context(\"failed to allocate render command buffer\")?;\n\n                cbs[0]\n            };\n\n            let descriptor_set = {\n                let layouts = &[descriptor_set_layout];\n                let create_info = vk::DescriptorSetAllocateInfo::default()\n                    .descriptor_pool(descriptor_pool)\n                    .set_layouts(layouts);\n\n                let ds = device\n                    .allocate_descriptor_sets(&create_info)?\n                    .pop()\n                    .unwrap();\n\n                // TODO: do the write in bind_video_texture?\n                if let Some(tex) = bound_video_texture.as_ref() {\n                    let info = [vk::DescriptorImageInfo::default()\n                        .image_layout(vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL)\n                        .image_view(tex.view)];\n\n                    let sampler_write = vk::WriteDescriptorSet::default()\n                        .dst_set(ds)\n                        .dst_binding(0)\n                        .dst_array_element(0)\n                        .descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n                        .image_info(&info);\n\n                    device.update_descriptor_sets(&[sampler_write], &[]);\n                }\n\n                ds\n            };\n\n            let render_fence = create_fence(device, true)?;\n            let image_acquired_sema = create_semaphore(device)?;\n            let render_complete_sema = create_semaphore(device)?;\n\n            let ts_pool = create_timestamp_query_pool(device, 2)?;\n\n            Ok(InFlightFrame {\n                render_cb,\n                render_fence,\n                image_acquired_sema,\n                render_complete_sema,\n                descriptor_set,\n                ts_pool,\n                tracy_span: None,\n            })\n        };\n\n        let frames = (0..swapchain_images.len())\n            .map(|_| create_frame())\n            
.collect::<Result<Vec<_>>>()?;\n\n        let swapchain_images = swapchain_images\n            .into_iter()\n            .map(|image| {\n                let image_view = create_image_view(device, image, surface_format.format, None)?;\n\n                Ok(SwapImage {\n                    image,\n                    view: image_view,\n                })\n            })\n            .collect::<Result<Vec<_>>>()?;\n\n        let mut imgui_renderer = imgui_vulkan::Renderer::with_default_allocator(\n            &self.vk.instance,\n            self.vk.pdevice,\n            self.vk.device.clone(),\n            self.vk.present_queue.queue,\n            self.vk.present_queue.command_pool,\n            imgui_vulkan::DynamicRendering {\n                color_attachment_format: surface_format.format,\n                depth_attachment_format: None,\n            },\n            &mut self.imgui,\n            Some(imgui_vulkan::Options {\n                in_flight_frames: frames.len(),\n                ..Default::default()\n            }),\n        )?;\n\n        imgui_renderer.update_fonts_texture(\n            self.vk.present_queue.queue,\n            self.vk.present_queue.command_pool,\n            &mut self.imgui,\n        )?;\n\n        let swapchain = Swapchain {\n            swapchain,\n            frames,\n            present_images: swapchain_images,\n            current_frame: 0,\n\n            descriptor_pool,\n            descriptor_set_layout,\n            sampler_conversion,\n            sampler,\n            bound_video_texture,\n            aspect,\n            surface_format,\n            pipeline_layout,\n            pipeline,\n\n            imgui_renderer,\n        };\n\n        debug!(\"recreated swapchain in {:?}\", start.elapsed());\n\n        if let Some(old_swapchain) = self.swapchain.replace(swapchain) {\n            self.destroy_swapchain(old_swapchain);\n        };\n\n        Ok(())\n    }\n\n    pub fn handle_event(&mut self, event: 
&winit::event::WindowEvent) -> anyhow::Result<()> {\n        let now = time::Instant::now();\n        self.imgui.io_mut().update_delta_time(now - self.imgui_time);\n        self.imgui_time = now;\n\n        let wrapped: winit::event::Event<()> = winit::event::Event::WindowEvent {\n            window_id: self.window.id(),\n            event: event.clone(),\n        };\n\n        self.imgui_platform\n            .handle_event(self.imgui.io_mut(), self.window.as_ref(), &wrapped);\n\n        match event {\n            winit::event::WindowEvent::Resized(size) => {\n                self.resize(size.width, size.height);\n            }\n            winit::event::WindowEvent::ScaleFactorChanged { scale_factor, .. } => {\n                self.scale_factor_changed(*scale_factor)?;\n            }\n            _ => (),\n        }\n\n        Ok(())\n    }\n\n    pub fn resize(&mut self, width: u32, height: u32) {\n        if self.width == width && self.height == height {\n            return;\n        }\n\n        self.width = width;\n        self.height = height;\n        self.swapchain_dirty = true;\n    }\n\n    fn scale_factor_changed(&mut self, scale_factor: f64) -> anyhow::Result<()> {\n        if self.scale_factor == scale_factor {\n            return Ok(());\n        }\n\n        // Resize fonts.\n        self.imgui_fontid_big =\n            import_imgui_font(&mut self.imgui, &self.imgui_font, FONT_SIZE, scale_factor)?;\n\n        self.scale_factor = scale_factor;\n        Ok(())\n    }\n\n    pub fn bind_video_texture(\n        &mut self,\n        texture: Arc<VkImage>,\n        params: VideoStreamParams,\n    ) -> Result<()> {\n        // TODO: no need to recreate the sampler if the params match.\n        self.new_video_texture = Some((texture, params));\n        self.swapchain_dirty = true;\n        Ok(())\n    }\n\n    // Returns the normalized relationship between the output dimensions and the\n    // video texture dimensions, after scaling. 
For example, if the video\n    // texture is 250x250 and the output is 1000x500, the aspect would be (2.0,\n    // 1.0).\n    pub fn get_texture_aspect(&self) -> Option<(f64, f64)> {\n        if let Some(Swapchain {\n            bound_video_texture: Some(_),\n            aspect,\n            ..\n        }) = self.swapchain.as_ref()\n        {\n            Some(*aspect)\n        } else {\n            None\n        }\n    }\n\n    #[instrument(skip_all, level = \"trace\")]\n    pub unsafe fn render<F>(&mut self, ui_builder: F) -> Result<()>\n    where\n        F: FnOnce(&imgui::Ui) -> anyhow::Result<()>,\n    {\n        if self.swapchain_dirty || self.swapchain.is_none() {\n            self.recreate_swapchain()?;\n            self.swapchain_dirty = false;\n        }\n\n        let device = &self.vk.device;\n        let swapchain = self.swapchain.as_mut().unwrap();\n        let num_frames = swapchain.frames.len();\n\n        let frame = &mut swapchain.frames[swapchain.current_frame];\n        swapchain.current_frame = (swapchain.current_frame + 1) % num_frames;\n\n        // Wait for the gpu to catch up.\n        device.wait_for_fences(&[frame.render_fence], true, u64::MAX)?;\n\n        // Trace the frame on the GPU side.\n        if let Some(ctx) = &self.vk.tracy_context {\n            if let Some(span) = frame.tracy_span.take() {\n                let timestamps = frame.ts_pool.fetch_results(device)?;\n                span.upload_timestamp(timestamps[0], timestamps[1]);\n            }\n\n            frame.tracy_span = Some(ctx.span(span_location!())?);\n        }\n\n        let result = self.vk.swapchain_loader.acquire_next_image(\n            swapchain.swapchain,\n            u64::MAX,\n            frame.image_acquired_sema,\n            vk::Fence::null(),\n        );\n\n        let swapchain_index = match result {\n            Ok((image_index, _)) => image_index,\n            Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => {\n                // Recreate and try again.\n  
              self.swapchain_dirty = true;\n                return self.render(ui_builder);\n            }\n            Err(e) => return Err(e.into()),\n        };\n\n        let present_image = swapchain\n            .present_images\n            .get(swapchain_index as usize)\n            .unwrap();\n\n        // Reset the command buffer.\n        device.reset_command_buffer(frame.render_cb, vk::CommandBufferResetFlags::empty())?;\n\n        // Begin the command buffer.\n        {\n            let begin_info = vk::CommandBufferBeginInfo::default()\n                .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);\n\n            device.begin_command_buffer(frame.render_cb, &begin_info)?;\n        }\n\n        // Record the start timestamp.\n        frame.ts_pool.cmd_reset(device, frame.render_cb);\n        device.cmd_write_timestamp(\n            frame.render_cb,\n            vk::PipelineStageFlags::TOP_OF_PIPE,\n            frame.ts_pool.pool,\n            0,\n        );\n\n        // Transition the present image to be writable.\n        cmd_image_barrier(\n            device,\n            frame.render_cb,\n            present_image.image,\n            vk::PipelineStageFlags::TOP_OF_PIPE,\n            vk::AccessFlags::empty(),\n            vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT,\n            vk::AccessFlags::COLOR_ATTACHMENT_WRITE,\n            vk::ImageLayout::UNDEFINED,\n            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,\n        );\n\n        // Begin rendering.\n        {\n            let rect: vk::Rect2D = vk::Rect2D::default().extent(vk::Extent2D {\n                width: self.width,\n                height: self.height,\n            });\n\n            let clear_value = vk::ClearValue {\n                color: vk::ClearColorValue {\n                    float32: [0.0, 0.0, 0.0, 1.0],\n                },\n            };\n\n            let color_attachment = vk::RenderingAttachmentInfo::default()\n                
.image_view(present_image.view)\n                .image_layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL)\n                .load_op(vk::AttachmentLoadOp::CLEAR)\n                .store_op(vk::AttachmentStoreOp::STORE)\n                .clear_value(clear_value);\n\n            let color_attachments = [color_attachment];\n            let rendering_info = vk::RenderingInfo::default()\n                .render_area(rect)\n                .color_attachments(&color_attachments)\n                .layer_count(1);\n\n            self.vk\n                .dynamic_rendering_loader\n                .cmd_begin_rendering(frame.render_cb, &rendering_info);\n            device.cmd_bind_pipeline(\n                frame.render_cb,\n                vk::PipelineBindPoint::GRAPHICS,\n                swapchain.pipeline,\n            );\n        }\n\n        if self.new_video_texture.is_none() || swapchain.aspect != (1.0, 1.0) {\n            // TODO Draw the background\n            // https://www.toptal.com/designers/subtlepatterns/prism/\n        }\n\n        // Draw the video texture.\n        if let Some(tex) = &swapchain.bound_video_texture {\n            let pc = PushConstants {\n                aspect: glam::Vec2::new(swapchain.aspect.0 as f32, swapchain.aspect.1 as f32),\n                texture_color_space: tex.color_space,\n                output_color_space: swapchain.surface_format.color_space,\n            };\n\n            device.cmd_push_constants(\n                frame.render_cb,\n                swapchain.pipeline_layout,\n                vk::ShaderStageFlags::VERTEX | vk::ShaderStageFlags::FRAGMENT,\n                0,\n                std::slice::from_raw_parts(\n                    &pc as *const _ as *const u8,\n                    std::mem::size_of::<PushConstants>(),\n                ),\n            );\n\n            device.cmd_bind_descriptor_sets(\n                frame.render_cb,\n                vk::PipelineBindPoint::GRAPHICS,\n                
swapchain.pipeline_layout,\n                0,\n                &[frame.descriptor_set],\n                &[],\n            );\n\n            // Draw the video texture.\n            device.cmd_draw(frame.render_cb, 4, 1, 0, 0);\n        }\n        // Draw the overlay.\n        {\n            self.imgui_platform\n                .prepare_frame(self.imgui.io_mut(), &self.window)?;\n\n            {\n                let ui = self.imgui.new_frame();\n\n                let _font_stack = ui.push_font(self.imgui_fontid_big);\n                ui_builder(ui)?;\n\n                self.imgui_platform.prepare_render(ui, &self.window);\n            }\n\n            swapchain\n                .imgui_renderer\n                .cmd_draw(frame.render_cb, self.imgui.render())?;\n        };\n\n        // Done rendereng.\n        self.vk\n            .dynamic_rendering_loader\n            .cmd_end_rendering(frame.render_cb);\n\n        // Transition the present image to be presentable.\n        cmd_image_barrier(\n            device,\n            frame.render_cb,\n            present_image.image,\n            vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT,\n            vk::AccessFlags::COLOR_ATTACHMENT_WRITE,\n            vk::PipelineStageFlags::BOTTOM_OF_PIPE,\n            vk::AccessFlags::empty(),\n            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,\n            vk::ImageLayout::PRESENT_SRC_KHR,\n        );\n\n        // Record the end timestamp.\n        device.cmd_write_timestamp(\n            frame.render_cb,\n            vk::PipelineStageFlags::BOTTOM_OF_PIPE,\n            frame.ts_pool.pool,\n            1,\n        );\n\n        if let Some(span) = &mut frame.tracy_span {\n            span.end_zone();\n        }\n\n        // Submit and present!\n        {\n            let present_queue = self.vk.present_queue.queue;\n\n            device.end_command_buffer(frame.render_cb)?;\n            device.reset_fences(&[frame.render_fence])?;\n\n            let cbs = 
[frame.render_cb];\n            let wait_semas = [frame.image_acquired_sema];\n            let signal_semas = [frame.render_complete_sema];\n            let wait_stages = [vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT];\n            let submit_info = vk::SubmitInfo::default()\n                .command_buffers(&cbs)\n                .wait_semaphores(&wait_semas)\n                .wait_dst_stage_mask(&wait_stages)\n                .signal_semaphores(&signal_semas);\n\n            trace!(queue = ?present_queue, \"queue submit for render\");\n\n            device.queue_submit(present_queue, &[submit_info], frame.render_fence)?;\n\n            // This \"helps winit [with stuff]\". It also seems to increase latency.\n            self.window.pre_present_notify();\n\n            trace!(queue = ?present_queue, index = swapchain_index, \"queue present\");\n\n            let wait_semas = [frame.render_complete_sema];\n            let swapchains = [swapchain.swapchain];\n            let image_indices = [swapchain_index];\n            let present_info = vk::PresentInfoKHR::default()\n                .wait_semaphores(&wait_semas)\n                .swapchains(&swapchains)\n                .image_indices(&image_indices);\n\n            let res = trace_span!(\"render.queue_present\").in_scope(|| {\n                self.vk\n                    .swapchain_loader\n                    .queue_present(present_queue, &present_info)\n            });\n\n            self.swapchain_dirty = match res {\n                Ok(false) => false,\n                Ok(true) => true,\n                Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => true,\n                Err(e) => return Err(e.into()),\n            };\n        }\n\n        tracy_client::frame_mark();\n\n        Ok(())\n    }\n\n    unsafe fn destroy_swapchain(&mut self, mut swapchain: Swapchain) {\n        let device = &self.vk.device;\n        device.device_wait_idle().unwrap();\n\n        for frame in swapchain.frames.drain(..) 
{\n            device.free_command_buffers(self.vk.present_queue.command_pool, &[frame.render_cb]);\n            device.destroy_fence(frame.render_fence, None);\n            device.destroy_semaphore(frame.image_acquired_sema, None);\n            device.destroy_semaphore(frame.render_complete_sema, None);\n            device.destroy_query_pool(frame.ts_pool.pool, None);\n        }\n\n        for swap_img in swapchain.present_images.drain(..) {\n            // Destroying the swapchain does this.\n            // device.destroy_image(swap_img.image, None);\n            device.destroy_image_view(swap_img.view, None);\n        }\n\n        device.destroy_pipeline_layout(swapchain.pipeline_layout, None);\n        device.destroy_descriptor_pool(swapchain.descriptor_pool, None);\n        device.destroy_descriptor_set_layout(swapchain.descriptor_set_layout, None);\n        device.destroy_sampler(swapchain.sampler, None);\n        device.destroy_sampler_ycbcr_conversion(swapchain.sampler_conversion, None);\n\n        if let Some(tex) = swapchain.bound_video_texture.take() {\n            device.destroy_image_view(tex.view, None);\n            // We probably drop the last reference to the image here, which then\n            // gets destroyed.\n        }\n\n        device.destroy_pipeline(swapchain.pipeline, None);\n        self.vk\n            .swapchain_loader\n            .destroy_swapchain(swapchain.swapchain, None)\n    }\n}\n\nfn select_surface_format(\n    vk: Arc<VkContext>,\n    hdr_mode: bool,\n) -> Result<vk::SurfaceFormatKHR, vk::Result> {\n    let mut surface_formats = unsafe {\n        vk.surface_loader\n            .get_physical_device_surface_formats(vk.pdevice, vk.surface)?\n    };\n\n    let preferred_formats = [\n        vk::Format::R16G16B16A16_SFLOAT,\n        vk::Format::R8G8B8A8_UNORM,\n        vk::Format::B8G8R8A8_UNORM,\n    ];\n\n    let preferred_color_spaces = if hdr_mode {\n        vec![\n            vk::ColorSpaceKHR::HDR10_ST2084_EXT,\n            
vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT,\n            vk::ColorSpaceKHR::DISPLAY_P3_NONLINEAR_EXT,\n            vk::ColorSpaceKHR::SRGB_NONLINEAR,\n        ]\n    } else {\n        vec![\n            vk::ColorSpaceKHR::BT709_NONLINEAR_EXT,\n            vk::ColorSpaceKHR::SRGB_NONLINEAR,\n        ]\n    };\n\n    surface_formats.sort_by_key(|sf| {\n        let color_space_score = preferred_color_spaces\n            .iter()\n            .position(|&cs| cs == sf.color_space)\n            .unwrap_or(preferred_color_spaces.len());\n        let format_score = preferred_formats\n            .iter()\n            .position(|&f| f == sf.format)\n            .unwrap_or(preferred_formats.len());\n        (color_space_score, format_score)\n    });\n\n    let surface_format = surface_formats[0];\n    debug!(?surface_format, \"selected surface format\");\n\n    Ok(surface_format)\n}\n\nimpl Drop for Renderer {\n    fn drop(&mut self) {\n        if let Some(swapchain) = self.swapchain.take() {\n            unsafe {\n                self.destroy_swapchain(swapchain);\n            };\n        }\n    }\n}\n\nfn import_imgui_font(\n    imgui: &mut imgui::Context,\n    font: &font_kit::font::Font,\n    size: f32,\n    scale_factor: f64,\n) -> anyhow::Result<imgui::FontId> {\n    let font_size = size * scale_factor as f32;\n    imgui.io_mut().font_global_scale = (1.0 / scale_factor) as f32;\n\n    let data = match font.copy_font_data() {\n        Some(data) => data,\n        None => return Err(anyhow!(\"failed to load font data for {:?}\", font)),\n    };\n\n    let id = imgui.fonts().add_font(&[imgui::FontSource::TtfData {\n        size_pixels: font_size,\n        data: &data,\n        config: Some(imgui::FontConfig {\n            pixel_snap_h: true,\n            oversample_h: 4,\n            oversample_v: 4,\n            ..imgui::FontConfig::default()\n        }),\n    }]);\n\n    Ok(id)\n}\n\nfn calculate_aspect(width: u32, height: u32, tex_width: u32, tex_height: u32) -> (f64, 
f64) {\n    let width = width as f64;\n    let height = height as f64;\n    let tex_width = tex_width as f64;\n    let tex_height = tex_height as f64;\n\n    let window_aspect = width / height;\n    let texture_aspect = tex_width / tex_height;\n    if window_aspect > texture_aspect {\n        // Screen too wide.\n        let scale = height / tex_height;\n        (width / (tex_width * scale), 1.0)\n    } else if window_aspect < texture_aspect {\n        // Screen too tall.\n        let scale = width / tex_width;\n        (1.0, height / (tex_height * scale))\n    } else {\n        (1.0, 1.0)\n    }\n}\n"
  },
  {
    "path": "mm-client/src/render.slang",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nimport color;\n\n// Should match the definition in render.rs\nenum TextureColorSpace\n{\n    Bt709 = 0,\n    Bt2020Pq = 1,\n}\n\nstruct PushConstants\n{\n    float2 aspect;\n    TextureColorSpace texture_color_space;\n    int vk_color_space;\n};\n\nstatic const int VK_COLOR_SPACE_SRGB_NONLINEAR_EXT = 0;\nstatic const int VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1_000_104_002;\n// static const int VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1_000_104_001;\nstatic const int VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1_000_104_006;\nstatic const int VK_COLOR_SPACE_HDR10_ST2084_EXT = 1_000_104_008;\n\n[[vk::push_constant]]\nconst PushConstants pc;\n\nconst Sampler2D texture;\n\nstruct VertOutput\n{\n    float2 uv : TextureCoord;\n    float4 position : SV_Position;\n};\n\n[shader(\"vertex\")]\nVertOutput vert(uint vertexID: SV_VertexID)\n{\n    VertOutput output;\n\n    let uv = float2((vertexID << 1) & 2, vertexID & 2) / 2.0;\n    output.uv = uv;\n    output.position = float4((uv * 2.0f - 1.0f) / pc.aspect, 0.0f, 1.0f);\n    return output;\n}\n\nfloat3 bt709_to_display(float3 color, int vk_color_space)\n{\n    if (vk_color_space == VK_COLOR_SPACE_BT709_NONLINEAR_EXT)\n    {\n        return color;\n    }\n\n    let linear = bt709_eotf(color);\n    switch (vk_color_space)\n    {\n    case VK_COLOR_SPACE_SRGB_NONLINEAR_EXT:\n        return srgb_inverse_eotf(linear);\n    case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:\n        return linear;\n    // case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:\n    //     return srgb_inverse_eotf(transform(color, Primaries::BT709, Primaries::P3))\n    case VK_COLOR_SPACE_HDR10_ST2084_EXT:\n        return pq_inverse_eotf(transform(linear, Primaries::BT709, Primaries::BT2020));\n    default:\n        return srgb_inverse_eotf(linear);\n    }\n}\n\nfloat3 bt2020_pq_to_display(float3 color, int vk_color_space)\n{\n    if (vk_color_space == 
VK_COLOR_SPACE_HDR10_ST2084_EXT)\n    {\n        return color;\n    }\n\n    let linear = transform(pq_eotf(color) * PQ_MAX_WHITE / SDR_REFERENCE_WHITE, Primaries::BT2020, Primaries::BT709);\n    switch (vk_color_space)\n    {\n    case VK_COLOR_SPACE_SRGB_NONLINEAR_EXT:\n        return srgb_inverse_eotf(linear);\n    case VK_COLOR_SPACE_BT709_NONLINEAR_EXT:\n        return bt709_inverse_eotf(clamp(linear, 0.0, 1.0));\n    case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:\n        return linear;\n    // case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:\n    //     return srgb_inverse_eotf(transform(color, Primaries::BT2020, Primaries::P3))\n    default:\n        return srgb_inverse_eotf(linear);\n    }\n}\n\n[shader(\"fragment\")]\nfloat4 frag(float2 uv: TextureCoord)\n    : SV_Target\n{\n    float4 color = clamp(texture.Sample(uv), 0.0, 1.0);\n\n    // When sampling the video texture, vulkan does the matrix multiplication\n    // for us, but doesn't apply any transfer function, so the values are\n    // still nonlinear in either BT.709 or BT.2020/ST2048.\n    switch (pc.texture_color_space)\n    {\n    case TextureColorSpace::Bt709:\n        return float4(bt709_to_display(color.rgb, pc.vk_color_space), 1.0);\n    case TextureColorSpace::Bt2020Pq:\n        return float4(bt2020_pq_to_display(color.rgb, pc.vk_color_space), 1.0);\n    default:\n        return float4(0.0, 0.5, 1.0, 1.0);\n    }\n}\n"
  },
  {
    "path": "mm-client/src/stats.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{\n    collections::BTreeMap,\n    sync::{Arc, RwLock},\n    time,\n};\n\nuse lazy_static::lazy_static;\nuse simple_moving_average::{SingleSumSMA, SMA as _};\n\nlazy_static! {\n    pub static ref STATS: Arc<Stats> = Arc::new(Stats::default());\n}\n\n#[derive(Default)]\npub struct Stats {\n    inner: RwLock<Inner>,\n}\n\nstruct InFlightFrame(time::Instant);\n\nstruct Inner {\n    in_flight_frames: BTreeMap<(u64, u64), InFlightFrame>,\n\n    video_bitrate: SingleSumSMA<f32, f32, 60>,\n    video_bytes: u64,\n    last_frame: time::Instant,\n\n    connection_rtt: time::Duration,\n    video_latency: SingleSumSMA<u64, u64, 60>,\n}\n\nimpl Stats {\n    pub fn set_connection_rtt(&self, rtt: time::Duration) {\n        self.inner.write().unwrap().connection_rtt = rtt;\n    }\n\n    /// Starts tracking a frame.\n    pub fn frame_received(&self, stream_seq: u64, seq: u64, len: usize) {\n        let now = time::Instant::now();\n        let mut inner = self.inner.write().unwrap();\n\n        inner\n            .in_flight_frames\n            .entry((stream_seq, seq))\n            .or_insert(InFlightFrame(now));\n\n        inner.video_bytes += len as u64;\n    }\n\n    /// Tracks the total frame time. 
Should be called right before the frame is\n    /// rendered.\n    pub fn frame_rendered(&self, stream_seq: u64, seq: u64) {\n        let now = time::Instant::now();\n        let mut inner = self.inner.write().unwrap();\n\n        // Add a bitrate measurement.\n        let duration = (now - inner.last_frame).as_secs_f32();\n        inner.last_frame = now;\n\n        let sample = inner.video_bytes as f32 * 8.0 / duration;\n        inner.video_bitrate.add_sample(sample);\n        inner.video_bytes = 0;\n\n        // Finish tracking the frame, and measure latency.\n        if let Some(frame) = inner.in_flight_frames.remove(&(stream_seq, seq)) {\n            inner\n                .video_latency\n                .add_sample((now - frame.0).as_nanos() as u64)\n        }\n    }\n\n    pub fn frame_discarded(&self, stream_seq: u64, seq: u64) {\n        self.inner\n            .write()\n            .unwrap()\n            .in_flight_frames\n            .remove(&(stream_seq, seq));\n    }\n\n    /// Returns the average video bitrate in bits per second.\n    pub fn video_bitrate(&self) -> f32 {\n        self.inner.read().unwrap().video_bitrate.get_average()\n    }\n\n    /// Returns the average total video latency in milliseconds.\n    pub fn video_latency(&self) -> f32 {\n        let inner = self.inner.read().unwrap();\n\n        let avg = inner.video_latency.get_average() + inner.connection_rtt.as_nanos() as u64;\n        avg as f32 / 1_000_000.0\n    }\n}\n\nimpl Default for Inner {\n    fn default() -> Self {\n        Self {\n            in_flight_frames: BTreeMap::new(),\n\n            video_bitrate: SingleSumSMA::new(),\n            video_bytes: 0,\n            last_frame: time::Instant::now(),\n\n            connection_rtt: time::Duration::ZERO,\n            video_latency: SingleSumSMA::new(),\n        }\n    }\n}\n"
  },
  {
    "path": "mm-client/src/video.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{\n    sync::{mpsc, Arc},\n    time,\n};\n\nuse anyhow::{anyhow, bail, Context};\nuse ash::vk;\nuse bytes::{Bytes, BytesMut};\nuse ffmpeg_next as ffmpeg;\nuse ffmpeg_sys_next as ffmpeg_sys;\nuse mm_client_common as client;\nuse mm_protocol as protocol;\nuse tracing::{debug, error, instrument, trace, trace_span, warn};\n\nuse crate::{stats::STATS, vulkan::*};\n\nconst DECODER_INIT_TIMEOUT: time::Duration = time::Duration::from_secs(5);\n\ntype Undecoded = std::sync::Arc<client::Packet>;\n\n#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]\npub struct FrameMetadata {\n    pub stream_seq: u64,\n    pub seq: u64,\n    pub pts: u64,\n}\n\n#[derive(Debug, Clone)]\nstruct YUVPicture {\n    planes: [Bytes; 3],\n    num_planes: usize,\n    info: FrameMetadata,\n}\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub enum ColorSpace {\n    Bt709,\n    Bt2020Pq,\n}\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\npub struct VideoStreamParams {\n    pub width: u32,\n    pub height: u32,\n    pub color_space: ColorSpace,\n    pub color_full_range: bool,\n}\n\nimpl Default for VideoStreamParams {\n    fn default() -> Self {\n        Self {\n            width: 0,\n            height: 0,\n            color_space: ColorSpace::Bt709,\n            color_full_range: false,\n        }\n    }\n}\n\npub enum VideoStreamEvent {\n    VideoStreamReady(Arc<VkImage>, VideoStreamParams),\n    VideoFrameAvailable,\n}\n\nenum StreamState {\n    Empty,\n    Initializing(DecoderInit),\n    Streaming(CPUDecoder),\n    Restarting(CPUDecoder, DecoderInit),\n}\n\nimpl std::fmt::Debug for StreamState {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        match self {\n            StreamState::Empty => write!(f, \"Empty\"),\n            StreamState::Initializing(init) => write!(f, \"Initializing({})\", init.stream_seq),\n            StreamState::Streaming(dec) 
=> write!(f, \"Streaming({})\", dec.stream_seq),\n            StreamState::Restarting(dec, init) => write!(\n                f,\n                \"RestartingStream({} -> {})\",\n                dec.stream_seq, init.stream_seq\n            ),\n        }\n    }\n}\n\npub struct VideoStream<T: From<VideoStreamEvent> + Send + 'static> {\n    state: StreamState,\n    proxy: winit::event_loop::EventLoopProxy<T>,\n    vk: Arc<VkContext>,\n}\n\nimpl<T: From<VideoStreamEvent> + Send + 'static> VideoStream<T> {\n    pub fn new(vk: Arc<VkContext>, proxy: winit::event_loop::EventLoopProxy<T>) -> Self {\n        Self {\n            state: StreamState::Empty,\n            proxy,\n            vk,\n        }\n    }\n\n    /// Initiates a restart of the current video stream. The restart completes\n    /// once enough packets have been received to determine the stream metadata,\n    /// at which point a VideoStreamReady event is sent with the new texture.\n    pub fn reset(\n        &mut self,\n        stream_seq: u64,\n        width: u32,\n        height: u32,\n        codec: protocol::VideoCodec,\n    ) -> anyhow::Result<()> {\n        debug!(\n            stream_seq,\n            width,\n            height,\n            ?codec,\n            \"starting or restarting video stream\"\n        );\n\n        let init = DecoderInit::new(self.vk.clone(), stream_seq, codec, width, height)?;\n\n        use StreamState::*;\n        let state = std::mem::replace(&mut self.state, Empty);\n        self.state = match state {\n            Empty | Initializing(_) => Initializing(init),\n            Streaming(renderer) | Restarting(renderer, _) => Restarting(renderer, init),\n        };\n\n        trace!(state = ?self.state, \"video stream updated\");\n        Ok(())\n    }\n\n    pub fn recv_packet(&mut self, buf: Undecoded) -> anyhow::Result<()> {\n        use StreamState::*;\n\n        let stream_seq = buf.stream_seq();\n        let seq = buf.seq();\n        let len = buf.len();\n        
trace!(stream_seq, seq, len, \"received video packet\",);\n\n        // Feed the existing stream.\n        if let Streaming(ref mut dec) | Restarting(ref mut dec, _) = self.state {\n            if dec.stream_seq == stream_seq {\n                trace!(\n                    stream_seq,\n                    seq,\n                    pts = buf.pts(),\n                    len,\n                    \"received full video packet\",\n                );\n\n                STATS.frame_received(stream_seq, seq, len);\n                dec.send_packet(buf)?;\n                return Ok(());\n            }\n        }\n\n        // Feed the new stream, if there is one.\n        let new_stream_ready = match self.state {\n            Initializing(ref mut init) | Restarting(_, ref mut init)\n                if init.stream_seq == stream_seq =>\n            {\n                trace!(\n                    stream_seq,\n                    seq,\n                    len,\n                    \"received full video packet for initializing stream\",\n                );\n\n                // Returns true if the stream is ready.\n                init.send_packet(buf)?\n            }\n            _ => false,\n        };\n\n        if new_stream_ready {\n            // N.B. 
An error here puts us into an invalid state.\n            let (dec, texture, params) = match std::mem::replace(&mut self.state, Empty) {\n                Initializing(init) | Restarting(_, init) => {\n                    init.into_decoder(self.proxy.clone())?\n                }\n                Streaming(_) | Empty => unreachable!(),\n            };\n\n            let _ = self\n                .proxy\n                .send_event(VideoStreamEvent::VideoStreamReady(texture, params).into());\n\n            self.state = Streaming(dec);\n            trace!(state = ?self.state, \"video stream updated\")\n        }\n\n        Ok(())\n    }\n\n    pub fn prepare_frame(&mut self) -> anyhow::Result<Option<FrameMetadata>> {\n        match self.state {\n            StreamState::Streaming(ref mut dec) | StreamState::Restarting(ref mut dec, _) => {\n                dec.prepare_frame()\n            }\n            StreamState::Empty | StreamState::Initializing(_) => Ok(None),\n        }\n    }\n\n    pub fn mark_frame_rendered(&mut self) {\n        match self.state {\n            StreamState::Streaming(ref mut dec) | StreamState::Restarting(ref mut dec, _) => {\n                dec.mark_frame_rendered()\n            }\n            StreamState::Empty | StreamState::Initializing(_) => (),\n        }\n    }\n\n    pub fn is_ready(&self) -> bool {\n        match self.state {\n            StreamState::Empty | StreamState::Initializing(_) => false,\n            StreamState::Streaming(_) | StreamState::Restarting(_, _) => true,\n        }\n    }\n}\n\nstruct CPUDecoder {\n    stream_seq: u64,\n    prepared_frame_info: Option<FrameMetadata>,\n\n    staging_buffer: VkHostBuffer,\n    yuv_buffer_offsets: [usize; 3],\n    yuv_buffer_strides: [usize; 3],\n    // This is reference-counted because we share it with the renderer.\n    video_texture: Arc<VkImage>,\n    texture_width: u32,\n    texture_height: u32,\n\n    upload_cb: vk::CommandBuffer,\n    upload_fence: vk::Fence,\n    
upload_ts_pool: VkTimestampQueryPool,\n    tracy_upload_span: Option<tracy_client::GpuSpan>,\n\n    undecoded_send: mpsc::Sender<Undecoded>,\n    decoded_recv: mpsc::Receiver<YUVPicture>,\n    decoder_thread_handle: Option<std::thread::JoinHandle<anyhow::Result<()>>>,\n\n    vk: Arc<VkContext>,\n}\n\n/// A temporary struct that receives video packets until it has enough metadata\n/// to start decoding and recieves a single frame. It also handles timing out\n/// if it never receives any metadata in the (otherwise valid) video stream.\nstruct DecoderInit {\n    stream_seq: u64,\n    width: u32,\n    height: u32,\n    started: time::Instant,\n    decoder: ffmpeg::decoder::Video,\n    packet: ffmpeg::Packet,\n    first_frame: Option<(ffmpeg::frame::Video, FrameMetadata)>,\n    vk: Arc<VkContext>,\n}\n\nimpl DecoderInit {\n    fn new(\n        vk: Arc<VkContext>,\n        stream_seq: u64,\n        codec: protocol::VideoCodec,\n        width: u32,\n        height: u32,\n    ) -> anyhow::Result<Self> {\n        let codec = {\n            let id = match codec {\n                protocol::VideoCodec::H264 => ffmpeg::codec::Id::H264,\n                protocol::VideoCodec::H265 => ffmpeg::codec::Id::H265,\n                protocol::VideoCodec::Av1 => ffmpeg::codec::Id::AV1,\n                _ => {\n                    error!(\"unexpected codec: {:?}\", codec);\n                    unimplemented!();\n                }\n            };\n\n            ffmpeg::decoder::find(id).ok_or(anyhow::anyhow!(\"codec not found\"))?\n        };\n\n        let dec_ctx = unsafe {\n            let ptr = ffmpeg_sys::avcodec_alloc_context3(codec.as_ptr());\n            (*ptr).width = width as i32;\n            (*ptr).height = height as i32;\n\n            let mut hw_ctx: *mut _ = std::ptr::null_mut();\n\n            let device_type = if cfg!(target_vendor = \"apple\") {\n                ffmpeg_sys::AVHWDeviceType::AV_HWDEVICE_TYPE_VIDEOTOOLBOX\n            } else {\n                
ffmpeg_sys::AVHWDeviceType::AV_HWDEVICE_TYPE_VULKAN\n            };\n\n            let res = ffmpeg_sys::av_hwdevice_ctx_create(\n                &mut hw_ctx,\n                device_type,\n                std::ptr::null_mut(),\n                std::ptr::null_mut(),\n                0,\n            );\n\n            if res < 0 {\n                warn!(\"hardware decoding setup failed, falling back to CPU decoder\");\n            } else {\n                (*ptr).hw_device_ctx = hw_ctx;\n                (*ptr).get_format = Some(get_hw_format);\n            }\n\n            ffmpeg::codec::context::Context::wrap(ptr, None)\n        };\n\n        let mut decoder = dec_ctx.decoder();\n        decoder.set_flags(ffmpeg::codec::Flags::LOW_DELAY);\n\n        let decoder = decoder.open()?.video()?;\n        let packet = ffmpeg::Packet::empty();\n\n        Ok(Self {\n            stream_seq,\n            width,\n            height,\n            started: time::Instant::now(),\n            decoder,\n            packet,\n            first_frame: None,\n            vk,\n        })\n    }\n\n    /// Feed a packet into the decoder. Returns true if the parameters of the\n    /// stream have been recovered and it's safe to call into_decoder. 
Returns\n    /// an error only on timeout.\n    fn send_packet(&mut self, buf: Undecoded) -> anyhow::Result<bool> {\n        let info = FrameMetadata {\n            stream_seq: self.stream_seq,\n            seq: buf.seq(),\n            pts: buf.pts(),\n        };\n\n        if self.started.elapsed() > DECODER_INIT_TIMEOUT {\n            return Err(anyhow!(\"timed out waiting for video stream metadata\"));\n        }\n\n        copy_packet(&mut self.packet, buf)?;\n        match self.decoder.send_packet(&self.packet) {\n            Ok(()) => {}\n            Err(ffmpeg::Error::Other {\n                errno: ffmpeg::error::EAGAIN,\n            }) => return Err(anyhow!(\"decoder already read initial packets\")),\n            Err(_) => return Ok(false),\n        }\n\n        let mut frame = ffmpeg::frame::Video::empty();\n        match self.decoder.receive_frame(&mut frame) {\n            Ok(()) => {\n                self.first_frame = match frame.format() {\n                    ffmpeg::format::Pixel::VULKAN | ffmpeg_next::format::Pixel::VIDEOTOOLBOX => {\n                        let sw_format = unsafe {\n                            let ctx_ref = (*self.decoder.as_ptr()).hw_frames_ctx;\n                            assert!(!ctx_ref.is_null());\n\n                            let mut transfer_fmt_list = std::ptr::null_mut();\n                            if ffmpeg_sys::av_hwframe_transfer_get_formats(\n                            ctx_ref,\n                            ffmpeg_sys::AVHWFrameTransferDirection::AV_HWFRAME_TRANSFER_DIRECTION_FROM,\n                            &mut transfer_fmt_list,\n                            0) < 0\n                                {\n                                    bail!(\"call to av_hwframe_transfer_get_formats failed\");\n                                };\n\n                            let transfer_formats = read_format_list(transfer_fmt_list);\n                            assert!(!transfer_formats.is_empty());\n\n                      
      transfer_formats[0]\n                        };\n\n                        let mut sw_frame = ffmpeg::frame::Video::new(\n                            sw_format,\n                            self.decoder.width(),\n                            self.decoder.height(),\n                        );\n\n                        unsafe {\n                            let res = ffmpeg_sys::av_hwframe_transfer_data(\n                                sw_frame.as_mut_ptr(),\n                                frame.as_ptr(),\n                                0,\n                            );\n\n                            if res < 0 {\n                                return Err(anyhow!(\"call to av_hwframe_transfer_data failed\"));\n                            }\n\n                            Some((sw_frame, info))\n                        }\n                    }\n                    _ => Some((frame, info)),\n                };\n\n                Ok(true)\n            }\n            Err(ffmpeg::Error::Other {\n                errno: ffmpeg::error::EAGAIN,\n            }) => Ok(false),\n            Err(e) => Err(e.into()),\n        }\n    }\n\n    /// Consumes the DecoderInit, returning a CPUDecoder capable of uploading\n    /// frames to the GPU.\n    fn into_decoder<T>(\n        self,\n        proxy: winit::event_loop::EventLoopProxy<T>,\n    ) -> anyhow::Result<(CPUDecoder, Arc<VkImage>, VideoStreamParams)>\n    where\n        T: From<VideoStreamEvent> + Send,\n    {\n        let width = self.decoder.width();\n        let height = self.decoder.height();\n\n        let decoder_format = self.decoder.format();\n        let first_frame = match self.first_frame {\n            Some(f) => f,\n            None => return Err(anyhow!(\"no frames received yet\")),\n        };\n\n        // If we're using hardware decode, create a \"hardware\" frame to use with\n        // receive_frame.\n        let output_format = first_frame.0.format();\n\n        let ((mut frame, info), mut hw_frame) 
= match decoder_format {\n            ffmpeg::format::Pixel::VULKAN => {\n                let hw_frame =\n                    ffmpeg::frame::Video::new(ffmpeg::format::Pixel::VULKAN, width, height);\n                debug!(format = ?hw_frame.format(), \"hw_frame format\");\n\n                (first_frame, Some(hw_frame))\n            }\n            ffmpeg::format::Pixel::VIDEOTOOLBOX => {\n                let hw_frame =\n                    ffmpeg::frame::Video::new(ffmpeg::format::Pixel::VIDEOTOOLBOX, width, height);\n\n                (first_frame, Some(hw_frame))\n            }\n            _ => (first_frame, None),\n        };\n\n        // For 10-bit textures, we need to end up in on the GPU in P010LE,\n        // because that's better supported. To make the copy easier, we'll use\n        // swscale to convert to a matching intermediate format.\n        let (intermediate_format, texture_format) = match output_format {\n            ffmpeg::format::Pixel::YUV420P => (None, vk::Format::G8_B8_R8_3PLANE_420_UNORM),\n            ffmpeg::format::Pixel::NV12 => (None, vk::Format::G8_B8R8_2PLANE_420_UNORM),\n            ffmpeg::format::Pixel::P010LE => {\n                (None, vk::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16)\n            }\n            ffmpeg::format::Pixel::YUV420P10 | ffmpeg::format::Pixel::YUV420P10LE => (\n                Some(ffmpeg::format::Pixel::P010LE),\n                vk::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,\n            ),\n            _ => return Err(anyhow!(\"unexpected pixel format: {:?}\", output_format)),\n        };\n\n        debug_assert_eq!(frame.width(), width);\n        debug_assert_eq!(frame.height(), height);\n\n        if width != self.width || height != self.height {\n            return Err(anyhow!(\n                \"unexpected video stream dimensions: {}x{} (expected {}x{})\",\n                width,\n                height,\n                self.width,\n                self.height\n            ));\n   
     }\n\n        let mut intermediate_frame =\n            intermediate_format.map(|fmt| ffmpeg::frame::Video::new(fmt, width, height));\n\n        // For the purposes of determining the size of and offsets into the\n        // staging buffer, we use the intermediate frame if it exists, otherwise\n        // the output frame.\n        let model_frame = intermediate_frame.as_ref().unwrap_or(&frame);\n\n        // Precalculate the layout of the staging buffer.\n        let mut buffer_strides = [0; 3];\n        let mut buffer_offsets = [0; 3];\n        let mut buffer_size = 0;\n        for plane in 0..model_frame.planes() {\n            let stride = model_frame.stride(plane);\n            let len = stride * model_frame.plane_height(plane) as usize;\n\n            buffer_strides[plane] = stride;\n            buffer_offsets[plane] = buffer_size;\n            buffer_size += len;\n        }\n\n        let staging_buffer = create_host_buffer(\n            &self.vk.device,\n            self.vk.device_info.host_visible_mem_type_index,\n            vk::BufferUsageFlags::TRANSFER_SRC,\n            buffer_size,\n        )?;\n\n        let color_space = match (\n            self.decoder.color_space(),\n            self.decoder.color_transfer_characteristic(),\n        ) {\n            (ffmpeg::color::Space::BT709, ffmpeg::color::TransferCharacteristic::BT709) => {\n                ColorSpace::Bt709\n            }\n            (ffmpeg::color::Space::BT2020NCL, ffmpeg::color::TransferCharacteristic::SMPTE2084) => {\n                ColorSpace::Bt2020Pq\n            }\n            (\n                ffmpeg::color::Space::Unspecified,\n                ffmpeg::color::TransferCharacteristic::Unspecified,\n            ) => {\n                warn!(\"video stream has unspecified color primaries or transfer function\");\n                ColorSpace::Bt709\n            }\n            (cs, ctrc) => bail!(\"unexpected color description: {:?} / {:?}\", cs, ctrc),\n        };\n\n        let 
color_full_range = match self.decoder.color_range() {\n            ffmpeg::color::Range::MPEG => false,\n            ffmpeg::color::Range::JPEG => true,\n            cr => {\n                warn!(\"unexpected color range: {:?}\", cr);\n                false\n            }\n        };\n\n        let video_texture = Arc::new(VkImage::new(\n            self.vk.clone(),\n            texture_format,\n            width,\n            height,\n            vk::ImageUsageFlags::TRANSFER_DST\n                | vk::ImageUsageFlags::SAMPLED\n                | vk::ImageUsageFlags::TRANSFER_SRC,\n            vk::SharingMode::EXCLUSIVE,\n            vk::ImageCreateFlags::empty(),\n        )?);\n\n        // Uploads happen on the present queue.\n        let upload_cb = create_command_buffer(&self.vk.device, self.vk.present_queue.command_pool)?;\n        let upload_fence = create_fence(&self.vk.device, true)?;\n        let upload_ts_pool = create_timestamp_query_pool(&self.vk.device, 2)?;\n\n        let (undecoded_send, undecoded_recv) = mpsc::channel::<Undecoded>();\n        let (decoded_send, decoded_recv) = mpsc::channel::<YUVPicture>();\n\n        // Send the frame we have from before.\n        decoded_send\n            .send(copy_frame(\n                &mut frame,\n                intermediate_frame.as_mut(),\n                &mut BytesMut::new(),\n                info,\n            ))\n            .unwrap();\n\n        // Spawn another thread that receives packets on one channel and sends\n        // completed pictures on another.\n        let stream_seq = self.stream_seq;\n        let mut decoder = self.decoder;\n        let mut packet = self.packet;\n        let decoder_thread_handle = std::thread::Builder::new()\n            .name(\"CPU decoder\".to_string())\n            .spawn(move || -> anyhow::Result<()> {\n                tracy_client::set_thread_name!(\"CPU decoder\");\n\n                // This should have enough capacity for four pictures (YUV420 has\n             
   // a bpp of 1.5). It will also resize dynamically, of course.\n                let mut scratch = BytesMut::with_capacity((width * height * 6) as usize);\n\n                for buf in undecoded_recv {\n                    let _tracy_frame = tracy_client::non_continuous_frame!(\"decode\");\n                    let span = trace_span!(\"decode_loop\");\n                    let _guard = span.enter();\n\n                    let info = FrameMetadata {\n                        stream_seq,\n                        seq: buf.seq(),\n                        pts: buf.pts(),\n                    };\n\n                    copy_packet(&mut packet, buf)?;\n\n                    // Send the packet to the decoder.\n                    if trace_span!(\"send_packet\")\n                        .in_scope(|| decoder.send_packet(&packet))\n                        .is_err()\n                    {\n                        continue;\n                    }\n\n                    // Receive frames until we get EAGAIN.\n                    loop {\n                        match receive_frame(&mut decoder, &mut frame, hw_frame.as_mut()) {\n                            Ok(()) => {\n                                let pic = copy_frame(\n                                    &mut frame,\n                                    intermediate_frame.as_mut(),\n                                    &mut scratch,\n                                    info,\n                                );\n\n                                let span = trace_span!(\"send\");\n                                let _guard = span.enter();\n\n                                match decoded_send.send(pic) {\n                                    Ok(()) => {}\n                                    Err(mpsc::SendError(_)) => return Ok(()),\n                                }\n\n                                match proxy.send_event(VideoStreamEvent::VideoFrameAvailable.into())\n                                {\n                                 
   Ok(()) => {}\n                                    Err(_) => return Ok(()),\n                                }\n                            }\n                            Err(ffmpeg::Error::Other {\n                                errno: ffmpeg::error::EAGAIN,\n                            }) => break,\n                            Err(e) => {\n                                debug!(\"receive_frame failed: {:?}\", e);\n                                return Err(e.into());\n                            }\n                        }\n                    }\n                }\n\n                Ok(())\n            })?;\n\n        let dec = CPUDecoder {\n            stream_seq: self.stream_seq,\n            prepared_frame_info: None,\n\n            staging_buffer,\n            yuv_buffer_offsets: buffer_offsets,\n            yuv_buffer_strides: buffer_strides,\n            video_texture: video_texture.clone(),\n            texture_width: width,\n            texture_height: height,\n            upload_cb,\n            upload_fence,\n            upload_ts_pool,\n            tracy_upload_span: None,\n\n            undecoded_send,\n            decoded_recv,\n            decoder_thread_handle: Some(decoder_thread_handle),\n            vk: self.vk,\n        };\n\n        unsafe { dec.prerecord_upload()? 
};\n\n        let params = VideoStreamParams {\n            width,\n            height,\n            color_space,\n            color_full_range,\n        };\n\n        Ok((dec, video_texture, params))\n    }\n}\n\nimpl CPUDecoder {\n    fn send_packet(&mut self, buf: Undecoded) -> anyhow::Result<()> {\n        let exit = match self.undecoded_send.send(buf) {\n            Ok(_) => return Ok(()),\n            Err(mpsc::SendError(_)) => match self.decoder_thread_handle.take() {\n                Some(h) => h.join(),\n                None => unreachable!(),\n            },\n        };\n\n        match exit {\n            Ok(Ok(())) => Err(anyhow!(\"decoding thread exited unexpectedly\")),\n            Ok(Err(e)) => Err(e).context(\"decoding exited with error\"),\n            Err(v) => Err(anyhow!(\"decoding thread panicked: {:?}\", v)),\n        }\n    }\n\n    pub fn prepare_frame(&mut self) -> anyhow::Result<Option<FrameMetadata>> {\n        // If multiple frames are ready, only grab the last one.\n        let mut iterator = self.decoded_recv.try_iter().peekable();\n        while let Some(pic) = iterator.next() {\n            if iterator.peek().is_some() {\n                STATS.frame_discarded(pic.info.stream_seq, pic.info.seq);\n\n                debug!(\n                    stream_seq = pic.info.stream_seq,\n                    seq = pic.info.seq,\n                    \"discarding frame\"\n                );\n            } else {\n                let pic_info = pic.info;\n                unsafe {\n                    self.upload(pic).context(\"uploading frame to GPU\")?;\n                }\n\n                if let Some(old) = self.prepared_frame_info.replace(pic_info) {\n                    debug!(\n                        stream_seq = old.stream_seq,\n                        seq = old.seq,\n                        \"overwriting uploaded frame\"\n                    );\n\n                    STATS.frame_discarded(old.stream_seq, old.seq);\n                }\n\n    
            return Ok(Some(pic_info));\n            }\n        }\n\n        Ok(None)\n    }\n\n    pub fn mark_frame_rendered(&mut self) {\n        if let Some(info) = self.prepared_frame_info.take() {\n            STATS.frame_rendered(info.stream_seq, info.seq);\n        }\n    }\n\n    unsafe fn upload(&mut self, pic: YUVPicture) -> anyhow::Result<()> {\n        // Wait for the previous upload to complete.\n        let device = &self.vk.device;\n        device.wait_for_fences(&[self.upload_fence], true, u64::MAX)?;\n\n        // Copy data into the staging buffer.\n        self.yuv_buffer_offsets\n            .iter()\n            .zip(pic.planes.iter())\n            .take(pic.num_planes)\n            .for_each(|(offset, src)| {\n                let dst = std::slice::from_raw_parts_mut(\n                    (self.staging_buffer.access as *mut u8).add(*offset),\n                    src.len(),\n                );\n\n                dst.copy_from_slice(src);\n            });\n\n        // Trace the upload, including loading timestamps for the previous upload.\n        if let Some(ctx) = &self.vk.tracy_context {\n            if let Some(prev_span) = self.tracy_upload_span.take() {\n                let timestamps = self.upload_ts_pool.fetch_results(&self.vk.device)?;\n                prev_span.upload_timestamp(timestamps[0], timestamps[1]);\n            }\n\n            self.tracy_upload_span = Some(ctx.span(tracy_client::span_location!())?);\n        }\n\n        // The command buffer was prerecorded, so we can directly submit it.\n        {\n            let cbs = [self.upload_cb];\n            let submit_info = vk::SubmitInfo::default().command_buffers(&cbs);\n\n            self.vk.device.reset_fences(&[self.upload_fence])?;\n\n            trace!(queue = ?self.vk.present_queue.queue, \"queue submit for upload\");\n\n            let submits = [submit_info];\n            device.queue_submit(self.vk.present_queue.queue, &submits, self.upload_fence)?;\n        }\n\n       
 if let Some(span) = self.tracy_upload_span.as_mut() {\n            span.end_zone();\n        }\n\n        Ok(())\n    }\n\n    unsafe fn prerecord_upload(&self) -> anyhow::Result<()> {\n        let device = &self.vk.device;\n\n        // Reset the command buffer.\n        device.reset_command_buffer(self.upload_cb, vk::CommandBufferResetFlags::empty())?;\n\n        // Begin the command buffer.\n        {\n            let begin_info = vk::CommandBufferBeginInfo::default()\n                .flags(vk::CommandBufferUsageFlags::SIMULTANEOUS_USE);\n\n            device.begin_command_buffer(self.upload_cb, &begin_info)?;\n        }\n\n        // Record the start timestamp.\n        self.upload_ts_pool.cmd_reset(device, self.upload_cb);\n        device.cmd_write_timestamp(\n            self.upload_cb,\n            vk::PipelineStageFlags::TOP_OF_PIPE,\n            self.upload_ts_pool.pool,\n            0,\n        );\n\n        // Transfer the image to be writable.\n        cmd_image_barrier(\n            device,\n            self.upload_cb,\n            self.video_texture.image,\n            vk::PipelineStageFlags::TOP_OF_PIPE,\n            vk::AccessFlags::empty(),\n            vk::PipelineStageFlags::TRANSFER,\n            vk::AccessFlags::TRANSFER_WRITE,\n            vk::ImageLayout::UNDEFINED,\n            vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n        );\n\n        // Upload from the staging buffer to the texture.\n        {\n            let num_planes = match self.video_texture.format {\n                vk::Format::G8_B8_R8_3PLANE_420_UNORM => 3,\n                vk::Format::G8_B8R8_2PLANE_420_UNORM => 2,\n                vk::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 => 2,\n                _ => unreachable!(),\n            };\n\n            let regions = [\n                vk::ImageAspectFlags::PLANE_0,\n                vk::ImageAspectFlags::PLANE_1,\n                vk::ImageAspectFlags::PLANE_2,\n            ]\n            .into_iter()\n            
.enumerate()\n            .take(num_planes)\n            .map(|(plane, plane_aspect_mask)| {\n                // Vulkan considers the image width/height to be 1/2 the size\n                // for the U and V planes.\n                let (width, height) = if plane == 0 {\n                    (self.texture_width, self.texture_height)\n                } else {\n                    (self.texture_width / 2, self.texture_height / 2)\n                };\n\n                let texel_width = match self.video_texture.format {\n                    vk::Format::G8_B8_R8_3PLANE_420_UNORM => 1,\n                    vk::Format::G8_B8R8_2PLANE_420_UNORM => {\n                        if plane == 0 {\n                            1\n                        } else {\n                            2\n                        }\n                    }\n                    vk::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 => {\n                        if plane == 0 {\n                            2\n                        } else {\n                            4\n                        }\n                    }\n                    _ => unreachable!(),\n                };\n\n                vk::BufferImageCopy::default()\n                    .buffer_offset(self.yuv_buffer_offsets[plane] as u64)\n                    .buffer_row_length((self.yuv_buffer_strides[plane] / texel_width) as u32) // In texels.\n                    .image_subresource(vk::ImageSubresourceLayers {\n                        aspect_mask: plane_aspect_mask,\n                        mip_level: 0,\n                        base_array_layer: 0,\n                        layer_count: 1,\n                    })\n                    .image_extent(vk::Extent3D {\n                        width,\n                        height,\n                        depth: 1,\n                    })\n            })\n            .collect::<Vec<_>>();\n\n            device.cmd_copy_buffer_to_image(\n                self.upload_cb,\n                
self.staging_buffer.buffer,\n                self.video_texture.image,\n                vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n                &regions,\n            );\n        }\n\n        // Transfer the image back to be readable.\n        cmd_image_barrier(\n            device,\n            self.upload_cb,\n            self.video_texture.image,\n            vk::PipelineStageFlags::TRANSFER,\n            vk::AccessFlags::TRANSFER_WRITE,\n            vk::PipelineStageFlags::FRAGMENT_SHADER,\n            vk::AccessFlags::SHADER_READ,\n            vk::ImageLayout::UNDEFINED,\n            vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n        );\n\n        // Record the end timestamp.\n        device.cmd_write_timestamp(\n            self.upload_cb,\n            vk::PipelineStageFlags::BOTTOM_OF_PIPE,\n            self.upload_ts_pool.pool,\n            1,\n        );\n\n        device.end_command_buffer(self.upload_cb)?;\n        Ok(())\n    }\n}\n\nimpl Drop for CPUDecoder {\n    fn drop(&mut self) {\n        let device = &self.vk.device;\n\n        unsafe {\n            device.queue_wait_idle(self.vk.present_queue.queue).ok();\n\n            destroy_host_buffer(device, &self.staging_buffer);\n            device.destroy_fence(self.upload_fence, None);\n            device.destroy_query_pool(self.upload_ts_pool.pool, None);\n            device.free_command_buffers(self.vk.present_queue.command_pool, &[self.upload_cb]);\n        }\n    }\n}\n\n#[instrument(skip_all)]\nfn receive_frame(\n    dec: &mut ffmpeg::decoder::Video,\n    frame: &mut ffmpeg::frame::Video,\n    hw_frame: Option<&mut ffmpeg::frame::Video>,\n) -> Result<(), ffmpeg::Error> {\n    match hw_frame {\n        Some(f) => {\n            dec.receive_frame(f)?;\n\n            unsafe {\n                let res = ffmpeg_sys::av_hwframe_transfer_data(frame.as_mut_ptr(), f.as_ptr(), 0);\n                if res < 0 {\n                    error!(\"call to av_hwframe_transfer_data failed\");\n                    
Err(ffmpeg::Error::Other { errno: res })\n                } else {\n                    Ok(())\n                }\n            }\n        }\n        None => dec.receive_frame(frame),\n    }\n}\n\n#[instrument(skip_all)]\nfn copy_packet(pkt: &mut ffmpeg::Packet, buf: Undecoded) -> anyhow::Result<()> {\n    // It's necessary to reset the packet metadata for each NAL.\n    unsafe {\n        use ffmpeg::packet::Mut;\n        ffmpeg_sys::av_init_packet(pkt.as_mut_ptr());\n    }\n\n    // Copy into data.\n    let packet_len = buf.len();\n    match pkt.size().cmp(&packet_len) {\n        std::cmp::Ordering::Less => {\n            pkt.grow(packet_len - pkt.size());\n        }\n        std::cmp::Ordering::Greater => {\n            // Takes the new total, not the amount to shrink.\n            pkt.shrink(packet_len);\n        }\n        std::cmp::Ordering::Equal => {}\n    };\n\n    buf.copy_to_slice(pkt.data_mut().unwrap());\n    Ok(())\n}\n\n#[instrument(skip_all)]\nfn copy_frame(\n    frame: &mut ffmpeg::frame::Video,\n    intermediate_frame: Option<&mut ffmpeg::frame::Video>,\n    scratch: &mut BytesMut,\n    info: FrameMetadata,\n) -> YUVPicture {\n    let transfer_src = if let Some(intermediate) = intermediate_frame {\n        // TODO reuse\n        let mut ctx = ffmpeg::software::scaling::Context::get(\n            frame.format(),\n            frame.width(),\n            frame.height(),\n            intermediate.format(),\n            intermediate.width(),\n            intermediate.height(),\n            ffmpeg::software::scaling::Flags::empty(),\n        )\n        .expect(\"failed to create sws ctx\");\n\n        ctx.run(frame, intermediate).expect(\"failed to convert\");\n\n        intermediate\n    } else {\n        frame\n    };\n\n    let mut pic = YUVPicture {\n        planes: [Bytes::new(), Bytes::new(), Bytes::new()],\n        num_planes: transfer_src.planes(),\n        info,\n    };\n\n    scratch.truncate(0);\n    for plane in 0..transfer_src.planes() {\n    
    scratch.extend_from_slice(transfer_src.data(plane));\n        pic.planes[plane] = scratch.split().freeze();\n    }\n\n    pic\n}\n\n#[no_mangle]\nunsafe extern \"C\" fn get_hw_format(\n    ctx: *mut ffmpeg_sys::AVCodecContext,\n    list: *const ffmpeg_sys::AVPixelFormat,\n) -> ffmpeg_sys::AVPixelFormat {\n    use ffmpeg_sys::AVPixelFormat::*;\n\n    let sw_pix_fmt = (*ctx).sw_pix_fmt;\n    let formats = read_format_list(list);\n\n    debug!(?formats, ?sw_pix_fmt, \"get_hw_format\");\n\n    if formats.contains(&ffmpeg::format::Pixel::VULKAN) {\n        return AV_PIX_FMT_VULKAN;\n    } else if formats.contains(&ffmpeg::format::Pixel::VIDEOTOOLBOX) {\n        let frames_ctx_ref = ffmpeg_sys::av_hwframe_ctx_alloc((*ctx).hw_device_ctx);\n        if frames_ctx_ref.is_null() {\n            error!(\"call to av_hwframe_ctx_alloc failed\");\n            return sw_pix_fmt;\n        }\n\n        let frames_ctx = (*frames_ctx_ref).data as *mut ffmpeg_sys::AVHWFramesContext;\n        (*frames_ctx).width = (*ctx).width;\n        (*frames_ctx).height = (*ctx).height;\n        (*frames_ctx).format = AV_PIX_FMT_VIDEOTOOLBOX;\n        (*frames_ctx).sw_format = AV_PIX_FMT_YUV420P;\n\n        let res = ffmpeg_sys::av_hwframe_ctx_init(frames_ctx_ref);\n        if res < 0 {\n            error!(\"call to av_hwframe_ctx_init failed\");\n            return sw_pix_fmt;\n        }\n\n        debug!(\"using VideoToolbox hardware encoder\");\n        (*ctx).hw_frames_ctx = frames_ctx_ref;\n\n        return AV_PIX_FMT_VIDEOTOOLBOX;\n    }\n\n    warn!(\"unable to determine ffmpeg hw format\");\n    sw_pix_fmt\n}\n\nunsafe fn read_format_list(\n    mut ptr: *const ffmpeg_sys::AVPixelFormat,\n) -> Vec<ffmpeg::format::Pixel> {\n    let mut formats = Vec::new();\n    while !ptr.is_null() && *ptr != ffmpeg_sys::AVPixelFormat::AV_PIX_FMT_NONE {\n        formats.push((*ptr).into());\n        ptr = ptr.add(1);\n    }\n\n    formats\n}\n"
  },
  {
    "path": "mm-client/src/vulkan.rs",
    "content": "#![allow(clippy::missing_safety_doc)]\n\n// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{\n    ffi::{c_void, CStr, CString},\n    sync::Arc,\n};\n\nuse anyhow::{anyhow, bail, Context};\nuse ash::{\n    ext::debug_utils,\n    khr::{\n        dynamic_rendering, surface, swapchain, video_decode_av1, video_decode_h264,\n        video_decode_h265, video_decode_queue, video_queue,\n    },\n    vk,\n};\nuse cstr::cstr;\nuse tracing::{debug, error, info, warn};\nuse winit::raw_window_handle::{HasDisplayHandle, HasWindowHandle as _};\n\nuse crate::video::ColorSpace;\n\npub struct VkDebugContext {\n    debug: debug_utils::Instance,\n    messenger: vk::DebugUtilsMessengerEXT,\n}\n\npub struct VkQueue {\n    pub queue: vk::Queue,\n    pub command_pool: vk::CommandPool,\n}\n\npub struct VkDeviceInfo {\n    pub device_name: CString,\n    pub device_type: vk::PhysicalDeviceType,\n    pub limits: vk::PhysicalDeviceLimits,\n    pub present_family: u32,\n    pub decode_family: Option<u32>,\n    pub supports_h264: bool,\n    pub supports_h265: bool,\n    pub supports_av1: bool,\n    pub memory_props: vk::PhysicalDeviceMemoryProperties,\n    pub host_visible_mem_type_index: u32,\n    pub host_mem_is_cached: bool,\n    pub selected_extensions: Vec<CString>,\n}\n\npub struct VkContext {\n    pub entry: ash::Entry,\n    pub instance: ash::Instance,\n    pub swapchain_loader: swapchain::Device,\n    pub surface_loader: surface::Instance,\n    pub dynamic_rendering_loader: dynamic_rendering::Device,\n\n    pub surface: vk::SurfaceKHR,\n    pub pdevice: vk::PhysicalDevice,\n    pub device: ash::Device,\n    pub device_info: VkDeviceInfo,\n    pub present_queue: VkQueue,\n    pub decode_queue: Option<VkQueue>,\n    pub debug: Option<VkDebugContext>,\n\n    pub tracy_context: Option<tracy_client::GpuContext>,\n\n    // Hold on to a reference to the window, so that it gets dropped last.\n    _window: 
Arc<winit::window::Window>,\n}\n\nimpl VkDeviceInfo {\n    fn query(\n        instance: &ash::Instance,\n        surface_loader: &surface::Instance,\n        surface: vk::SurfaceKHR,\n        device: vk::PhysicalDevice,\n    ) -> anyhow::Result<Self> {\n        let props = unsafe { instance.get_physical_device_properties(device) };\n        let device_type = props.device_type;\n        let device_name = unsafe { CStr::from_ptr(props.device_name.as_ptr()).to_owned() };\n\n        let queue_families = unsafe {\n            instance\n                .get_physical_device_queue_family_properties(device)\n                .into_iter()\n                .collect::<Vec<_>>()\n        };\n\n        let present_family = queue_families\n            .iter()\n            .enumerate()\n            .find(|(idx, properties)| {\n                properties.queue_flags.contains(vk::QueueFlags::GRAPHICS)\n                    && properties.queue_flags.contains(vk::QueueFlags::COMPUTE)\n                    && unsafe {\n                        surface_loader\n                            .get_physical_device_surface_support(device, *idx as u32, surface)\n                            .unwrap_or(false)\n                    }\n            })\n            .map(|(index, _)| index as u32)\n            .to_owned()\n            .ok_or_else(|| anyhow::anyhow!(\"no graphics queue found\"))?;\n\n        let decode_family = queue_families\n            .iter()\n            .enumerate()\n            .find(|(_, properties)| {\n                properties\n                    .queue_flags\n                    .contains(vk::QueueFlags::VIDEO_DECODE_KHR)\n            })\n            .map(|(index, _)| index as u32);\n\n        let available_extensions = unsafe {\n            instance\n                .enumerate_device_extension_properties(device)\n                .unwrap()\n                .into_iter()\n                .map(|properties| CStr::from_ptr(&properties.extension_name as *const _).to_owned())\n        
        .collect::<Vec<_>>()\n        };\n\n        let ext_swapchain = swapchain::NAME;\n        if !contains(&available_extensions, ext_swapchain) {\n            return Err(anyhow::anyhow!(\"swapchain extension not available\"));\n        }\n\n        let mut selected_extensions = vec![\n            ext_swapchain.to_owned(),\n            dynamic_rendering::NAME.to_owned(),\n            #[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n            vk::KhrPortabilitySubsetFn::name().to_owned(),\n        ];\n\n        let ext_video_queue = video_queue::NAME;\n        let ext_video_decode_queue = video_decode_queue::NAME;\n        let ext_h264 = video_decode_h264::NAME;\n        let ext_h265 = video_decode_h265::NAME;\n        let ext_av1 = video_decode_av1::NAME;\n\n        let mut supports_h264 = false;\n        let mut supports_h265 = false;\n        let mut supports_av1 = false;\n        if decode_family.is_some()\n            && contains(&available_extensions, ext_video_queue)\n            && contains(&available_extensions, ext_video_decode_queue)\n        {\n            selected_extensions.push(ext_video_decode_queue.to_owned());\n            selected_extensions.push(ext_video_queue.to_owned());\n\n            if contains(&available_extensions, ext_h264) {\n                supports_h264 = true;\n                selected_extensions.push(ext_h264.to_owned());\n            }\n\n            if contains(&available_extensions, ext_h265) {\n                supports_h265 = true;\n                selected_extensions.push(ext_h265.to_owned());\n            }\n\n            // This doesn't actually exist yet.\n            if contains(&available_extensions, ext_av1) {\n                supports_av1 = true;\n                selected_extensions.push(ext_av1.to_owned());\n            }\n        }\n\n        // We want HOST_CACHED | HOST_COHERENT, but we can make do with just\n        // HOST_VISIBLE.\n        let memory_props = unsafe { 
instance.get_physical_device_memory_properties(device) };\n        let (host_visible_mem_type_index, host_mem_is_cached) = {\n            let mut cached = true;\n            let mut idx = select_memory_type(\n                &memory_props,\n                vk::MemoryPropertyFlags::HOST_VISIBLE\n                    | vk::MemoryPropertyFlags::HOST_CACHED\n                    | vk::MemoryPropertyFlags::HOST_COHERENT,\n                None,\n            );\n\n            if idx.is_none() {\n                idx = select_memory_type(\n                    &memory_props,\n                    vk::MemoryPropertyFlags::HOST_VISIBLE | vk::MemoryPropertyFlags::HOST_COHERENT,\n                    None,\n                );\n\n                if idx.is_none() {\n                    bail!(\"no host visible memory type found\");\n                }\n\n                cached = false;\n            }\n\n            (idx.unwrap(), cached)\n        };\n\n        Ok(Self {\n            device_name,\n            device_type,\n            limits: props.limits,\n            present_family,\n            decode_family,\n            supports_h264,\n            supports_h265,\n            supports_av1,\n            memory_props,\n            host_visible_mem_type_index,\n            host_mem_is_cached,\n            selected_extensions,\n        })\n    }\n\n    pub fn is_integrated(&self) -> bool {\n        self.device_type == vk::PhysicalDeviceType::INTEGRATED_GPU\n    }\n}\n\nimpl VkContext {\n    pub unsafe fn new(window: Arc<winit::window::Window>, debug: bool) -> anyhow::Result<Self> {\n        // MoltenVK is very noisy.\n        #[cfg(target_os = \"macos\")]\n        std::env::set_var(\n            \"MVK_CONFIG_LOG_LEVEL\",\n            std::env::var(\"MVK_CONFIG_LOG_LEVEL\").unwrap_or(\"0\".to_string()),\n        );\n\n        #[cfg(all(target_os = \"macos\", feature = \"moltenvk_static\"))]\n        let entry = ash_molten::load();\n\n        #[cfg(not(all(target_os = \"macos\", feature = 
\"moltenvk_static\")))]\n        let entry = unsafe { ash::Entry::load().context(\"failed to load vulkan libraries!\") }?;\n\n        debug!(\"creating vulkan instance\");\n\n        let (major, minor) = match entry.try_enumerate_instance_version()? {\n            // Vulkan 1.1+\n            Some(version) => (\n                vk::api_version_major(version),\n                vk::api_version_minor(version),\n            ),\n            // Vulkan 1.0\n            None => (1, 0),\n        };\n\n        if major < 1 || (major == 1 && minor < 2) {\n            return Err(anyhow::anyhow!(\"vulkan 1.2 or higher is required\"));\n        }\n\n        // MoltenVK doesn't actually support 1.3.\n        let (major, minor) = if cfg!(any(target_os = \"macos\")) {\n            (1, 2)\n        } else {\n            (major, minor)\n        };\n\n        let app_info = vk::ApplicationInfo::default()\n            .application_name(cstr!(\"Magic Mirror\"))\n            .application_version(vk::make_api_version(0, 0, 1, 0))\n            .engine_name(cstr!(\"No Engine\"))\n            .engine_version(vk::make_api_version(0, 0, 1, 0))\n            .api_version(vk::make_api_version(0, major, minor, 0));\n\n        let mut extensions =\n            ash_window::enumerate_required_extensions(window.display_handle()?.as_raw())?.to_vec();\n\n        let mut layers = Vec::new();\n\n        #[cfg(all(target_os = \"macos\", not(feature = \"moltenvk_static\")))]\n        {\n            extensions.push(vk::KhrPortabilityEnumerationFn::name().as_ptr());\n            // Enabling this extension is a requirement when using\n            // `VK_KHR_portability_subset`\n            extensions.push(vk::KhrGetPhysicalDeviceProperties2Fn::name().as_ptr());\n        }\n\n        if debug {\n            let props = entry.enumerate_instance_extension_properties(None)?;\n            let available_extensions = props\n                .into_iter()\n                .map(|properties| unsafe {\n                    
CStr::from_ptr(&properties.extension_name as *const _).to_owned()\n                })\n                .collect::<Vec<_>>();\n\n            if !available_extensions\n                .iter()\n                .any(|ext| ext.as_c_str() == debug_utils::NAME)\n            {\n                return Err(anyhow::anyhow!(\n                    \"debug utils extension requested, but not available\"\n                ));\n            }\n\n            warn!(\"vulkan debug tooling enabled\");\n            extensions.push(debug_utils::NAME.as_ptr());\n\n            let validation_layer = cstr!(\"VK_LAYER_KHRONOS_validation\");\n            let layer_props = entry.enumerate_instance_layer_properties()?;\n            if layer_props\n                .into_iter()\n                .map(|properties| unsafe { CStr::from_ptr(&properties.layer_name as *const _) })\n                .any(|layer| layer == validation_layer)\n            {\n                layers.push(validation_layer.as_ptr());\n            } else {\n                warn!(\"validation layers requested, but not available!\")\n            }\n        }\n\n        let instance = {\n            let flags = if cfg!(any(target_os = \"macos\", target_os = \"ios\")) {\n                vk::InstanceCreateFlags::ENUMERATE_PORTABILITY_KHR\n            } else {\n                vk::InstanceCreateFlags::default()\n            };\n\n            let instance_create_info = vk::InstanceCreateInfo::default()\n                .flags(flags)\n                .application_info(&app_info)\n                .enabled_layer_names(&layers)\n                .enabled_extension_names(&extensions);\n\n            unsafe { entry.create_instance(&instance_create_info, None)? 
}\n        };\n\n        let surface_loader = surface::Instance::new(&entry, &instance);\n        let surface = unsafe {\n            ash_window::create_surface(\n                &entry,\n                &instance,\n                window.display_handle()?.as_raw(),\n                window.window_handle()?.as_raw(),\n                None,\n            )?\n        };\n\n        let debug_utils = if debug {\n            let debug_utils = debug_utils::Instance::new(&entry, &instance);\n\n            let create_info = vk::DebugUtilsMessengerCreateInfoEXT::default()\n                .message_severity(\n                    vk::DebugUtilsMessageSeverityFlagsEXT::WARNING\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::INFO\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::ERROR,\n                )\n                .message_type(\n                    vk::DebugUtilsMessageTypeFlagsEXT::GENERAL\n                        | vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE\n                        | vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION,\n                )\n                .pfn_user_callback(Some(vulkan_debug_utils_callback));\n\n            let messenger =\n                unsafe { debug_utils.create_debug_utils_messenger(&create_info, None) }?;\n\n            Some(VkDebugContext {\n                debug: debug_utils,\n                messenger,\n            })\n        } else {\n            None\n        };\n\n        // Select a device based on encoding support.\n        let devices = unsafe { instance.enumerate_physical_devices()? 
};\n        let mut devices = devices\n            .into_iter()\n            .enumerate()\n            .flat_map(|(index, dev)| {\n                match VkDeviceInfo::query(&instance, &surface_loader, surface, dev) {\n                    Ok(info) => Some((index as u32, dev, info)),\n                    Err(err) => {\n                        let device_name = unsafe {\n                            CStr::from_ptr(\n                                instance\n                                    .get_physical_device_properties(dev)\n                                    .device_name\n                                    .as_ptr(),\n                            )\n                            .to_owned()\n                        };\n\n                        warn!(\"gpu {device_name:?} ineligible: {err}\");\n                        None\n                    }\n                }\n            })\n            .collect::<Vec<_>>();\n\n        devices.sort_by_key(|(_, _, info)| {\n            let mut score = match info.device_type {\n                vk::PhysicalDeviceType::DISCRETE_GPU => 0,\n                vk::PhysicalDeviceType::INTEGRATED_GPU => 10,\n                _ => 20,\n            };\n\n            score += info.decode_family.is_none() as u32;\n            score += !info.supports_h264 as u32;\n            score += !info.supports_h265 as u32;\n            score += !info.supports_av1 as u32;\n            score\n        });\n\n        if devices.is_empty() {\n            return Err(anyhow!(\"no eligible GPU found!\"));\n        }\n\n        let (index, pdevice, device_info) = devices.remove(0);\n        info!(\"selected gpu: {:?} ({index})\", device_info.device_name);\n\n        let device = {\n            let queue_priorities = &[1.0];\n            let mut queue_indices = Vec::new();\n            queue_indices.push(device_info.present_family);\n            if let Some(idx) = device_info.decode_family {\n                queue_indices.push(idx);\n            }\n\n            
queue_indices.dedup();\n            let queue_create_infos = queue_indices\n                .iter()\n                .map(|&index| {\n                    vk::DeviceQueueCreateInfo::default()\n                        .queue_family_index(index)\n                        .queue_priorities(queue_priorities)\n                })\n                .collect::<Vec<_>>();\n\n            let mut enabled_1_1_features =\n                vk::PhysicalDeviceVulkan11Features::default().sampler_ycbcr_conversion(true);\n\n            let mut dynamic_rendering_features =\n                vk::PhysicalDeviceDynamicRenderingFeatures::default().dynamic_rendering(true);\n\n            let extension_names = device_info\n                .selected_extensions\n                .iter()\n                .map(|v| v.as_c_str().as_ptr())\n                .collect::<Vec<_>>();\n            let device_create_info = vk::DeviceCreateInfo::default()\n                .queue_create_infos(&queue_create_infos)\n                .enabled_extension_names(&extension_names)\n                .push_next(&mut enabled_1_1_features)\n                .push_next(&mut dynamic_rendering_features);\n\n            unsafe { instance.create_device(pdevice, &device_create_info, None)? 
}\n        };\n\n        let present_queue = get_queue_with_command_pool(&device, device_info.present_family)?;\n        let mut decode_queue = None;\n        if device_info.decode_family.is_some() {\n            info!(\n                \"vulkan video decode support: (h264: {}, h265: {}, av1: {})\",\n                device_info.supports_h264, device_info.supports_h265, device_info.supports_av1\n            );\n\n            decode_queue = Some(get_queue_with_command_pool(\n                &device,\n                device_info.decode_family.unwrap(),\n            )?);\n        } else {\n            debug!(\"no vulkan video support found\")\n        }\n\n        if !device_info.host_mem_is_cached {\n            warn!(\"no cache-coherent memory type found on device!\");\n        }\n\n        let swapchain_loader = swapchain::Device::new(&instance, &device);\n        let dynamic_rendering_loader = dynamic_rendering::Device::new(&instance, &device);\n\n        let tracy_context = tracy_client::Client::running().and_then(|client| {\n            match init_tracy_context(&device, &device_info, &present_queue, client) {\n                Ok(ctx) => Some(ctx),\n                Err(err) => {\n                    error!(\"failed to initialize tracy GPU context: {err}\");\n                    None\n                }\n            }\n        });\n\n        Ok(Self {\n            entry,\n            instance,\n            swapchain_loader,\n            surface_loader,\n            dynamic_rendering_loader,\n\n            surface,\n            pdevice,\n            device,\n            device_info,\n            present_queue,\n            decode_queue,\n            debug: debug_utils,\n            tracy_context,\n\n            _window: window,\n        })\n    }\n}\n\nimpl Drop for VkContext {\n    fn drop(&mut self) {\n        let device = &self.device;\n\n        unsafe {\n            device.destroy_command_pool(self.present_queue.command_pool, None);\n            if let 
Some(decode_queue) = self.decode_queue.take() {\n                device.destroy_command_pool(decode_queue.command_pool, None);\n            }\n\n            if let Some(debug) = self.debug.take() {\n                debug\n                    .debug\n                    .destroy_debug_utils_messenger(debug.messenger, None);\n            }\n\n            self.surface_loader.destroy_surface(self.surface, None);\n            self.device.destroy_device(None);\n            self.instance.destroy_instance(None);\n        }\n    }\n}\n\nfn contains(list: &[CString], str: &'static CStr) -> bool {\n    list.iter().any(|v| v.as_c_str() == str)\n}\n\nfn init_tracy_context(\n    device: &ash::Device,\n    pdevice: &VkDeviceInfo,\n    present_queue: &VkQueue,\n    client: tracy_client::Client,\n) -> anyhow::Result<tracy_client::GpuContext> {\n    // Query the timestamp once to calibrate the clocks.\n    let cb = create_command_buffer(device, present_queue.command_pool)?;\n\n    unsafe {\n        device.reset_command_buffer(cb, vk::CommandBufferResetFlags::empty())?;\n\n        let query_pool = create_timestamp_query_pool(device, 1)?;\n        let fence = create_fence(device, false)?;\n\n        // Begin the command buffer.\n        device.begin_command_buffer(\n            cb,\n            &vk::CommandBufferBeginInfo::default()\n                .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT),\n        )?;\n\n        // Write a timestamp.\n        query_pool.cmd_reset(device, cb);\n        device.cmd_write_timestamp(\n            cb,\n            vk::PipelineStageFlags::BOTTOM_OF_PIPE,\n            query_pool.pool,\n            0,\n        );\n\n        // Submit.\n        device.end_command_buffer(cb)?;\n\n        let cbs = [cb];\n        device.queue_submit(\n            present_queue.queue,\n            &[vk::SubmitInfo::default().command_buffers(&cbs)],\n            fence,\n        )?;\n\n        // Wait for the fence, fetch the timestamp.\n        
device.wait_for_fences(&[fence], true, u64::MAX)?;\n        let ts = query_pool.fetch_results(device)?[0];\n\n        let context = client.new_gpu_context(\n            Some(\"present queue\"),\n            tracy_client::GpuContextType::Vulkan,\n            ts as i64,\n            pdevice.limits.timestamp_period,\n        )?;\n\n        // Cleanup.\n        device.free_command_buffers(present_queue.command_pool, &[cb]);\n        device.destroy_fence(fence, None);\n        device.destroy_query_pool(query_pool.pool, None);\n\n        Ok(context)\n    }\n}\n\npub fn select_memory_type(\n    props: &vk::PhysicalDeviceMemoryProperties,\n    flags: vk::MemoryPropertyFlags,\n    req: Option<vk::MemoryRequirements>,\n) -> Option<u32> {\n    for i in 0..props.memory_type_count {\n        if let Some(req) = req {\n            if req.memory_type_bits & (1 << i) == 0 {\n                continue;\n            }\n        }\n\n        if flags.is_empty()\n            || props.memory_types[i as usize]\n                .property_flags\n                .contains(flags)\n        {\n            return Some(i);\n        }\n    }\n\n    None\n}\n\nfn get_queue_with_command_pool(device: &ash::Device, idx: u32) -> Result<VkQueue, vk::Result> {\n    let queue = unsafe { device.get_device_queue(idx, 0) };\n\n    let command_pool = unsafe {\n        let create_info = vk::CommandPoolCreateInfo::default()\n            .queue_family_index(idx)\n            .flags(vk::CommandPoolCreateFlags::RESET_COMMAND_BUFFER);\n\n        device.create_command_pool(&create_info, None)?\n    };\n\n    Ok(VkQueue {\n        queue,\n        command_pool,\n    })\n}\n\npub fn create_command_buffer(\n    device: &ash::Device,\n    pool: vk::CommandPool,\n) -> anyhow::Result<vk::CommandBuffer> {\n    let create_info = vk::CommandBufferAllocateInfo::default()\n        .level(vk::CommandBufferLevel::PRIMARY)\n        .command_pool(pool)\n        .command_buffer_count(1);\n\n    let cb = unsafe {\n        device\n     
       .allocate_command_buffers(&create_info)\n            .context(\"failed to allocate render command buffer\")?\n            .pop()\n            .unwrap()\n    };\n\n    Ok(cb)\n}\n\npub struct VkImage {\n    pub image: vk::Image,\n    pub memory: vk::DeviceMemory,\n    pub format: vk::Format,\n    pub width: u32,\n    pub height: u32,\n    vk: Arc<VkContext>,\n}\n\nimpl VkImage {\n    pub fn new(\n        vk: Arc<VkContext>,\n        format: vk::Format,\n        width: u32,\n        height: u32,\n        usage: vk::ImageUsageFlags,\n        sharing_mode: vk::SharingMode,\n        flags: vk::ImageCreateFlags,\n    ) -> anyhow::Result<Self> {\n        let image = {\n            let create_info = vk::ImageCreateInfo::default()\n                .image_type(vk::ImageType::TYPE_2D)\n                .format(format)\n                .extent(vk::Extent3D {\n                    width,\n                    height,\n                    depth: 1,\n                })\n                .mip_levels(1)\n                .array_layers(1)\n                .samples(vk::SampleCountFlags::TYPE_1)\n                .tiling(vk::ImageTiling::OPTIMAL)\n                .usage(usage)\n                .sharing_mode(sharing_mode)\n                .initial_layout(vk::ImageLayout::UNDEFINED)\n                .flags(flags);\n\n            unsafe {\n                vk.device\n                    .create_image(&create_info, None)\n                    .context(\"VkCreateImage\")?\n            }\n        };\n\n        let memory =\n            unsafe { bind_memory_for_image(&vk.device, &vk.device_info.memory_props, image)? 
};\n\n        Ok(Self {\n            image,\n            memory,\n            format,\n            width,\n            height,\n            vk,\n        })\n    }\n\n    pub fn wrap(\n        vk: Arc<VkContext>,\n        image: vk::Image,\n        memory: vk::DeviceMemory,\n        format: vk::Format,\n        width: u32,\n        height: u32,\n    ) -> Self {\n        Self {\n            image,\n            memory,\n            format,\n            width,\n            height,\n            vk,\n        }\n    }\n\n    pub fn extent(&self) -> vk::Extent2D {\n        vk::Extent2D {\n            width: self.width,\n            height: self.height,\n        }\n    }\n\n    pub fn rect(&self) -> vk::Rect2D {\n        vk::Rect2D {\n            offset: vk::Offset2D { x: 0, y: 0 },\n            extent: self.extent(),\n        }\n    }\n}\n\nimpl Drop for VkImage {\n    fn drop(&mut self) {\n        unsafe {\n            self.vk.device.destroy_image(self.image, None);\n            self.vk.device.free_memory(self.memory, None);\n        }\n    }\n}\n\npub unsafe fn bind_memory_for_image(\n    device: &ash::Device,\n    props: &vk::PhysicalDeviceMemoryProperties,\n    image: vk::Image,\n) -> anyhow::Result<vk::DeviceMemory> {\n    let image_memory_req = unsafe { device.get_image_memory_requirements(image) };\n\n    let mem_type_index = select_memory_type(\n        props,\n        vk::MemoryPropertyFlags::DEVICE_LOCAL,\n        Some(image_memory_req),\n    );\n\n    if mem_type_index.is_none() {\n        bail!(\n            \"no appropriate memory type found for reqs: {:?}\",\n            image_memory_req\n        );\n    }\n\n    let memory = {\n        let image_allocate_info = vk::MemoryAllocateInfo::default()\n            .allocation_size(image_memory_req.size)\n            .memory_type_index(mem_type_index.unwrap());\n\n        unsafe {\n            device\n                .allocate_memory(&image_allocate_info, None)\n                .context(\"VkAllocateMemory\")?\n      
  }\n    };\n\n    unsafe {\n        device\n            .bind_image_memory(image, memory, 0)\n            .context(\"VkBindImageMemory\")?;\n    }\n\n    Ok(memory)\n}\n\npub unsafe fn create_image_view(\n    device: &ash::Device,\n    image: vk::Image,\n    format: vk::Format,\n    sampler_conversion: Option<vk::SamplerYcbcrConversion>,\n) -> anyhow::Result<vk::ImageView> {\n    let mut create_info = vk::ImageViewCreateInfo::default()\n        .image(image)\n        .view_type(vk::ImageViewType::TYPE_2D)\n        .format(format)\n        .components(vk::ComponentMapping {\n            r: vk::ComponentSwizzle::IDENTITY,\n            g: vk::ComponentSwizzle::IDENTITY,\n            b: vk::ComponentSwizzle::IDENTITY,\n            a: vk::ComponentSwizzle::IDENTITY,\n        })\n        .subresource_range(vk::ImageSubresourceRange {\n            aspect_mask: vk::ImageAspectFlags::COLOR,\n            base_mip_level: 0,\n            level_count: vk::REMAINING_MIP_LEVELS,\n            base_array_layer: 0,\n            layer_count: vk::REMAINING_ARRAY_LAYERS,\n        });\n\n    let mut sampler_conversion_info;\n    if let Some(sampler_conversion) = sampler_conversion {\n        sampler_conversion_info =\n            vk::SamplerYcbcrConversionInfo::default().conversion(sampler_conversion);\n        create_info = create_info.push_next(&mut sampler_conversion_info);\n    }\n\n    device\n        .create_image_view(&create_info, None)\n        .context(\"VkCreateImageView\")\n}\n\n#[derive(Copy, Clone)]\npub struct VkHostBuffer {\n    pub buffer: vk::Buffer,\n    pub memory: vk::DeviceMemory,\n    pub access: *mut c_void,\n}\n\npub fn create_host_buffer(\n    device: &ash::Device,\n    mem_type: u32,\n    usage: vk::BufferUsageFlags,\n    size: usize,\n) -> Result<VkHostBuffer, vk::Result> {\n    let buffer = {\n        let create_info = vk::BufferCreateInfo::default()\n            .size(size as u64)\n            .usage(usage)\n            
.sharing_mode(vk::SharingMode::EXCLUSIVE);\n\n        unsafe { device.create_buffer(&create_info, None)? }\n    };\n\n    let memory = {\n        let requirements = unsafe { device.get_buffer_memory_requirements(buffer) };\n\n        let alloc_info = vk::MemoryAllocateInfo::default()\n            .allocation_size(requirements.size)\n            .memory_type_index(mem_type);\n\n        unsafe { device.allocate_memory(&alloc_info, None)? }\n    };\n\n    unsafe { device.bind_buffer_memory(buffer, memory, 0)? };\n\n    let access =\n        { unsafe { device.map_memory(memory, 0, vk::WHOLE_SIZE, vk::MemoryMapFlags::empty())? } };\n\n    Ok(VkHostBuffer {\n        buffer,\n        memory,\n        access,\n    })\n}\n\npub unsafe fn destroy_host_buffer(device: &ash::Device, buffer: &VkHostBuffer) {\n    device.unmap_memory(buffer.memory);\n    device.destroy_buffer(buffer.buffer, None);\n    device.free_memory(buffer.memory, None);\n}\n\npub struct VkTimestampQueryPool {\n    pub pool: vk::QueryPool,\n    num_timestamps: u32,\n}\n\nimpl VkTimestampQueryPool {\n    pub unsafe fn cmd_reset(&self, device: &ash::Device, command_buffer: vk::CommandBuffer) {\n        device.cmd_reset_query_pool(command_buffer, self.pool, 0, self.num_timestamps);\n    }\n\n    pub fn fetch_results(&self, device: &ash::Device) -> anyhow::Result<Vec<i64>> {\n        let mut results = vec![0_i64; self.num_timestamps as usize];\n        unsafe {\n            device\n                .get_query_pool_results(self.pool, 0, &mut results, vk::QueryResultFlags::empty())\n                .context(\"vkGetQueryPoolResults\")?;\n        }\n\n        for v in &results {\n            assert!(v > &0_i64, \"invalid query pool results\")\n        }\n\n        Ok(results)\n    }\n}\n\npub fn create_timestamp_query_pool(\n    device: &ash::Device,\n    num_timestamps: u32,\n) -> anyhow::Result<VkTimestampQueryPool> {\n    let create_info = vk::QueryPoolCreateInfo::default()\n        
.query_type(vk::QueryType::TIMESTAMP)\n        .query_count(num_timestamps);\n\n    let pool = unsafe {\n        device\n            .create_query_pool(&create_info, None)\n            .context(\"vkCreateQueryPool\")?\n    };\n\n    Ok(VkTimestampQueryPool {\n        pool,\n        num_timestamps,\n    })\n}\n\npub fn create_fence(device: &ash::Device, signalled: bool) -> Result<vk::Fence, vk::Result> {\n    let mut create_info = vk::FenceCreateInfo::default();\n    if signalled {\n        create_info = create_info.flags(vk::FenceCreateFlags::SIGNALED);\n    }\n\n    let fence = unsafe { device.create_fence(&create_info, None)? };\n\n    Ok(fence)\n}\n\npub fn create_semaphore(device: &ash::Device) -> Result<vk::Semaphore, vk::Result> {\n    let semaphore = unsafe { device.create_semaphore(&vk::SemaphoreCreateInfo::default(), None)? };\n    Ok(semaphore)\n}\n\npub fn load_shader(device: &ash::Device, bytes: &[u8]) -> anyhow::Result<vk::ShaderModule> {\n    let code = ash::util::read_spv(&mut std::io::Cursor::new(bytes))?;\n    let create_info = vk::ShaderModuleCreateInfo::default().code(&code);\n\n    let shader = unsafe { device.create_shader_module(&create_info, None)? 
};\n\n    Ok(shader)\n}\n\npub fn create_ycbcr_sampler_conversion(\n    device: &ash::Device,\n    format: vk::Format,\n    params: &crate::video::VideoStreamParams,\n) -> anyhow::Result<vk::SamplerYcbcrConversion> {\n    let ycbcr_model = match params.color_space {\n        ColorSpace::Bt709 => vk::SamplerYcbcrModelConversion::YCBCR_709,\n        ColorSpace::Bt2020Pq => vk::SamplerYcbcrModelConversion::YCBCR_2020,\n    };\n\n    let ycbcr_range = if params.color_full_range {\n        vk::SamplerYcbcrRange::ITU_FULL\n    } else {\n        vk::SamplerYcbcrRange::ITU_NARROW\n    };\n\n    let create_info = vk::SamplerYcbcrConversionCreateInfo::default()\n        .format(format)\n        .ycbcr_model(ycbcr_model)\n        .ycbcr_range(ycbcr_range)\n        .chroma_filter(vk::Filter::LINEAR)\n        .x_chroma_offset(vk::ChromaLocation::MIDPOINT)\n        .y_chroma_offset(vk::ChromaLocation::MIDPOINT);\n\n    let conversion = unsafe { device.create_sampler_ycbcr_conversion(&create_info, None)? 
};\n    Ok(conversion)\n}\n\npub fn get_ycbcr_conversion_properties(\n    device: vk::PhysicalDevice,\n    instance: &ash::Instance,\n    format: vk::Format,\n) -> anyhow::Result<vk::SamplerYcbcrConversionImageFormatProperties> {\n    let mut ycbcr_props = vk::SamplerYcbcrConversionImageFormatProperties::default();\n    let mut image_format_props2 = vk::ImageFormatProperties2::default().push_next(&mut ycbcr_props);\n\n    let image_format_info = vk::PhysicalDeviceImageFormatInfo2::default()\n        .format(format)\n        .ty(vk::ImageType::TYPE_2D)\n        .tiling(vk::ImageTiling::OPTIMAL)\n        .usage(vk::ImageUsageFlags::SAMPLED);\n\n    unsafe {\n        instance.get_physical_device_image_format_properties2(\n            device,\n            &image_format_info,\n            &mut image_format_props2,\n        )?;\n    }\n\n    Ok(ycbcr_props)\n}\n\nunsafe extern \"system\" fn vulkan_debug_utils_callback(\n    message_severity: vk::DebugUtilsMessageSeverityFlagsEXT,\n    message_type: vk::DebugUtilsMessageTypeFlagsEXT,\n    p_callback_data: *const vk::DebugUtilsMessengerCallbackDataEXT,\n    _userdata: *mut c_void,\n) -> vk::Bool32 {\n    let _ = std::panic::catch_unwind(|| {\n        let message = unsafe { CStr::from_ptr((*p_callback_data).p_message) }.to_string_lossy();\n        let ty = format!(\"{:?}\", message_type).to_lowercase();\n\n        // TODO: these should all be debug.\n        match message_severity {\n            vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE => {\n                tracing::trace!(ty, \"{}\", message)\n            }\n            vk::DebugUtilsMessageSeverityFlagsEXT::INFO => info!(ty, \"{}\", message),\n            vk::DebugUtilsMessageSeverityFlagsEXT::WARNING => warn!(ty, \"{}\", message),\n            vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => error!(ty, \"{}\", message),\n            _ => (),\n        }\n    });\n\n    // Must always return false.\n    vk::FALSE\n}\n\n#[allow(clippy::too_many_arguments)]\npub fn 
cmd_image_barrier(\n    device: &ash::Device,\n    command_buffer: vk::CommandBuffer,\n    image: vk::Image,\n    src_stage_mask: vk::PipelineStageFlags,\n    src_access_mask: vk::AccessFlags,\n    dst_stage_mask: vk::PipelineStageFlags,\n    dst_access_mask: vk::AccessFlags,\n    old_layout: vk::ImageLayout,\n    new_layout: vk::ImageLayout,\n) {\n    let barrier = vk::ImageMemoryBarrier::default()\n        .src_access_mask(src_access_mask)\n        .dst_access_mask(dst_access_mask)\n        .old_layout(old_layout)\n        .new_layout(new_layout)\n        .image(image)\n        .subresource_range(vk::ImageSubresourceRange {\n            aspect_mask: vk::ImageAspectFlags::COLOR,\n            base_mip_level: 0,\n            level_count: 1,\n            base_array_layer: 0,\n            layer_count: 1,\n        });\n\n    unsafe {\n        device.cmd_pipeline_barrier(\n            command_buffer,\n            src_stage_mask,\n            dst_stage_mask,\n            vk::DependencyFlags::empty(),\n            &[],\n            &[],\n            &[barrier],\n        )\n    };\n}\n"
  },
  {
    "path": "mm-client-common/Cargo.toml",
    "content": "[package]\nname = \"mm-client-common\"\nversion = \"0.1.0\"\nedition = \"2021\"\nlicense = \"MIT\"\n\n[lib]\ncrate-type = [\"lib\", \"staticlib\"]\nname = \"mm_client_common\"\n\n[[bin]]\nname = \"uniffi-bindgen\"\npath = \"bin/uniffi-bindgen.rs\"\n\n[dependencies]\nmm-protocol = { path = \"../mm-protocol\", features = [\"uniffi\"] }\nasync-mutex = \"1\"\nbytes = \"1\"\nip_rfc = \"0.1\"\nflume = \"0.11\"\nfutures = { version = \"0.3\", features = [\"executor\"] }\nlog = \"0.4\"\nmio = { version = \"1\", features = [\"net\", \"os-ext\", \"os-poll\"] }\nprost-types = \"0.13\"\nquiche = { version = \"0.23\", features = [\"qlog\"] }\nraptorq = \"2.0\"\nring = \"0.17\"\nthiserror = \"1\"\ntracing = { version = \"0.1\", features = [\"log\"] }\nuniffi = { version = \"0.28\", features = [\"cli\"] }\n\n[build-dependencies]\nuniffi = { version = \"0.27\", features = [\"build\"] }\n"
  },
  {
    "path": "mm-client-common/bin/uniffi-bindgen.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nfn main() {\n    uniffi::uniffi_bindgen_main()\n}\n"
  },
  {
    "path": "mm-client-common/src/attachment.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::sync::Arc;\n\nuse async_mutex::Mutex as AsyncMutex;\nuse futures::{channel::oneshot, future, FutureExt as _};\nuse mm_protocol as protocol;\npub use protocol::audio_channels::Channel as AudioChannel;\nuse tracing::error;\n\nuse crate::{\n    codec, conn, display_params, input,\n    packet::{self, PacketRing},\n    ClientError, ClientState,\n};\n\n#[derive(Debug, Clone, uniffi::Record)]\npub struct AttachmentConfig {\n    /// The width of the video stream.\n    pub width: u32,\n    /// The height of the video stream.\n    pub height: u32,\n\n    /// The codec to use for the video stream. Leaving it empty allows the\n    /// server to decide.\n    pub video_codec: Option<codec::VideoCodec>,\n\n    /// The profile (bit depth and colorspace) to use for the video stream.\n    /// Leaving it empty allows the server to decide.\n    pub video_profile: Option<codec::VideoProfile>,\n\n    /// The quality preset, from 1-10. A None or 0 indicates the server should\n    /// decide.\n    pub quality_preset: Option<u32>,\n\n    /// The codec to use for the audio stream. Leaving it empty allows the\n    /// server to decide.\n    pub audio_codec: Option<codec::AudioCodec>,\n\n    /// The sample rate to use for the audio stream. Leaving it empty allows the\n    /// server to decide.\n    pub sample_rate: Option<u32>,\n\n    /// The channel layout to use for the audio stream. An empty vec indicates\n    /// the server should decide.\n    pub channels: Vec<AudioChannel>,\n\n    /// An offset to apply to the stream_seq of incoming video packets. The\n    /// offset is applied on the client side, and exists as a convenient way to\n    /// way to ensure sequence numbers stay monotonic, even across individual\n    /// attachment streams.\n    pub video_stream_seq_offset: u64,\n\n    /// An offset to apply to the stream_seq of incoming audio packets. 
The\n    /// offset is applied on the client side, and exists as a convenient way to\n    /// way to ensure sequence numbers stay monotonic, even across individual\n    /// attachment streams.\n    pub audio_stream_seq_offset: u64,\n}\n\n/// The settled video stream params, after the server has applied its defaults.\n#[derive(Debug, Clone, uniffi::Record)]\npub struct VideoStreamParams {\n    pub width: u32,\n    pub height: u32,\n\n    pub codec: codec::VideoCodec,\n    pub profile: codec::VideoProfile,\n}\n\n/// The settled audio stream params, after the server has applied its defaults.\n#[derive(Debug, Clone, uniffi::Record)]\npub struct AudioStreamParams {\n    pub codec: codec::AudioCodec,\n    pub sample_rate: u32,\n    pub channels: Vec<AudioChannel>,\n}\n\n/// A handle for sending messages to the server over an attachment stream.\n///\n/// An attachment is ended once the corresponding AttachmentDelegate receives\n/// the attachment_ended or parameters_changed (with reattach_required = true)\n/// callbacks. 
Using it past that point will silently drop events.\n#[derive(uniffi::Object)]\npub struct Attachment {\n    sid: u64,\n\n    /// Used to un-munge the stream_seq for [Attachment::request_video_refresh].\n    video_stream_seq_offset: u64,\n\n    // We store a copy of these so that we can send messages on the attachment\n    // stream without locking the client mutex.\n    outgoing: flume::Sender<conn::OutgoingMessage>,\n    conn_waker: Arc<mio::Waker>,\n\n    detached: future::Shared<oneshot::Receiver<()>>,\n}\n\nimpl Attachment {\n    pub(crate) async fn new(\n        sid: u64,\n        client: Arc<AsyncMutex<super::InnerClient>>,\n        attached: protocol::Attached,\n        delegate: Arc<dyn AttachmentDelegate>,\n        video_stream_seq_offset: u64,\n    ) -> Result<Self, ClientError> {\n        let session_id = attached.session_id;\n        let attachment_id = attached.attachment_id;\n        let (detached_tx, detached_rx) = oneshot::channel();\n\n        let state = AttachmentState {\n            session_id,\n            attachment_id,\n\n            delegate,\n            attached_msg: attached,\n            server_error: None,\n\n            video_packet_ring: PacketRing::new(),\n            video_stream_seq: None,\n            prev_video_stream_seq: None,\n            video_stream_seq_offset,\n\n            audio_packet_ring: PacketRing::new(),\n            audio_stream_seq: None,\n            prev_audio_stream_seq: None,\n            audio_stream_seq_offset: 0,\n\n            notify_detached: Some(detached_tx),\n            reattach_required: false,\n        };\n\n        let mut guard = client.lock().await;\n\n        let super::ConnHandle {\n            outgoing,\n            waker,\n            attachments,\n            ..\n        } = match &guard.state {\n            ClientState::Connected(conn) => conn,\n            ClientState::Defunct(e) => return Err(e.clone()),\n        };\n\n        let outgoing = outgoing.clone();\n        let conn_waker = 
waker.clone();\n\n        // Track the attachment in the client, so that the reactor thread will\n        // send us messages.\n        if attachments.send_async((sid, state)).await.is_err() {\n            match guard.close() {\n                Ok(_) => return Err(ClientError::Defunct),\n                Err(e) => return Err(e),\n            }\n        }\n\n        Ok(Self {\n            sid,\n            video_stream_seq_offset,\n            outgoing,\n            conn_waker,\n            detached: detached_rx.shared(),\n        })\n    }\n}\n\n/// Used by client implementations to handle attachment events.\n#[uniffi::export(with_foreign)]\npub trait AttachmentDelegate: Send + Sync + std::fmt::Debug {\n    /// The video stream is starting or restarting.\n    fn video_stream_start(&self, stream_seq: u64, params: VideoStreamParams);\n\n    /// A video packet is available.\n    fn video_packet(&self, packet: Arc<packet::Packet>);\n\n    /// A video packet was lost.\n    fn dropped_video_packet(&self, dropped: packet::DroppedPacket);\n\n    /// The audio stream is starting or restarting.\n    fn audio_stream_start(&self, stream_seq: u64, params: AudioStreamParams);\n\n    /// An audio packet is available.\n    fn audio_packet(&self, packet: Arc<packet::Packet>);\n\n    // The cursor was updated.\n    fn update_cursor(\n        &self,\n        icon: input::CursorIcon,\n        image: Option<Vec<u8>>,\n        hotspot_x: u32,\n        hotspot_y: u32,\n    );\n\n    /// The pointer should be locked to the given location.\n    fn lock_pointer(&self, x: f64, y: f64);\n\n    /// The pointer should be released.\n    fn release_pointer(&self);\n\n    /// The remote session display params were changed. This usually requires\n    /// the client to reattach. If reattach_required is true, the attachment\n    /// should be considered ended. 
[attachment_ended] will not be called.\n    fn display_params_changed(\n        &self,\n        params: display_params::DisplayParams,\n        reattach_required: bool,\n    );\n\n    /// The client encountered an error. The attachment should be considered\n    /// ended. [attachment_ended] will not be called.\n    fn error(&self, err: ClientError);\n\n    /// The attachment was ended by the server.\n    fn attachment_ended(&self);\n}\n\nimpl Attachment {\n    fn send(&self, msg: impl Into<protocol::MessageType>, fin: bool) {\n        let _ = self.outgoing.send(conn::OutgoingMessage {\n            sid: self.sid,\n            msg: msg.into(),\n            fin,\n        });\n\n        let _ = self.conn_waker.wake();\n    }\n}\n\n#[uniffi::export]\nimpl Attachment {\n    /// Requests that the server generate a packet with headers and a keyframe.\n    pub fn request_video_refresh(&self, stream_seq: u64) {\n        self.send(\n            protocol::RequestVideoRefresh {\n                stream_seq: stream_seq - self.video_stream_seq_offset,\n            },\n            false,\n        )\n    }\n\n    /// Sends keyboard input to the server.\n    pub fn keyboard_input(&self, key: input::Key, state: input::KeyState, character: u32) {\n        self.send(\n            protocol::KeyboardInput {\n                key: key.into(),\n                state: state.into(),\n                char: character,\n            },\n            false,\n        )\n    }\n\n    /// Notifies the server that the pointer has entered the video area,\n    /// including if it enters a letterbox around the video.\n    pub fn pointer_entered(&self) {\n        self.send(protocol::PointerEntered {}, false)\n    }\n\n    /// Notifies the server that the pointer has left the video area. 
This\n    /// should consider any letterboxing part of the video area.\n    pub fn pointer_left(&self) {\n        self.send(protocol::PointerLeft {}, false)\n    }\n\n    /// Sends pointer motion to the server.\n    pub fn pointer_motion(&self, x: f64, y: f64) {\n        self.send(protocol::PointerMotion { x, y }, false)\n    }\n\n    /// Sends relative pointer motion to the server.\n    pub fn relative_pointer_motion(&self, x: f64, y: f64) {\n        self.send(protocol::RelativePointerMotion { x, y }, false)\n    }\n\n    /// Sends pointer input to the server.\n    pub fn pointer_input(&self, button: input::Button, state: input::ButtonState, x: f64, y: f64) {\n        self.send(\n            protocol::PointerInput {\n                button: button.into(),\n                state: state.into(),\n                x,\n                y,\n            },\n            false,\n        )\n    }\n\n    /// Sends pointer scroll events to the server.\n    pub fn pointer_scroll(&self, scroll_type: input::ScrollType, x: f64, y: f64) {\n        self.send(\n            protocol::PointerScroll {\n                scroll_type: scroll_type.into(),\n                x,\n                y,\n            },\n            false,\n        )\n    }\n\n    /// Sends a 'Gamepad Available' event to the server.\n    pub fn gamepad_available(&self, pad: input::Gamepad) {\n        self.send(\n            protocol::GamepadAvailable {\n                gamepad: Some(pad.into()),\n            },\n            false,\n        )\n    }\n\n    /// Sends a 'Gamepad Unavailable' event to the server.\n    pub fn gamepad_unavailable(&self, id: u64) {\n        self.send(protocol::GamepadUnavailable { id }, false)\n    }\n\n    /// Sends gamepad joystick motion to the server.\n    pub fn gamepad_motion(&self, id: u64, axis: input::GamepadAxis, value: f64) {\n        self.send(\n            protocol::GamepadMotion {\n                gamepad_id: id,\n                axis: axis.into(),\n                value,\n      
      },\n            false,\n        )\n    }\n\n    /// Sends gamepad button input to the server.\n    pub fn gamepad_input(\n        &self,\n        id: u64,\n        button: input::GamepadButton,\n        state: input::GamepadButtonState,\n    ) {\n        self.send(\n            protocol::GamepadInput {\n                gamepad_id: id,\n                button: button.into(),\n                state: state.into(),\n            },\n            false,\n        )\n    }\n\n    /// Ends the attachment.\n    pub async fn detach(&self) -> Result<(), ClientError> {\n        self.send(protocol::Detach {}, true);\n        Ok(self.detached.clone().await?)\n    }\n}\n\n/// Internal state for an attachment.\npub(crate) struct AttachmentState {\n    pub(crate) session_id: u64,\n    pub(crate) attachment_id: u64,\n\n    delegate: Arc<dyn AttachmentDelegate>,\n    attached_msg: protocol::Attached,\n    reattach_required: bool,\n    server_error: Option<protocol::Error>,\n\n    video_packet_ring: PacketRing,\n    video_stream_seq: Option<u64>,\n    prev_video_stream_seq: Option<u64>,\n    video_stream_seq_offset: u64,\n\n    audio_packet_ring: PacketRing,\n    audio_stream_seq: Option<u64>,\n    prev_audio_stream_seq: Option<u64>,\n    audio_stream_seq_offset: u64,\n\n    // A future representing the end of the attachment.\n    notify_detached: Option<oneshot::Sender<()>>,\n}\n\nimpl AttachmentState {\n    pub(crate) fn handle_message(&mut self, msg: protocol::MessageType) {\n        match msg {\n            protocol::MessageType::Attached(attached) => {\n                error!(\n                    \"unexpected {} on already-attached stream\",\n                    protocol::MessageType::Attached(attached)\n                );\n            }\n            protocol::MessageType::VideoChunk(chunk) => {\n                // We always send packets for two streams - the current one and\n                // (if there is one) the previous one.\n                if 
self.video_stream_seq.is_none_or(|s| s < chunk.stream_seq) {\n                    // A new stream started.\n                    self.prev_video_stream_seq = self.video_stream_seq;\n                    self.video_stream_seq = Some(chunk.stream_seq);\n\n                    let res = self.attached_msg.streaming_resolution.unwrap_or_default();\n\n                    self.delegate.video_stream_start(\n                        chunk.stream_seq + self.video_stream_seq_offset,\n                        VideoStreamParams {\n                            width: res.width,\n                            height: res.height,\n                            codec: self.attached_msg.video_codec(),\n                            profile: self.attached_msg.video_profile(),\n                        },\n                    );\n\n                    // Discard any older packets.\n                    if let Some(prev) = self.prev_video_stream_seq {\n                        self.video_packet_ring.discard(prev.saturating_sub(1));\n                    }\n                }\n\n                if let Err(err) = self.video_packet_ring.recv_chunk(chunk) {\n                    error!(\"error in packet ring: {:#}\", err);\n                }\n\n                if let Some(prev) = self.prev_video_stream_seq {\n                    // Ignore dropped packets on the previous stream.\n                    for mut packet in self\n                        .video_packet_ring\n                        .drain_completed(prev)\n                        .flat_map(Result::ok)\n                    {\n                        packet.stream_seq += self.video_stream_seq_offset;\n                        self.delegate.video_packet(Arc::new(packet));\n                    }\n                }\n\n                if self.video_stream_seq != self.prev_video_stream_seq {\n                    for res in self\n                        .video_packet_ring\n                        .drain_completed(self.video_stream_seq.unwrap())\n               
     {\n                        match res {\n                            Ok(mut packet) => {\n                                packet.stream_seq += self.video_stream_seq_offset;\n                                self.delegate.video_packet(Arc::new(packet));\n                            }\n                            Err(mut dropped) => {\n                                dropped.stream_seq += self.video_stream_seq_offset;\n                                self.delegate.dropped_video_packet(dropped);\n                            }\n                        }\n                    }\n                }\n            }\n            protocol::MessageType::AudioChunk(chunk) => {\n                // We always send packets for two streams - the current one and\n                // (if there is one) the previous one.\n                if self.audio_stream_seq.is_none_or(|s| s < chunk.stream_seq) {\n                    // A new stream started.\n                    self.prev_audio_stream_seq = self.audio_stream_seq;\n                    self.audio_stream_seq = Some(chunk.stream_seq);\n\n                    let channels = self\n                        .attached_msg\n                        .channels\n                        .as_ref()\n                        .map(|c| c.channels().collect())\n                        .unwrap_or_default();\n\n                    self.delegate.audio_stream_start(\n                        chunk.stream_seq + self.audio_stream_seq_offset,\n                        AudioStreamParams {\n                            codec: self.attached_msg.audio_codec(),\n                            sample_rate: self.attached_msg.sample_rate_hz,\n                            channels,\n                        },\n                    );\n\n                    // Discard any older packets.\n                    if let Some(prev) = self.prev_audio_stream_seq {\n                        self.audio_packet_ring.discard(prev.saturating_sub(1));\n                    }\n                }\n\n 
               if let Err(err) = self.audio_packet_ring.recv_chunk(chunk) {\n                    error!(\"error in packet ring: {:#}\", err);\n                }\n\n                if let Some(prev) = self.prev_audio_stream_seq {\n                    for mut packet in self\n                        .audio_packet_ring\n                        .drain_completed(prev)\n                        .flat_map(Result::ok)\n                    {\n                        packet.stream_seq += self.audio_stream_seq_offset;\n                        self.delegate.audio_packet(Arc::new(packet));\n                    }\n                }\n\n                if self.audio_stream_seq != self.prev_audio_stream_seq {\n                    for mut packet in self\n                        .audio_packet_ring\n                        .drain_completed(self.audio_stream_seq.unwrap())\n                        .flat_map(Result::ok)\n                    {\n                        packet.stream_seq += self.audio_stream_seq_offset;\n                        self.delegate.audio_packet(Arc::new(packet));\n                    }\n                }\n            }\n            protocol::MessageType::UpdateCursor(msg) => {\n                let image = match &msg.image {\n                    v if v.is_empty() => None,\n                    v => Some(v.to_vec()),\n                };\n\n                self.delegate\n                    .update_cursor(msg.icon(), image, msg.hotspot_x, msg.hotspot_y);\n            }\n            protocol::MessageType::LockPointer(msg) => {\n                self.delegate.lock_pointer(msg.x, msg.y);\n            }\n            protocol::MessageType::ReleasePointer(_) => self.delegate.release_pointer(),\n            protocol::MessageType::SessionParametersChanged(msg) => {\n                let Some(params) = msg.display_params.and_then(|p| p.try_into().ok()) else {\n                    error!(?msg, \"invalid display params from server\");\n                    return;\n                
};\n\n                self.delegate\n                    .display_params_changed(params, msg.reattach_required);\n\n                // Mute the attachment_ended callback once.\n                self.reattach_required = msg.reattach_required;\n            }\n            protocol::MessageType::SessionEnded(_) => {\n                // We just check for the fin on the attachment stream.\n            }\n            protocol::MessageType::Error(error) => {\n                self.server_error = Some(error.clone());\n                self.delegate.error(ClientError::ServerError(error));\n            }\n            v => error!(\"unexpected message on attachment stream: {}\", v),\n        }\n    }\n\n    pub(crate) fn handle_close(mut self, err: Option<ClientError>) {\n        if let Some(tx) = self.notify_detached.take() {\n            let _ = tx.send(());\n        }\n\n        if self.reattach_required {\n            self.reattach_required = false;\n        } else if let Some(err) = err {\n            self.delegate.error(err);\n        } else if self.server_error.is_some() {\n            // We don't call attachment_ended because we already called error.\n        } else {\n            self.delegate.attachment_ended();\n        }\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/codec.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse mm_protocol as protocol;\n\npub use protocol::{AudioCodec, VideoCodec, VideoProfile};\n"
  },
  {
    "path": "mm-client-common/src/conn/hostport.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\n#[derive(Debug, Eq, PartialEq)]\npub(crate) struct MalformedHostPort;\n\nimpl std::fmt::Display for MalformedHostPort {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"invalid host:port string\")\n    }\n}\n\nimpl std::error::Error for MalformedHostPort {}\n\n/// Splits a network address into the host and port components. Accepts\n/// addresses of the following form:\n///  - \"host\"\n///  - \"[host]\"\n///  - \"host:port\"\n///  - \"[host]:port\"\n///\n///  # References\n///\n///  https://cs.opensource.google/go/go/+/refs/tags/go1.23.3:src/net/ipsock.go;l=165\npub(crate) fn split_host_port(\n    hostport: impl AsRef<[u8]>,\n) -> Result<(String, Option<u16>), MalformedHostPort> {\n    let input = hostport.as_ref();\n    let mut split = rfind(input, b':');\n\n    let host;\n    if input[0] == b'[' {\n        let Some(end) = find(input, b']') else {\n            return Err(MalformedHostPort);\n        };\n\n        match end + 1 {\n            v if v == input.len() => {\n                host = &input[1..end];\n                split = None;\n            }\n            v if split.is_some_and(|i| v == i) => {\n                host = &input[1..end];\n            }\n            _ => return Err(MalformedHostPort),\n        }\n\n        if find(&input[1..], b'[').is_some() || find(&input[end + 1..], b']').is_some() {\n            return Err(MalformedHostPort);\n        }\n    } else {\n        host = &input[..split.unwrap_or(input.len())];\n        if find(input, b'[').is_some() || find(input, b']').is_some() {\n            return Err(MalformedHostPort);\n        }\n    }\n\n    let Ok(host) = std::str::from_utf8(host) else {\n        return Err(MalformedHostPort);\n    };\n\n    let port = if let Some(i) = split {\n        Some(\n            std::str::from_utf8(&input[i + 1..])\n                .ok()\n                
.and_then(|s| s.parse().ok())\n                .ok_or(MalformedHostPort)?,\n        )\n    } else {\n        None\n    };\n\n    Ok((host.to_owned(), port))\n}\n\nfn find(buf: &[u8], c: u8) -> Option<usize> {\n    buf.iter().position(|x| x == &c)\n}\n\nfn rfind(buf: &[u8], c: u8) -> Option<usize> {\n    buf.iter().rposition(|x| x == &c)\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_split_host_port() {\n        macro_rules! check {\n            ($s:literal, $host:literal, $port:literal) => {\n                assert_eq!(Ok(($host.to_string(), Some($port))), split_host_port($s));\n            };\n            ($s:literal, $host:literal) => {\n                assert_eq!(Ok(($host.to_string(), None)), split_host_port($s));\n            };\n            ($s:literal, bad) => {\n                assert_eq!(Err(MalformedHostPort), split_host_port($s));\n            };\n        }\n\n        check!(\"foo\", \"foo\");\n        check!(\"foo:9599\", \"foo\", 9599);\n        check!(\"[foo]\", \"foo\");\n        check!(\"[foo]:9599\", \"foo\", 9599);\n        check!(\"[::1]\", \"::1\");\n        check!(\"[::1]:9599\", \"::1\", 9599);\n\n        check!(\"foo:\", bad);\n        check!(\"foo:bar\", bad);\n        check!(\"[foo:]9599\", bad);\n        check!(\"[::1]:\", bad);\n        check!(\"[foo]]:9599\", bad);\n        check!(\"[[foo]]:9599\", bad);\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/conn.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nmod hostport;\n\nconst DEFAULT_PORT: u16 = 9599;\nconst MAX_QUIC_PACKET_SIZE: usize = 1350;\n\nconst SOCKET: mio::Token = mio::Token(0);\nconst WAKER: mio::Token = mio::Token(1);\n\nuse std::{\n    collections::{HashMap, HashSet},\n    net::SocketAddr,\n    sync::{atomic::Ordering, Arc},\n    time,\n};\n\nuse futures::channel::oneshot;\nuse mm_protocol as protocol;\nuse tracing::{debug, error, info, trace, warn};\n\nuse crate::stats::StatsCollector;\n\n#[derive(Debug, Clone, thiserror::Error)]\npub enum ConnError {\n    #[error(\"invalid address: {0}\")]\n    InvalidAddress(String),\n    #[error(\"unexpected OS error: {0}\")]\n    Unknown(#[from] Arc<std::io::Error>),\n    #[error(\"QUIC error\")]\n    QuicError(#[from] quiche::Error),\n    #[error(\"connection timeout\")]\n    Timeout,\n    #[error(\"connection closed due to inactivity\")]\n    Idle,\n    #[error(\"closed by peer (is_app={}, code={})\", .0.is_app, .0.error_code)]\n    PeerError(quiche::ConnectionError),\n    #[error(\"recv or send queue is full\")]\n    QueueFull,\n    #[error(\"protocol error\")]\n    ProtocolError(#[from] protocol::ProtocolError),\n}\n\n// In order to let ConnError implement Clone, we need to wrap io::Error in Arc;\n// but then we lose From<io::Error>, which breaks the ? 
operator.\nimpl From<std::io::Error> for ConnError {\n    fn from(e: std::io::Error) -> Self {\n        Self::Unknown(Arc::new(e))\n    }\n}\n\n#[derive(Debug, Clone)]\npub(crate) enum ConnEvent {\n    StreamMessage(u64, protocol::MessageType),\n    Datagram(protocol::MessageType),\n    StreamClosed(u64),\n}\n\npub(crate) struct OutgoingMessage {\n    pub(crate) sid: u64,\n    pub(crate) msg: protocol::MessageType,\n    pub(crate) fin: bool,\n}\n\npub(crate) struct Conn {\n    scratch: bytes::BytesMut,\n    socket: mio::net::UdpSocket,\n    local_addr: SocketAddr,\n    poll: mio::Poll,\n    waker: Arc<mio::Waker>,\n    conn: quiche::Connection,\n    partial_reads: HashMap<u64, bytes::BytesMut>,\n    open_streams: HashSet<u64>,\n\n    shutdown: oneshot::Receiver<()>,\n    shutting_down: bool,\n\n    incoming: flume::Sender<ConnEvent>,\n    outgoing: flume::Receiver<OutgoingMessage>,\n\n    ready: Option<oneshot::Sender<Result<(), ConnError>>>,\n\n    stats_timer: time::Instant,\n    stats_collector: Arc<StatsCollector>,\n}\n\nimpl Conn {\n    pub fn new(\n        addr: &str,\n        incoming: flume::Sender<ConnEvent>,\n        outgoing: flume::Receiver<OutgoingMessage>,\n        ready: oneshot::Sender<Result<(), ConnError>>,\n        shutdown: oneshot::Receiver<()>,\n        stats: Arc<StatsCollector>,\n    ) -> Result<Self, ConnError> {\n        let (hostname, server_addr) = resolve_server(addr)?;\n        let bind_addr = match server_addr {\n            std::net::SocketAddr::V4(_) => \"0.0.0.0:0\",\n            std::net::SocketAddr::V6(_) => \"[::]:0\",\n        };\n\n        let mut socket = mio::net::UdpSocket::bind(bind_addr.parse().unwrap())?;\n\n        let mut config = quiche::Config::new(quiche::PROTOCOL_VERSION)?;\n\n        if !ip_rfc::global(&server_addr.ip()) {\n            warn!(\"skipping TLS verification for private server address\");\n            config.verify_peer(false);\n        }\n\n        
config.set_application_protos(&[protocol::ALPN_PROTOCOL_VERSION])?;\n\n        config.set_max_idle_timeout(60_000);\n        config.set_max_recv_udp_payload_size(MAX_QUIC_PACKET_SIZE);\n        config.set_max_send_udp_payload_size(MAX_QUIC_PACKET_SIZE);\n        config.set_initial_max_data(65536);\n        config.set_initial_max_stream_data_bidi_local(65536);\n        config.set_initial_max_stream_data_bidi_remote(65536);\n        config.set_initial_max_streams_bidi(100);\n        config.set_initial_max_stream_data_uni(65536);\n        config.set_initial_max_streams_uni(100);\n        config.enable_dgram(true, 65536, 0);\n\n        let initial_scid = gen_scid();\n        let local_addr = socket.local_addr().unwrap();\n        let conn = quiche::connect(\n            Some(&hostname),\n            &initial_scid,\n            local_addr,\n            server_addr,\n            &mut config,\n        )?;\n\n        let scratch = bytes::BytesMut::with_capacity(65536);\n\n        let poll = mio::Poll::new().unwrap();\n        let waker = Arc::new(mio::Waker::new(poll.registry(), WAKER)?);\n\n        poll.registry()\n            .register(&mut socket, SOCKET, mio::Interest::READABLE)?;\n\n        Ok(Self {\n            scratch,\n            socket,\n            local_addr,\n            poll,\n            waker,\n            conn,\n            partial_reads: HashMap::new(),\n            open_streams: HashSet::new(),\n\n            shutdown,\n            shutting_down: false,\n\n            incoming,\n            outgoing,\n\n            ready: Some(ready),\n\n            stats_timer: time::Instant::now(),\n            stats_collector: stats,\n        })\n    }\n\n    pub fn waker(&self) -> Arc<mio::Waker> {\n        self.waker.clone()\n    }\n\n    pub fn run(&mut self, connect_timeout: time::Duration) -> Result<(), ConnError> {\n        let mut events = mio::Events::with_capacity(1024);\n        let start = time::Instant::now();\n\n        loop {\n            const 
ONE_SECOND: time::Duration = time::Duration::from_secs(1);\n            let timeout = self\n                .conn\n                .timeout()\n                .map_or(ONE_SECOND, |d| d.min(ONE_SECOND));\n\n            self.poll.poll(&mut events, Some(timeout))?;\n\n            let now = time::Instant::now();\n            if self.conn.timeout_instant().is_some_and(|t| now >= t) {\n                self.conn.on_timeout();\n            }\n\n            if self.conn.is_closed() || self.conn.is_draining() {\n                if self.conn.is_timed_out() {\n                    return Err(ConnError::Idle);\n                } else if self.conn.is_dgram_recv_queue_full() {\n                    return Err(ConnError::QueueFull);\n                } else if let Some(err) = self.conn.peer_error() {\n                    return Err(ConnError::PeerError(err.clone()));\n                } else if !self.shutting_down {\n                    panic!(\"connection closed unexpectedly\");\n                } else {\n                    return Ok(());\n                }\n            }\n\n            if self.ready.is_some() {\n                if self.conn.is_established() || self.conn.is_in_early_data() {\n                    trace!(\"connection ready\");\n                    let _ = self.ready.take().unwrap().send(Ok(()));\n                } else if start.elapsed() > connect_timeout {\n                    let _ = self.ready.take().unwrap().send(Err(ConnError::Timeout));\n                }\n            }\n\n            if let Ok(Some(())) = self.shutdown.try_recv() {\n                self.start_shutdown()?;\n            }\n\n            if (now - self.stats_timer) > time::Duration::from_millis(200) {\n                self.stats_timer = now;\n                let stats = self.conn.path_stats().next().unwrap();\n                self.stats_collector\n                    .rtt_us\n                    .store(stats.rtt.as_micros() as u64, Ordering::SeqCst);\n            }\n\n            // Read incoming 
UDP packets and handle them.\n            loop {\n                // TODO: use recv_mmsg for a small efficiency boost.\n                self.scratch.resize(MAX_QUIC_PACKET_SIZE, 0);\n                let (len, from) = match self.socket.recv_from(&mut self.scratch) {\n                    Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n                        break;\n                    }\n                    v => v?,\n                };\n\n                self.conn.recv(\n                    &mut self.scratch[..len],\n                    quiche::RecvInfo {\n                        from,\n                        to: self.local_addr,\n                    },\n                )?;\n            }\n\n            if (self.conn.is_established() || self.conn.is_in_early_data()) && !self.shutting_down {\n                // Demux incoming messages and datagrams.\n                for sid in self.conn.readable() {\n                    self.open_streams.insert(sid);\n                    self.pump_stream(sid)?;\n                }\n\n                loop {\n                    self.scratch.resize(protocol::MAX_MESSAGE_SIZE, 0);\n                    match self.conn.dgram_recv(&mut self.scratch) {\n                        Ok(len) => {\n                            let (msg, msg_len) = match protocol::decode_message(&self.scratch) {\n                                Ok(v) => v,\n                                Err(protocol::ProtocolError::InvalidMessageType(t, _)) => {\n                                    warn!(msg_type = t, \"ignoring unknown message type\");\n                                    continue;\n                                }\n                                Err(e) => return Err(e.into()),\n                            };\n\n                            debug_assert_eq!(msg_len, len);\n                            trace!(%msg, len, \"received datagram\");\n\n                            match self.incoming.send(ConnEvent::Datagram(msg)) {\n                            
    Ok(()) => {}\n                                Err(_) => {\n                                    self.start_shutdown()?;\n                                    break;\n                                }\n                            }\n                        }\n                        Err(quiche::Error::Done) => break,\n                        Err(e) => {\n                            error!(\"QUIC recv error: {:#}\", e);\n                            break;\n                        }\n                    }\n                }\n\n                // Enqueue outgoing messages.\n                loop {\n                    match self.outgoing.try_recv() {\n                        Ok(OutgoingMessage { sid, msg, fin }) => {\n                            if matches!(\n                                self.conn.stream_capacity(sid),\n                                Err(quiche::Error::InvalidState)\n                                    | Err(quiche::Error::StreamStopped(_))\n                            ) {\n                                debug!(sid, %msg, \"dropping outgoing message for finished stream\");\n                                continue;\n                            }\n\n                            self.open_streams.insert(sid);\n                            self.send_message(sid, msg, fin)?;\n                        }\n                        Err(flume::TryRecvError::Empty) => {\n                            break;\n                        }\n                        Err(flume::TryRecvError::Disconnected) => {\n                            self.start_shutdown()?;\n                            break;\n                        }\n                    }\n                }\n\n                // Garbage collect closed streams.\n                let mut closed = Vec::new();\n                self.open_streams.retain(|sid| {\n                    if self.conn.stream_finished(*sid) {\n                        trace!(sid, \"stream finished\");\n                        
closed.push(*sid);\n                        false\n                    } else {\n                        true\n                    }\n                });\n\n                for sid in closed {\n                    match self.incoming.send(ConnEvent::StreamClosed(sid)) {\n                        Ok(()) => {}\n                        Err(_) => {\n                            self.start_shutdown()?;\n                            break;\n                        }\n                    }\n                }\n            }\n\n            // Write out UDP packets.\n            loop {\n                self.scratch.resize(MAX_QUIC_PACKET_SIZE, 0);\n                let (len, send_info) = match self.conn.send(&mut self.scratch) {\n                    Ok(v) => v,\n                    Err(quiche::Error::Done) => break,\n                    Err(e) => {\n                        error!(\"QUIC send error: {:#}\", e);\n                        break;\n                    }\n                };\n\n                // TODO implement pacing with SO_TXTIME. 
(We can do\n                // sendmmsg at the same time).\n                self.socket.send_to(&self.scratch[..len], send_info.to)?;\n            }\n        }\n    }\n\n    fn pump_stream(&mut self, sid: u64) -> Result<bool, ConnError> {\n        use bytes::Buf;\n\n        self.scratch.truncate(0);\n        if let Some(partial) = self.partial_reads.remove(&sid) {\n            self.scratch.unsplit(partial);\n        }\n\n        let mut off = self.scratch.len();\n        let mut stream_fin = false;\n        loop {\n            self.scratch.resize(off + protocol::MAX_MESSAGE_SIZE, 0);\n            match self.conn.stream_recv(sid, &mut self.scratch[off..]) {\n                Ok((len, fin)) => {\n                    off += len;\n\n                    if fin {\n                        stream_fin = true;\n                        break;\n                    }\n                }\n                Err(quiche::Error::Done) => break,\n                Err(e) => return Err(e.into()),\n            }\n        }\n\n        // Read messages (there may be multiple).\n        self.scratch.truncate(off);\n        let mut buf = self.scratch.split();\n        while !buf.is_empty() {\n            let (msg, len) = match protocol::decode_message(&buf) {\n                Ok(v) => v,\n                Err(protocol::ProtocolError::ShortBuffer(n)) => {\n                    debug!(have = buf.len(), need = n, sid, \"partial message\");\n                    self.partial_reads.insert(sid, buf);\n                    break;\n                }\n                Err(e) => {\n                    error!(\"protocol error: {:#}\", e);\n                    break;\n                }\n            };\n\n            trace!(\n                sid,\n                %msg,\n                len,\n                fin = stream_fin,\n                \"received msg\",\n            );\n\n            buf.advance(len);\n            match self.incoming.send(ConnEvent::StreamMessage(sid, msg)) {\n                Ok(()) => {}\n 
               Err(_) => {\n                    self.start_shutdown()?;\n                    break;\n                }\n            }\n        }\n\n        Ok(stream_fin)\n    }\n\n    fn send_message(\n        &mut self,\n        sid: u64,\n        msg: protocol::MessageType,\n        fin: bool,\n    ) -> Result<(), ConnError> {\n        self.scratch.resize(protocol::MAX_MESSAGE_SIZE, 0);\n        let len = protocol::encode_message(&msg, &mut self.scratch)?;\n\n        trace!(sid, %msg, fin, \"sending message\");\n        match self.conn.stream_send(sid, &self.scratch[..len], fin) {\n            Ok(_) => Ok(()),\n            Err(quiche::Error::Done) | Err(quiche::Error::FinalSize) => {\n                warn!(sid, %msg, \"dropping message on blocked stream\");\n                if fin {\n                    // Try to close the connection anyway.\n                    let _ = self.conn.stream_send(sid, &[], fin);\n                }\n\n                Ok(())\n            }\n            Err(e) => Err(e.into()),\n        }\n    }\n\n    fn start_shutdown(&mut self) -> Result<(), ConnError> {\n        match self.conn.close(true, 0x00, b\"\") {\n            Ok(()) | Err(quiche::Error::Done) => (),\n            Err(e) => return Err(e.into()),\n        }\n        self.shutting_down = true;\n        Ok(())\n    }\n}\n\nfn gen_scid() -> quiche::ConnectionId<'static> {\n    use ring::rand::SecureRandom;\n\n    let mut scid = vec![0; quiche::MAX_CONN_ID_LEN];\n\n    ring::rand::SystemRandom::new().fill(&mut scid[..]).unwrap();\n    quiche::ConnectionId::from_vec(scid)\n}\n\nfn resolve_server(hostport: &str) -> Result<(String, SocketAddr), ConnError> {\n    use std::net::ToSocketAddrs;\n\n    let Ok((host, port)) = hostport::split_host_port(hostport) else {\n        return Err(ConnError::InvalidAddress(hostport.to_string()));\n    };\n\n    let port = port.unwrap_or_else(|| {\n        info!(\"using default port ({DEFAULT_PORT})\");\n        DEFAULT_PORT\n    });\n\n    // Rust 
chokes on zone identifiers. They are rarely needed.\n    let host = if let Some((before, _)) = host.rsplit_once('%') {\n        before\n    } else {\n        &host\n    };\n\n    let addr = (host, port)\n        .to_socket_addrs()\n        .map_err(|_| ConnError::InvalidAddress(hostport.to_string()))?\n        .next()\n        .unwrap();\n\n    Ok((host.to_string(), addr))\n}\n"
  },
  {
    "path": "mm-client-common/src/display_params.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse mm_protocol as protocol;\n\nuse crate::{pixel_scale::PixelScale, validation::*};\n\n#[derive(Debug, Clone, PartialEq, Eq, uniffi::Record)]\npub struct DisplayParams {\n    pub width: u32,\n    pub height: u32,\n    pub framerate: u32,\n    pub ui_scale: PixelScale,\n}\n\nimpl TryFrom<protocol::VirtualDisplayParameters> for DisplayParams {\n    type Error = ValidationError;\n\n    fn try_from(msg: protocol::VirtualDisplayParameters) -> Result<Self, Self::Error> {\n        let res = required_field!(msg.resolution)?;\n\n        Ok(DisplayParams {\n            width: res.width,\n            height: res.height,\n            framerate: msg.framerate_hz,\n            ui_scale: required_field!(msg.ui_scale)?.try_into()?,\n        })\n    }\n}\n\nimpl From<DisplayParams> for protocol::VirtualDisplayParameters {\n    fn from(value: DisplayParams) -> Self {\n        protocol::VirtualDisplayParameters {\n            resolution: Some(protocol::Size {\n                width: value.width,\n                height: value.height,\n            }),\n            framerate_hz: value.framerate,\n            ui_scale: Some(value.ui_scale.into()),\n        }\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/input.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse mm_protocol as protocol;\n\npub use protocol::gamepad::GamepadLayout;\npub use protocol::gamepad_input::{GamepadButton, GamepadButtonState};\npub use protocol::gamepad_motion::GamepadAxis;\npub use protocol::keyboard_input::{Key, KeyState};\npub use protocol::pointer_input::{Button, ButtonState};\npub use protocol::pointer_scroll::ScrollType;\npub use protocol::update_cursor::CursorIcon;\n\nuse crate::validation::ValidationError;\n\n#[derive(Debug, Clone, Copy, uniffi::Record)]\npub struct Gamepad {\n    pub id: u64,\n    pub layout: GamepadLayout,\n}\n\nimpl From<Gamepad> for protocol::Gamepad {\n    fn from(value: Gamepad) -> Self {\n        Self {\n            id: value.id,\n            layout: value.layout.into(),\n        }\n    }\n}\n\nimpl TryFrom<protocol::Gamepad> for Gamepad {\n    type Error = ValidationError;\n\n    fn try_from(value: protocol::Gamepad) -> Result<Self, Self::Error> {\n        let layout = value\n            .layout\n            .try_into()\n            .map_err(|_| ValidationError::InvalidEnum(\"layout\".to_string()))?;\n\n        if value.id == 0 {\n            return Err(ValidationError::Required(\"id\".to_string()));\n        }\n\n        Ok(Self {\n            id: value.id,\n            layout,\n        })\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/lib.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{\n    collections::{HashMap, HashSet},\n    sync::Arc,\n    time,\n};\n\nuse async_mutex::{Mutex as AsyncMutex, MutexGuard as AsyncMutexGuard};\nuse futures::{channel::oneshot, executor::block_on};\nuse mm_protocol as protocol;\nuse tracing::{debug, error};\n\nmod attachment;\nmod conn;\nmod logging;\nmod packet;\nmod session;\nmod stats;\nmod validation;\n\npub mod codec;\npub mod display_params;\npub mod input;\npub mod pixel_scale;\n\npub use attachment::*;\npub use logging::*;\npub use packet::*;\npub use session::*;\n\nuniffi::setup_scaffolding!();\n\npub use protocol::error::ErrorCode;\n\n#[derive(Debug, Clone, thiserror::Error, uniffi::Error)]\n#[uniffi(flat_error)]\npub enum ClientError {\n    #[error(\"protocol error\")]\n    ProtocolError(#[from] protocol::ProtocolError),\n    #[error(\"{}: {}\", .0.err_code().as_str_name(), .0.error_text)]\n    ServerError(protocol::Error),\n    #[error(\"request timed out\")]\n    RequestTimeout,\n    #[error(\"connection error\")]\n    ConnectionError(#[from] conn::ConnError),\n    #[error(\"stream closed before request could be received\")]\n    Canceled(#[from] oneshot::Canceled),\n    #[error(\"received unexpected message: {0}\")]\n    UnexpectedMessage(protocol::MessageType),\n    #[error(\"message validation failed\")]\n    ValidationFailed(#[from] validation::ValidationError),\n    #[error(\"client defunct\")]\n    Defunct,\n    #[error(\"attachment ended\")]\n    Detached,\n}\n\n/// A handle for the QUIC connection thread, used to push outgoing messages.\nstruct ConnHandle {\n    thread_handle: std::thread::JoinHandle<Result<(), conn::ConnError>>,\n    waker: Arc<mio::Waker>,\n    outgoing: flume::Sender<conn::OutgoingMessage>,\n    roundtrips: flume::Sender<(u64, Roundtrip)>,\n    attachments: flume::Sender<(u64, AttachmentState)>,\n    shutdown: oneshot::Sender<()>,\n}\n\nimpl ConnHandle {\n    /// 
Signals the connection thread that it should close.\n    fn close(self) -> Result<(), Option<conn::ConnError>> {\n        let _ = self.shutdown.send(());\n        self.waker.wake().map_err(conn::ConnError::from)?;\n\n        if !self.thread_handle.is_finished() {\n            return Ok(());\n        }\n\n        match self.thread_handle.join() {\n            Ok(Ok(_)) => Ok(()),\n            Ok(Err(e)) => Err(Some(e)),\n            // The connection thread panicked.\n            Err(_) => {\n                error!(\"connection thread panicked\");\n                Err(None)\n            }\n        }\n    }\n}\n\n/// Stores the current connection state.\nenum ClientState {\n    Connected(ConnHandle),\n    Defunct(ClientError),\n}\n\nstruct Roundtrip {\n    tx: oneshot::Sender<Result<protocol::MessageType, ClientError>>,\n    deadline: Option<time::Instant>,\n}\n\n/// Client state inside the mutex.\nstruct InnerClient {\n    next_stream_id: u64,\n    state: ClientState,\n}\n\nimpl InnerClient {\n    fn next_stream_id(&mut self) -> u64 {\n        let sid = self.next_stream_id;\n        self.next_stream_id += 4;\n\n        sid\n    }\n\n    fn close(&mut self) -> Result<(), ClientError> {\n        if let ClientState::Defunct(err) = &self.state {\n            return Err(err.clone());\n        }\n\n        let ClientState::Connected(conn) =\n            std::mem::replace(&mut self.state, ClientState::Defunct(ClientError::Defunct))\n        else {\n            unreachable!();\n        };\n\n        //Shut down the connection thread.\n        let close_err = conn.close();\n        if let Err(Some(e)) = &close_err {\n            error!(\"connection error: {e:?}\");\n            self.state = ClientState::Defunct(e.clone().into());\n        }\n\n        match close_err {\n            Ok(_) => Ok(()),\n            Err(Some(e)) => Err(e.into()),\n            Err(None) => Err(ClientError::Defunct),\n        }\n    }\n}\n\n#[derive(uniffi::Object)]\npub struct Client {\n    name: 
String,\n    addr: String,\n    connect_timeout: time::Duration,\n    inner: Arc<AsyncMutex<InnerClient>>,\n    stats: Arc<stats::StatsCollector>,\n}\n\nimpl Client {\n    async fn reconnect(&self) -> Result<AsyncMutexGuard<InnerClient>, ClientError> {\n        let inner_clone = self.inner.clone();\n        let mut guard = self.inner.lock().await;\n\n        match &guard.state {\n            ClientState::Connected(_) => (),\n            ClientState::Defunct(ClientError::ConnectionError(conn::ConnError::Idle)) => {\n                // Reconnect after an idle timeout.\n                let conn = match spawn_conn(\n                    &self.addr,\n                    inner_clone,\n                    self.stats.clone(),\n                    self.connect_timeout,\n                )\n                .await\n                {\n                    Ok(conn) => conn,\n                    Err(e) => {\n                        error!(\"connection failed: {e:#}\");\n                        return Err(e);\n                    }\n                };\n\n                guard.state = ClientState::Connected(conn);\n\n                debug!(\"reconnected after idle timeout\");\n            }\n            ClientState::Defunct(e) => {\n                return Err(e.clone());\n            }\n        }\n\n        Ok(guard)\n    }\n\n    async fn initiate_stream(\n        &self,\n        msg: impl Into<protocol::MessageType>,\n        fin: bool,\n        timeout: Option<time::Duration>,\n    ) -> Result<(u64, protocol::MessageType), ClientError> {\n        let mut guard = self.reconnect().await?;\n\n        let sid = guard.next_stream_id();\n        let (oneshot_tx, oneshot_rx) = oneshot::channel();\n\n        let ConnHandle {\n            waker,\n            outgoing,\n            roundtrips,\n            ..\n        } = match &guard.state {\n            ClientState::Connected(conn) => conn,\n            ClientState::Defunct(err) => return Err(err.clone()),\n        };\n\n        if 
outgoing\n            .send(conn::OutgoingMessage {\n                sid,\n                msg: msg.into(),\n                fin,\n            })\n            .is_err()\n        {\n            match guard.close() {\n                Ok(_) => return Err(ClientError::Defunct),\n                Err(e) => return Err(e),\n            }\n        }\n\n        let deadline = timeout.map(|d| time::Instant::now() + d);\n        if roundtrips\n            .send_async((\n                sid,\n                Roundtrip {\n                    tx: oneshot_tx,\n                    deadline,\n                },\n            ))\n            .await\n            .is_err()\n        {\n            match guard.close() {\n                Ok(_) => return Err(ClientError::Defunct),\n                Err(e) => return Err(e),\n            }\n        };\n\n        waker.wake().map_err(conn::ConnError::from)?;\n\n        // We don't want to hold the mutex while waiting for a response.\n        drop(guard);\n\n        let res = oneshot_rx.await??;\n        Ok((sid, res))\n    }\n\n    async fn roundtrip(\n        &self,\n        msg: impl Into<protocol::MessageType>,\n        timeout: time::Duration,\n    ) -> Result<protocol::MessageType, ClientError> {\n        let (_, msg) = self.initiate_stream(msg, false, Some(timeout)).await?;\n        Ok(msg)\n    }\n}\n\n#[uniffi::export]\nimpl Client {\n    #[uniffi::constructor]\n    pub async fn new(\n        addr: &str,\n        client_name: &str,\n        connect_timeout: time::Duration,\n    ) -> Result<Self, ClientError> {\n        let inner = Arc::new(AsyncMutex::new(InnerClient {\n            next_stream_id: 0,\n            state: ClientState::Defunct(ClientError::Defunct),\n        }));\n\n        let stats = Arc::new(stats::StatsCollector::default());\n        let conn = spawn_conn(addr, inner.clone(), stats.clone(), connect_timeout).await?;\n        inner.lock().await.state = ClientState::Connected(conn);\n\n        Ok(Self {\n            name: 
client_name.to_owned(),\n            addr: addr.to_owned(),\n            connect_timeout,\n            inner,\n            stats,\n        })\n    }\n\n    pub fn stats(&self) -> stats::ClientStats {\n        self.stats.snapshot()\n    }\n\n    pub async fn list_applications(\n        &self,\n        timeout: time::Duration,\n    ) -> Result<Vec<Application>, ClientError> {\n        let res = match self\n            .roundtrip(protocol::ListApplications {}, timeout)\n            .await?\n        {\n            protocol::MessageType::ApplicationList(res) => res,\n            protocol::MessageType::Error(e) => return Err(ClientError::ServerError(e)),\n            msg => return Err(ClientError::UnexpectedMessage(msg)),\n        };\n\n        Ok(res\n            .list\n            .into_iter()\n            .map(Application::try_from)\n            .collect::<Result<Vec<_>, validation::ValidationError>>()?)\n    }\n\n    pub async fn fetch_application_image(\n        &self,\n        application_id: String,\n        format: session::ApplicationImageFormat,\n        timeout: time::Duration,\n    ) -> Result<Vec<u8>, ClientError> {\n        let fetch = protocol::FetchApplicationImage {\n            format: format.into(),\n            application_id,\n        };\n\n        match self.roundtrip(fetch, timeout).await? {\n            protocol::MessageType::ApplicationImage(res) => Ok(res.image_data.into()),\n            protocol::MessageType::Error(e) => Err(ClientError::ServerError(e)),\n            msg => Err(ClientError::UnexpectedMessage(msg)),\n        }\n    }\n\n    pub async fn list_sessions(\n        &self,\n        timeout: time::Duration,\n    ) -> Result<Vec<Session>, ClientError> {\n        let res = match self.roundtrip(protocol::ListSessions {}, timeout).await? 
{\n            protocol::MessageType::SessionList(res) => res,\n            protocol::MessageType::Error(e) => return Err(ClientError::ServerError(e)),\n            msg => return Err(ClientError::UnexpectedMessage(msg)),\n        };\n\n        Ok(res\n            .list\n            .into_iter()\n            .map(Session::try_from)\n            .collect::<Result<Vec<_>, validation::ValidationError>>()?)\n    }\n\n    pub async fn launch_session(\n        &self,\n        application_id: String,\n        display_params: display_params::DisplayParams,\n        permanent_gamepads: Vec<input::Gamepad>,\n        timeout: time::Duration,\n    ) -> Result<Session, ClientError> {\n        let msg = protocol::LaunchSession {\n            application_id: application_id.clone(),\n            display_params: Some(display_params.clone().into()),\n            permanent_gamepads: permanent_gamepads.iter().map(|pad| (*pad).into()).collect(),\n        };\n\n        let res = match self.roundtrip(msg, timeout).await? {\n            protocol::MessageType::SessionLaunched(msg) => msg,\n            protocol::MessageType::Error(e) => return Err(ClientError::ServerError(e)),\n            msg => return Err(ClientError::UnexpectedMessage(msg)),\n        };\n\n        Ok(Session {\n            id: res.id,\n            start: time::SystemTime::now(),\n            application_id,\n            display_params,\n        })\n    }\n\n    pub async fn end_session(&self, id: u64, timeout: time::Duration) -> Result<(), ClientError> {\n        let msg = protocol::EndSession { session_id: id };\n        match self.roundtrip(msg, timeout).await? 
{\n            protocol::MessageType::SessionEnded(_) => Ok(()),\n            protocol::MessageType::Error(e) => Err(ClientError::ServerError(e)),\n            msg => Err(ClientError::UnexpectedMessage(msg)),\n        }\n    }\n\n    pub async fn update_session_display_params(\n        &self,\n        id: u64,\n        params: display_params::DisplayParams,\n        timeout: time::Duration,\n    ) -> Result<(), ClientError> {\n        let msg = protocol::UpdateSession {\n            session_id: id,\n            display_params: Some(params.into()),\n        };\n\n        match self.roundtrip(msg, timeout).await? {\n            protocol::MessageType::SessionUpdated(_) => Ok(()),\n            protocol::MessageType::Error(e) => Err(ClientError::ServerError(e)),\n            msg => Err(ClientError::UnexpectedMessage(msg)),\n        }\n    }\n\n    /// Attach to a session. The timeout parameter is used for the duration of\n    /// the initial request, i.e. until an Attached message is returned by the\n    /// server.\n    pub async fn attach_session(\n        &self,\n        session_id: u64,\n        config: AttachmentConfig,\n        delegate: Arc<dyn AttachmentDelegate>,\n        timeout: time::Duration,\n    ) -> Result<Attachment, ClientError> {\n        // Send an attach message using the roundtrip mechanism, but the leave\n        // the stream open.\n        let channel_conf = if config.channels.is_empty() {\n            None\n        } else {\n            Some(protocol::AudioChannels {\n                channels: config.channels.iter().copied().map(Into::into).collect(),\n            })\n        };\n\n        let attach = protocol::Attach {\n            session_id,\n            client_name: self.name.clone(),\n            attachment_type: protocol::AttachmentType::Operator.into(),\n            video_codec: config.video_codec.unwrap_or_default().into(),\n            streaming_resolution: Some(protocol::Size {\n                width: config.width,\n                
height: config.height,\n            }),\n            video_profile: config.video_profile.unwrap_or_default().into(),\n            quality_preset: config.quality_preset.unwrap_or_default(),\n\n            audio_codec: config.audio_codec.unwrap_or_default().into(),\n            sample_rate_hz: config.sample_rate.unwrap_or_default(),\n            channels: channel_conf,\n        };\n\n        let (sid, res) = self.initiate_stream(attach, false, Some(timeout)).await?;\n\n        let attached = match res {\n            protocol::MessageType::Attached(att) => att,\n            protocol::MessageType::Error(e) => return Err(ClientError::ServerError(e)),\n            msg => return Err(ClientError::UnexpectedMessage(msg)),\n        };\n\n        Attachment::new(\n            sid,\n            self.inner.clone(),\n            attached,\n            delegate,\n            config.video_stream_seq_offset,\n        )\n        .await\n    }\n}\n\nasync fn spawn_conn(\n    addr: &str,\n    client: Arc<AsyncMutex<InnerClient>>,\n    stats: Arc<stats::StatsCollector>,\n    connect_timeout: time::Duration,\n) -> Result<ConnHandle, ClientError> {\n    let (incoming_tx, incoming_rx) = flume::unbounded();\n    let (outgoing_tx, outgoing_rx) = flume::unbounded();\n    let (ready_tx, ready_rx) = oneshot::channel();\n    let (shutdown_tx, shutdown_rx) = oneshot::channel();\n\n    // Rendezvous channels for synchronized state.\n    let (roundtrips_tx, roundtrips_rx) = flume::bounded(0);\n    let (attachments_tx, attachments_rx) = flume::bounded(0);\n\n    let mut conn = conn::Conn::new(addr, incoming_tx, outgoing_rx, ready_tx, shutdown_rx, stats)?;\n    let waker = conn.waker();\n\n    // Spawn a polling loop for the quic connection.\n    let thread_handle = std::thread::Builder::new()\n        .name(\"QUIC conn\".to_string())\n        .spawn(move || conn.run(connect_timeout))\n        .unwrap();\n\n    // Spawn a second thread to fulfill request/response futures and drive\n    // the 
attachment delegates.\n\n    let _ = std::thread::Builder::new()\n        .name(\"mmclient reactor\".to_string())\n        .spawn(move || conn_reactor(incoming_rx, roundtrips_rx, attachments_rx, client))\n        .unwrap();\n\n    if ready_rx.await.is_err() {\n        // An error occured while spinning up.\n        match thread_handle.join() {\n            Ok(Ok(_)) | Err(_) => return Err(ClientError::Defunct),\n            Ok(Err(e)) => return Err(e.into()),\n        }\n    }\n\n    Ok(ConnHandle {\n        thread_handle,\n        waker,\n        outgoing: outgoing_tx,\n        shutdown: shutdown_tx,\n        roundtrips: roundtrips_tx,\n        attachments: attachments_tx,\n    })\n}\n\n#[derive(Default)]\nstruct InFlight {\n    roundtrips: HashMap<u64, Roundtrip>,\n    attachments: HashMap<u64, AttachmentState>,\n    prev_attachments: HashSet<u64>, // By attachment ID.\n}\n\nfn conn_reactor(\n    incoming: flume::Receiver<conn::ConnEvent>,\n    roundtrips: flume::Receiver<(u64, Roundtrip)>,\n    attachments: flume::Receiver<(u64, AttachmentState)>,\n    client: Arc<AsyncMutex<InnerClient>>,\n) {\n    let mut in_flight = InFlight::default();\n    let mut tick = time::Instant::now() + time::Duration::from_secs(1);\n\n    loop {\n        // Perform some cleanup once per second.\n        let now = time::Instant::now();\n        if now > tick {\n            tick = now + time::Duration::from_secs(1);\n\n            // Check roundtrip deadlines.\n            let mut timed_out = Vec::new();\n            for (sid, Roundtrip { deadline, .. }) in in_flight.roundtrips.iter() {\n                if deadline.is_some_and(|dl| now >= dl) {\n                    timed_out.push(*sid);\n                }\n            }\n\n            // Fulfill the futures with an error.\n            for id in &timed_out {\n                let Roundtrip { tx, .. 
} = in_flight.roundtrips.remove(id).unwrap();\n                let _ = tx.send(Err(ClientError::RequestTimeout));\n            }\n        }\n\n        enum SelectResult {\n            RecvError,\n            InsertRoundtrip(u64, Roundtrip),\n            InsertAttachment(u64, AttachmentState),\n            Incoming(conn::ConnEvent),\n        }\n\n        let res = flume::select::Selector::new()\n            .recv(&roundtrips, |ev| {\n                if let Ok((sid, rt)) = ev {\n                    SelectResult::InsertRoundtrip(sid, rt)\n                } else {\n                    SelectResult::RecvError\n                }\n            })\n            .recv(&attachments, |ev| {\n                if let Ok((sid, att)) = ev {\n                    SelectResult::InsertAttachment(sid, att)\n                } else {\n                    SelectResult::RecvError\n                }\n            })\n            .recv(&incoming, |ev| {\n                if let Ok(ev) = ev {\n                    SelectResult::Incoming(ev)\n                } else {\n                    SelectResult::RecvError\n                }\n            })\n            .wait_deadline(tick);\n\n        match res {\n            Err(flume::select::SelectError::Timeout) => continue,\n            Ok(SelectResult::RecvError) => break,\n            Ok(SelectResult::InsertRoundtrip(sid, rt)) => {\n                in_flight.roundtrips.insert(sid, rt);\n            }\n            Ok(SelectResult::InsertAttachment(sid, att)) => {\n                in_flight.attachments.insert(sid, att);\n            }\n            Ok(SelectResult::Incoming(ev)) => conn_reactor_handle_incoming(&mut in_flight, ev),\n        };\n    }\n\n    // The client is probably already closed, but we should make sure, since\n    // this thread is the only one notified if the connection thread died.\n    let mut guard = block_on(client.lock());\n    let stream_err = match guard.close() {\n        Err(e) => Some(e.clone()),\n        Ok(_) => None,\n    
};\n\n    for (_, att) in in_flight.attachments.drain() {\n        att.handle_close(stream_err.clone());\n    }\n\n    in_flight.roundtrips.clear(); // Cancels the futures.\n}\n\nfn conn_reactor_handle_incoming(in_flight: &mut InFlight, ev: conn::ConnEvent) {\n    match ev {\n        conn::ConnEvent::StreamMessage(sid, msg) => {\n            if let Some(attachment) = in_flight.attachments.get_mut(&sid) {\n                attachment.handle_message(msg);\n                return;\n            }\n\n            if let Some(Roundtrip { tx, .. }) = in_flight.roundtrips.remove(&sid) {\n                let _ = tx.send(Ok(msg));\n            }\n        }\n        conn::ConnEvent::Datagram(msg) => {\n            let (session_id, attachment_id) = match &msg {\n                protocol::MessageType::VideoChunk(chunk) => (chunk.session_id, chunk.attachment_id),\n                protocol::MessageType::AudioChunk(chunk) => (chunk.session_id, chunk.attachment_id),\n                msg => {\n                    error!(\"unexpected {} as datagram\", msg);\n                    return;\n                }\n            };\n\n            // Find the relevant attachment. 
The session ID and attachment\n            // may be omitted if there's only one attachment.\n            let attachment = match (session_id, attachment_id) {\n                (0, 0) if in_flight.attachments.len() == 1 => {\n                    in_flight.attachments.iter_mut().next()\n                }\n                (0, _) | (_, 0) => None, // This is invalid.\n                (s, a) => in_flight\n                    .attachments\n                    .iter_mut()\n                    .find(|(_, att)| att.session_id == s && att.attachment_id == a),\n            };\n\n            if let Some((_, attachment)) = attachment {\n                attachment.handle_message(msg);\n            } else if !in_flight.prev_attachments.contains(&attachment_id) {\n                error!(\n                    session_id,\n                    attachment_id, \"failed to match datagram to attachment\"\n                );\n            }\n        }\n        conn::ConnEvent::StreamClosed(sid) => {\n            in_flight.roundtrips.remove(&sid);\n            if let Some(attachment) = in_flight.attachments.remove(&sid) {\n                in_flight.prev_attachments.insert(attachment.attachment_id);\n                attachment.handle_close(None);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/logging.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::sync::{Arc, OnceLock};\n\n#[derive(uniffi::Enum)]\npub enum LogLevel {\n    None,\n    Trace,\n    Debug,\n    Info,\n    Warn,\n    Error,\n}\n\nimpl From<log::Level> for LogLevel {\n    fn from(value: log::Level) -> Self {\n        match value {\n            log::Level::Trace => LogLevel::Trace,\n            log::Level::Debug => LogLevel::Debug,\n            log::Level::Info => LogLevel::Info,\n            log::Level::Warn => LogLevel::Warn,\n            log::Level::Error => LogLevel::Error,\n        }\n    }\n}\n\n/// An interface for receiving logs from this library.\n#[uniffi::export(with_foreign)]\npub trait LogDelegate: Send + Sync + std::fmt::Debug {\n    fn log(&self, level: LogLevel, target: String, msg: String);\n}\n\nstruct LogWrapper(Arc<dyn LogDelegate>);\n\nimpl log::Log for LogWrapper {\n    fn enabled(&self, metadata: &log::Metadata) -> bool {\n        metadata.level() <= log::max_level()\n    }\n\n    fn log(&self, record: &log::Record) {\n        if self.enabled(record.metadata()) {\n            LogDelegate::log(\n                &*self.0,\n                record.level().into(),\n                record.target().to_owned(),\n                record.args().to_string(),\n            )\n        }\n    }\n\n    fn flush(&self) {}\n}\n\n/// Set the minimum log level.\n#[uniffi::export]\nfn set_log_level(level: LogLevel) {\n    let filter = match level {\n        LogLevel::None => log::LevelFilter::Off,\n        LogLevel::Trace => log::LevelFilter::Trace,\n        LogLevel::Debug => log::LevelFilter::Debug,\n        LogLevel::Info => log::LevelFilter::Info,\n        LogLevel::Warn => log::LevelFilter::Warn,\n        LogLevel::Error => log::LevelFilter::Error,\n    };\n\n    log::set_max_level(filter);\n}\n\n/// Set the global logger.\n#[uniffi::export]\nfn set_logger(logger: Arc<dyn LogDelegate>) {\n    // This has to accept an Arc to be 
exportable by uniffi, however awkward\n    // that may be.\n    static LOGGER: OnceLock<LogWrapper> = OnceLock::new();\n\n    let logger = LOGGER.get_or_init(|| LogWrapper(logger));\n    log::set_logger(logger).expect(\"failed to set logger\")\n}\n"
  },
  {
    "path": "mm-client-common/src/packet/ring.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::collections::{BTreeMap, VecDeque};\n\nuse mm_protocol as protocol;\nuse tracing::warn;\n\nuse super::{DroppedPacket, Packet};\n\nconst RING_TARGET_SIZE: usize = 5;\n\npub(crate) trait Chunk {\n    fn seq(&self) -> u64;\n    fn stream_seq(&self) -> u64;\n    fn chunk(&self) -> u32;\n    fn num_chunks(&self) -> u32;\n    fn data(&self) -> bytes::Bytes;\n    fn pts(&self) -> u64;\n    fn hierarchical_layer(&self) -> u32;\n    fn fec_metadata(&self) -> Option<protocol::FecMetadata>;\n}\n\nimpl Chunk for protocol::VideoChunk {\n    fn seq(&self) -> u64 {\n        self.seq\n    }\n\n    fn stream_seq(&self) -> u64 {\n        self.stream_seq\n    }\n\n    fn chunk(&self) -> u32 {\n        self.chunk\n    }\n\n    fn num_chunks(&self) -> u32 {\n        self.num_chunks\n    }\n\n    fn data(&self) -> bytes::Bytes {\n        self.data.clone()\n    }\n\n    fn pts(&self) -> u64 {\n        self.timestamp\n    }\n\n    fn hierarchical_layer(&self) -> u32 {\n        self.hierarchical_layer\n    }\n\n    fn fec_metadata(&self) -> Option<mm_protocol::FecMetadata> {\n        self.fec_metadata.clone()\n    }\n}\n\nimpl Chunk for protocol::AudioChunk {\n    fn seq(&self) -> u64 {\n        self.seq\n    }\n\n    fn stream_seq(&self) -> u64 {\n        self.stream_seq\n    }\n\n    fn chunk(&self) -> u32 {\n        self.chunk\n    }\n\n    fn num_chunks(&self) -> u32 {\n        self.num_chunks\n    }\n\n    fn data(&self) -> bytes::Bytes {\n        self.data.clone()\n    }\n\n    fn pts(&self) -> u64 {\n        self.timestamp\n    }\n\n    fn hierarchical_layer(&self) -> u32 {\n        0\n    }\n\n    fn fec_metadata(&self) -> Option<mm_protocol::FecMetadata> {\n        self.fec_metadata.clone()\n    }\n}\n\n#[derive(Debug)]\nenum FECDecoder {\n    Plain(Vec<Option<bytes::Bytes>>),\n    RaptorQ {\n        dec: raptorq::Decoder,\n        res: Option<bytes::Bytes>,\n    
},\n}\n\n#[derive(Debug)]\nstruct WipPacket {\n    stream_seq: u64,\n    seq: u64,\n    pts: u64,\n    hierarchical_layer: u32,\n    decoder: FECDecoder,\n}\n\nimpl WipPacket {\n    fn new(incoming: impl Chunk) -> Result<Self, PacketRingError> {\n        let decoder = if let Some(md) = incoming.fec_metadata() {\n            if md.fec_scheme() != protocol::fec_metadata::FecScheme::Raptorq {\n                return Err(PacketRingError::UnsupportedFecScheme(md.fec_scheme));\n            }\n\n            let oti: &[u8] = &md.fec_oti;\n            let Ok(config) = oti\n                .try_into()\n                .map(raptorq::ObjectTransmissionInformation::deserialize)\n            else {\n                return Err(PacketRingError::InvalidFecMetadata);\n            };\n\n            FECDecoder::RaptorQ {\n                dec: raptorq::Decoder::new(config),\n                res: None,\n            }\n        } else {\n            FECDecoder::Plain(vec![None; incoming.num_chunks().max(1) as usize])\n        };\n\n        let mut this = Self {\n            stream_seq: incoming.stream_seq(),\n            seq: incoming.seq(),\n            pts: incoming.pts(),\n            hierarchical_layer: incoming.hierarchical_layer(),\n            decoder,\n        };\n\n        this.insert(incoming)?;\n        Ok(this)\n    }\n\n    fn insert(&mut self, incoming: impl Chunk) -> Result<(), PacketRingError> {\n        match &mut self.decoder {\n            FECDecoder::Plain(ref mut chunks) => {\n                let chunk = incoming.chunk() as usize;\n                let num_chunks = incoming.num_chunks() as usize;\n                if num_chunks != chunks.len() || chunk >= num_chunks {\n                    return Err(PacketRingError::InvalidChunk(chunk, num_chunks));\n                } else if chunks[chunk].is_some() {\n                    return Err(PacketRingError::DuplicateChunk(chunk));\n                }\n\n                chunks[chunk] = Some(incoming.data());\n                
Ok(())\n            }\n            FECDecoder::RaptorQ { dec, .. } => {\n                let Some(md) = incoming.fec_metadata() else {\n                    return Err(PacketRingError::InvalidFecMetadata);\n                };\n\n                let b: &[u8] = &md.fec_payload_id;\n                let Ok(payload_id) = b.try_into().map(raptorq::PayloadId::deserialize) else {\n                    return Err(PacketRingError::InvalidFecMetadata);\n                };\n\n                dec.add_new_packet(raptorq::EncodingPacket::new(\n                    payload_id,\n                    incoming.data().into(),\n                ));\n                Ok(())\n            }\n        }\n    }\n\n    fn is_complete(&mut self) -> bool {\n        match &mut self.decoder {\n            FECDecoder::Plain(chunks) => chunks.iter().all(|c| c.is_some()),\n            FECDecoder::RaptorQ { dec, ref mut res } => {\n                if res.is_some() {\n                    true\n                } else if let Some(data) = dec.get_result() {\n                    *res = Some(bytes::Bytes::from(data));\n                    true\n                } else {\n                    false\n                }\n            }\n        }\n    }\n\n    /// Reconstructs the completed frame. 
Panics if the packet is not yet\n    /// recoverable.\n    fn complete(self) -> Packet {\n        let data = match self.decoder {\n            FECDecoder::Plain(chunks) => {\n                let chunks: Vec<_> = chunks\n                    .into_iter()\n                    .map(|c| c.expect(\"packet incomplete\"))\n                    .collect();\n\n                chunks.into()\n            }\n            FECDecoder::RaptorQ { dec, res } => {\n                let data = res.unwrap_or_else(|| {\n                    bytes::Bytes::from(dec.get_result().expect(\"packet incomplete\"))\n                });\n\n                [data].into()\n            }\n        };\n\n        Packet {\n            pts: self.pts,\n            seq: self.seq,\n            stream_seq: self.stream_seq,\n            hierarchical_layer: self.hierarchical_layer,\n            data,\n        }\n    }\n}\n\n#[derive(Debug, PartialEq, Eq, Clone, thiserror::Error)]\npub(crate) enum PacketRingError {\n    #[error(\"invalid chunk {0} of {1}\")]\n    InvalidChunk(usize, usize),\n    #[error(\"duplicate chunk {0}\")]\n    DuplicateChunk(usize),\n    #[error(\"unsupported FEC scheme: {0}\")]\n    UnsupportedFecScheme(i32),\n    #[error(\"invalid FEC metadatata\")]\n    InvalidFecMetadata,\n}\n\n#[derive(Default)]\npub(crate) struct PacketRing {\n    // Oldest frames at the front, newest at the back.\n    ring: VecDeque<WipPacket>,\n    min_stream_seq: u64,\n    min_seq: BTreeMap<u64, u64>, // Indexed by stream_seq.\n    dropped: VecDeque<DroppedPacket>,\n}\n\nimpl PacketRing {\n    pub(crate) fn new() -> Self {\n        Self::default()\n    }\n\n    pub(crate) fn recv_chunk(&mut self, incoming: impl Chunk) -> Result<(), PacketRingError> {\n        let stream_seq = incoming.stream_seq();\n        let seq_floor = self.min_seq.get(&stream_seq).copied().unwrap_or_default();\n        if incoming.stream_seq() < self.min_stream_seq || incoming.seq() < seq_floor {\n            return Ok(());\n        }\n\n       
 match self\n            .ring\n            .iter_mut()\n            .find(|wip| wip.stream_seq == incoming.stream_seq() && wip.seq == incoming.seq())\n        {\n            Some(wip) => wip.insert(incoming),\n            None => {\n                let wip = WipPacket::new(incoming)?;\n\n                // Insert into the ring in order with respect to packets with\n                // the same stream_seq.\n                if let Some(idx) = self\n                    .ring\n                    .iter()\n                    .position(|p| p.stream_seq == wip.stream_seq && p.seq > wip.seq)\n                {\n                    self.ring.insert(idx, wip);\n                } else {\n                    self.ring.push_back(wip);\n                }\n\n                loop {\n                    let len = self.ring.len();\n                    let front = self.ring.front_mut().unwrap();\n\n                    if front.is_complete() || len <= RING_TARGET_SIZE {\n                        break;\n                    }\n\n                    // If the oldest frame is incomplete, drop it to make room.\n                    if !front.is_complete() {\n                        let dropped = self.ring.pop_front().unwrap();\n\n                        warn!(\n                            seq = dropped.seq,\n                            stream_seq = dropped.stream_seq,\n                            hierarchical_layer = dropped.hierarchical_layer,\n                            \"dropped packet!\",\n                        );\n\n                        self.dropped.push_back(DroppedPacket {\n                            pts: dropped.pts,\n                            seq: dropped.seq,\n                            stream_seq: dropped.stream_seq,\n                            hierarchical_layer: dropped.hierarchical_layer,\n                        })\n                    } else {\n                        break;\n                    }\n                }\n\n                Ok(())\n            }\n      
  }\n    }\n\n    /// Removes packets matching the stream_seq for which all chunks are\n    /// accounted for, and returns them as an iterator. Stops before the first\n    /// incomplete packet that matches.\n    ///\n    /// The iterator must be used to actually remove packets from the ring.\n    /// Dropping the iterator early will not drop the remaining packets.\n    pub(crate) fn drain_completed(&mut self, stream_seq: u64) -> DrainCompleted {\n        DrainCompleted(self, stream_seq)\n    }\n\n    /// Removes all packets with the same stream_seq or lower.\n    pub(crate) fn discard(&mut self, stream_seq: u64) {\n        self.min_stream_seq = stream_seq + 1;\n        self.ring.retain(|wip| wip.stream_seq > stream_seq);\n        self.min_seq.retain(|x, _| *x > stream_seq);\n    }\n}\n\npub(crate) struct DrainCompleted<'a>(&'a mut PacketRing, u64);\n\nimpl Iterator for DrainCompleted<'_> {\n    type Item = Result<Packet, DroppedPacket>;\n\n    fn next(&mut self) -> Option<Self::Item> {\n        let dropped = self\n            .0\n            .dropped\n            .iter()\n            .position(|p| p.stream_seq == self.1)\n            .and_then(|idx| self.0.dropped.remove(idx));\n        if let Some(dropped) = dropped {\n            self.0.min_seq.insert(dropped.stream_seq, dropped.seq + 1);\n            return Some(Err(dropped));\n        }\n\n        let ring = &mut self.0.ring;\n        match ring\n            .iter_mut()\n            .enumerate()\n            .find(|(_, wip)| wip.stream_seq == self.1)\n        {\n            Some((idx, ref mut v)) => {\n                if v.is_complete() {\n                    self.0.min_seq.insert(v.stream_seq, v.seq + 1);\n                    Some(Ok(ring.remove(idx).unwrap().complete()))\n                } else {\n                    None\n                }\n            }\n            _ => None,\n        }\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_ring() {\n        let mut ring = 
PacketRing::default();\n\n        let assert_frames = |ring: &mut PacketRing, s: &[u64]| {\n            let completed = ring.drain_completed(0).collect::<Vec<_>>();\n            assert_eq!(s.len(), completed.len());\n\n            for (expected_seq, actual) in s.iter().zip(completed.into_iter()) {\n                let actual = actual.expect(\"no dropped packet\");\n                assert_eq!(actual.seq, *expected_seq);\n                assert_eq!(&actual.data(), &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);\n            }\n        };\n\n        let frame_one = make_chunks(0, &[&[0, 1, 2], &[3, 4, 5, 6], &[7, 8], &[9]]); // 4 chunks\n        let frame_two = make_chunks(1, &[&[0, 1, 2, 3, 4], &[5, 6], &[7, 8, 9]]); // 3 chunks\n        let frame_three = make_chunks(2, &[&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]]); // 1 chunk\n\n        ring.recv_chunk(frame_three[0].clone()).unwrap(); // Frame three complete.\n        ring.recv_chunk(frame_two[1].clone()).unwrap();\n        ring.recv_chunk(frame_one[0].clone()).unwrap();\n\n        assert_eq!(ring.drain_completed(0).collect::<Vec<_>>().len(), 0);\n\n        ring.recv_chunk(frame_one[1].clone()).unwrap();\n        ring.recv_chunk(frame_one[2].clone()).unwrap();\n        ring.recv_chunk(frame_two[0].clone()).unwrap();\n\n        assert_eq!(ring.drain_completed(0).collect::<Vec<_>>().len(), 0);\n\n        ring.recv_chunk(frame_one[3].clone()).unwrap(); // Frame one complete.\n        assert_frames(&mut ring, &[0]);\n\n        ring.recv_chunk(frame_two[2].clone()).unwrap(); // Frame two complete, frame three was already complete.\n        assert_frames(&mut ring, &[1, 2]);\n\n        assert_eq!(ring.drain_completed(0).collect::<Vec<_>>().len(), 0);\n    }\n\n    #[test]\n    fn test_ring_drop() {\n        let mut ring = PacketRing::default();\n        for i in 0..10 {\n            // Send ten partial frames (each missing one chunk.)\n            let chunks = make_chunks(i, &[&[0, 1], &[2, 3]]);\n            
ring.recv_chunk(chunks[0].clone()).unwrap();\n        }\n\n        // Then send a complete frame.\n        let chunks = make_chunks(10, &[&[0, 1], &[2, 3], &[4, 5], &[6, 7], &[8, 9]]);\n        for chunk in chunks {\n            ring.recv_chunk(chunk).unwrap();\n        }\n\n        for i in 11..20 {\n            // Send more partial frames.\n            let chunks = make_chunks(i, &[&[0, 1], &[2, 3]]);\n            ring.recv_chunk(chunks[0].clone()).unwrap();\n        }\n\n        // The ring should have dropped the partial frames and should indicate\n        // that alongside the completed one.\n        let completed = ring.drain_completed(0).collect::<Vec<_>>();\n        assert_eq!(11, completed.len());\n        assert_eq!(completed[0].as_ref().err().unwrap().seq, 0);\n        assert_eq!(completed[1].as_ref().err().unwrap().seq, 1);\n        assert_eq!(completed[2].as_ref().err().unwrap().seq, 2);\n        assert_eq!(completed[3].as_ref().err().unwrap().seq, 3);\n        assert_eq!(completed[4].as_ref().err().unwrap().seq, 4);\n        assert_eq!(completed[5].as_ref().err().unwrap().seq, 5);\n        assert_eq!(completed[6].as_ref().err().unwrap().seq, 6);\n        assert_eq!(completed[7].as_ref().err().unwrap().seq, 7);\n        assert_eq!(completed[8].as_ref().err().unwrap().seq, 8);\n        assert_eq!(completed[9].as_ref().err().unwrap().seq, 9);\n        assert_eq!(completed[10].as_ref().unwrap().seq, 10);\n\n        let frame = completed.last().unwrap();\n        assert_eq!(\n            &frame.as_ref().unwrap().data(),\n            &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n        );\n    }\n\n    fn make_chunks(seq: u64, chunks: &[&[u8]]) -> Vec<protocol::VideoChunk> {\n        chunks\n            .iter()\n            .enumerate()\n            .map(|(i, chunk)| protocol::VideoChunk {\n                attachment_id: 0,\n                session_id: 0,\n                stream_seq: 0,\n                seq,\n                chunk: i as u32,\n                
num_chunks: chunks.len() as u32,\n                data: bytes::Bytes::copy_from_slice(chunk),\n                timestamp: 0,\n                hierarchical_layer: 0,\n                fec_metadata: None,\n            })\n            .collect()\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/packet.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nmod ring;\nuse std::collections::VecDeque;\n\npub(crate) use ring::*;\n\n#[derive(Debug, Clone, uniffi::Object)]\npub struct Packet {\n    pub(crate) pts: u64,\n    pub(crate) seq: u64,\n    pub(crate) stream_seq: u64,\n    pub(crate) hierarchical_layer: u32,\n    data: VecDeque<bytes::Bytes>,\n}\n\n#[derive(Debug, Clone, uniffi::Record)]\npub struct DroppedPacket {\n    pub pts: u64,\n    pub seq: u64,\n    pub stream_seq: u64,\n    pub hierarchical_layer: u32,\n}\n\n#[uniffi::export]\nimpl Packet {\n    pub fn pts(&self) -> u64 {\n        self.pts\n    }\n\n    pub fn stream_seq(&self) -> u64 {\n        self.stream_seq\n    }\n\n    pub fn seq(&self) -> u64 {\n        self.seq\n    }\n\n    pub fn hierarchical_layer(&self) -> u32 {\n        self.hierarchical_layer\n    }\n\n    pub fn data(&self) -> Vec<u8> {\n        if self.data.len() == 1 {\n            self.data[0].to_vec()\n        } else {\n            use bytes::buf::BufMut;\n\n            let mut buf = Vec::with_capacity(self.len());\n            for chunk in self.data.iter() {\n                buf.put(chunk.clone());\n            }\n\n            buf\n        }\n    }\n}\n\nimpl Packet {\n    pub fn len(&self) -> usize {\n        self.data.iter().map(|c| c.len()).sum()\n    }\n\n    pub fn is_empty(&self) -> bool {\n        self.len() == 0\n    }\n\n    // Copies the packet data into dst. The length of dst must match the\n    pub fn copy_to_slice(&self, mut dst: &mut [u8]) {\n        use bytes::buf::BufMut;\n\n        for chunk in self.data.iter() {\n            dst.put(chunk.clone());\n        }\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/pixel_scale.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse mm_protocol as protocol;\n\nuse crate::validation::*;\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, uniffi::Record)]\npub struct PixelScale {\n    numerator: u32,\n    denominator: u32,\n}\n\nimpl PixelScale {\n    pub const ONE: Self = Self {\n        numerator: 1,\n        denominator: 1,\n    };\n\n    pub fn new(numerator: u32, denominator: u32) -> Self {\n        Self {\n            numerator,\n            denominator,\n        }\n    }\n\n    pub fn is_fractional(&self) -> bool {\n        (self.numerator % self.denominator) != 0\n    }\n\n    pub fn round_up(self) -> Self {\n        Self {\n            numerator: self.numerator.next_multiple_of(self.denominator) / self.denominator,\n            denominator: 1,\n        }\n    }\n}\n\nimpl std::fmt::Display for PixelScale {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"{:.1}\", self.numerator as f64 / self.denominator as f64)\n    }\n}\n\nimpl TryFrom<protocol::PixelScale> for PixelScale {\n    type Error = ValidationError;\n\n    fn try_from(scale: protocol::PixelScale) -> Result<Self, Self::Error> {\n        if scale.denominator == 0 && scale.numerator != 0 {\n            Ok(Self::ONE)\n        } else if scale.denominator == 0 || scale.numerator == 0 {\n            Err(ValidationError::Required(\"denominator\".to_string()))\n        } else {\n            Ok(Self {\n                numerator: scale.numerator,\n                denominator: scale.denominator,\n            })\n        }\n    }\n}\n\nimpl From<PixelScale> for protocol::PixelScale {\n    fn from(scale: PixelScale) -> Self {\n        Self {\n            numerator: scale.numerator,\n            denominator: scale.denominator,\n        }\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/session.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::time;\n\nuse mm_protocol as protocol;\npub use protocol::ApplicationImageFormat;\n\nuse crate::display_params;\nuse crate::validation::*;\n\n/// A launchable application on the server.\n#[derive(Debug, Clone, PartialEq, Eq, uniffi::Record)]\npub struct Application {\n    pub id: String,\n    pub description: String,\n    pub folder: Vec<String>,\n    pub images_available: Vec<ApplicationImageFormat>,\n}\n\nimpl TryFrom<protocol::application_list::Application> for Application {\n    type Error = ValidationError;\n\n    fn try_from(value: protocol::application_list::Application) -> Result<Self, Self::Error> {\n        let images_available = value\n            .images_available\n            .into_iter()\n            .map(|v| match v.try_into() {\n                Err(_) | Ok(protocol::ApplicationImageFormat::Unknown) => {\n                    Err(ValidationError::InvalidEnum(\"images_available\".into()))\n                }\n                Ok(v) => Ok(v),\n            })\n            .collect::<Result<Vec<_>, _>>()?;\n\n        Ok(Application {\n            id: value.id,\n            description: value.description,\n            folder: value.folder,\n            images_available,\n        })\n    }\n}\n\n/// A running session on the server.\n#[derive(Debug, Clone, PartialEq, Eq, uniffi::Record)]\npub struct Session {\n    pub id: u64,\n    pub application_id: String,\n    pub start: time::SystemTime,\n    pub display_params: display_params::DisplayParams,\n}\n\nimpl TryFrom<protocol::session_list::Session> for Session {\n    type Error = ValidationError;\n\n    fn try_from(msg: protocol::session_list::Session) -> Result<Self, Self::Error> {\n        let start = match required_field!(msg.session_start)?.try_into() {\n            Ok(ts) => Ok(ts),\n            Err(_) => Err(ValidationError::InvalidTimestamp(\n                \"session_start\".to_string(),\n    
        )),\n        }?;\n\n        Ok(Session {\n            id: msg.session_id,\n            application_id: msg.application_id,\n            start,\n            display_params: required_field!(msg.display_params)?.try_into()?,\n        })\n    }\n}\n"
  },
  {
    "path": "mm-client-common/src/stats.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{sync::atomic::AtomicU64, time};\n\n#[derive(Default)]\npub(crate) struct StatsCollector {\n    pub(crate) bytes_tx: AtomicU64,\n    pub(crate) bytes_rx: AtomicU64,\n    pub(crate) rtt_us: AtomicU64,\n}\n\nimpl StatsCollector {\n    pub(crate) fn snapshot(&self) -> ClientStats {\n        let rtt_us = self.rtt_us.load(std::sync::atomic::Ordering::SeqCst);\n        ClientStats {\n            bytes_tx: self.bytes_tx.load(std::sync::atomic::Ordering::SeqCst),\n            bytes_rx: self.bytes_rx.load(std::sync::atomic::Ordering::SeqCst),\n            rtt: time::Duration::from_micros(rtt_us),\n        }\n    }\n}\n\n/// A snapshot of the client's connection statistics.\n#[derive(uniffi::Record, Clone, Copy)]\npub struct ClientStats {\n    pub bytes_tx: u64,\n    pub bytes_rx: u64,\n    pub rtt: time::Duration,\n}\n"
  },
  {
    "path": "mm-client-common/src/validation.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\n#[derive(Debug, Clone, thiserror::Error)]\npub enum ValidationError {\n    #[error(\"{0} must not be null\")]\n    Required(String),\n    #[error(\"{0}: invalid enum value\")]\n    InvalidEnum(String),\n    #[error(\"{0}: invalid timestamp\")]\n    InvalidTimestamp(String),\n}\n\nmacro_rules! required_field {\n    ($msg:ident.$field:ident) => {\n        $msg.$field\n            .ok_or(crate::validation::ValidationError::Required(\n                stringify!($ident).to_string(),\n            ))\n    };\n}\n\npub(crate) use required_field;\n"
  },
  {
    "path": "mm-docgen/Cargo.toml",
    "content": "[package]\nname = \"mmserver-config-docgen\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[[bin]]\nname = \"config-docgen\"\n\n[[bin]]\nname = \"protocol-docgen\"\n\n[dependencies]\nregex = \"1\"\n"
  },
  {
    "path": "mm-docgen/src/bin/config-docgen.rs",
    "content": "//! Generates markdown docs from mmserver.default.toml. Tightly coupled\n//! to the format of that file.\n\nuse std::{\n    fs::File,\n    io::{BufRead as _, BufReader},\n};\n\nuse regex::Regex;\n\nconst FRONT_MATTER: &str = r#\"\n+++\ntitle = \"Configuration Reference\"\n\n[extra]\ntoc = true\n+++\n\"#;\n\nfn main() {\n    let mut args = std::env::args();\n\n    if args.len() != 2 {\n        eprintln!(\"usage: {} SRC\", args.next().unwrap());\n        std::process::exit(1);\n    }\n\n    let _ = args.next().unwrap();\n    let src = args.next().unwrap();\n\n    let r = BufReader::new(File::open(src).expect(\"source path does not exist\"));\n\n    let mut preamble = true;\n    let mut key_path: Vec<String> = Vec::new();\n    let mut docs = Vec::new();\n\n    let keypath_section_re = Regex::new(r\"\\A#?\\s*\\[([a-z0-9-_.]+)\\]\\s*\\z\").unwrap();\n    let key_re = Regex::new(r\"\\A(#?)\\s*([a-z0-9-_]+)\\s=\\s(.*)\\z\").unwrap();\n\n    println!(\"{}\", FRONT_MATTER);\n\n    for line in r.lines() {\n        let s = line.expect(\"io error\");\n        if s.is_empty() {\n            preamble = false;\n\n            for doc in docs.drain(..) 
{\n                println!(\"{}\", doc);\n            }\n\n            continue;\n        } else if preamble {\n            continue;\n        }\n\n        if let Some(header) = s.strip_prefix(\"## *** \") {\n            // Documentation sections.\n            println!(\"\\n## {}\", header.strip_suffix(\" ***\").unwrap());\n        } else if s.starts_with(\"## ***\") {\n            // Section decoration.\n            continue;\n        } else if let Some(doc) = s.strip_prefix(\"##\") {\n            // Key documentation.\n            docs.push(doc.trim_start().to_owned());\n        } else if let Some(m) = key_re.captures(&s) {\n            // Key, value.\n            let is_default = m.get(1).unwrap().is_empty();\n            let key = m.get(2).unwrap().as_str();\n            let value = m.get(3).unwrap().as_str();\n\n            let full_path = key_path\n                .iter()\n                .map(String::as_str)\n                .chain(key.split('.'))\n                .collect::<Vec<_>>()\n                .join(\".\");\n\n            println!(\"\\n#### `{}`\\n\", full_path);\n            if is_default {\n                println!(\"```toml\\n# Default\\n{} = {}\\n```\\n\", key, value);\n            } else {\n                println!(\n                    \"```toml\\n# Example (default unset)\\n{} = {}\\n```\\n\",\n                    key, value\n                );\n            }\n\n            for doc in docs.drain(..) 
{\n                println!(\"{}\", doc);\n            }\n        } else if let Some(m) = keypath_section_re.captures(&s) {\n            // Update keypath for TOML section headers.\n            key_path.clear();\n            for key in m.get(1).unwrap().as_str().split(\".\") {\n                // Example app becomes <app name> in the docs.\n                if key == \"steam-big-picture\" {\n                    key_path.push(\"<app name>\".to_owned());\n                } else {\n                    key_path.push(key.to_owned());\n                }\n            }\n        } else {\n            eprintln!(\"error: unmatched line: \\n{}\", s);\n            std::process::exit(1);\n        }\n    }\n}\n"
  },
  {
    "path": "mm-docgen/src/bin/protocol-docgen.rs",
    "content": "//! Generates markdown docs from mm-protoco/src/messages.proto. Tightly coupled\n//! to the format of that file.\n\nuse std::{\n    fs::File,\n    io::{BufRead as _, BufReader},\n};\n\nconst FRONT_MATTER: &str = r#\"\n+++\ntitle = \"Protocol Reference\"\n\n[extra]\ntoc = true\n+++\n\"#;\n\nfn main() {\n    let mut args = std::env::args();\n\n    if args.len() != 2 {\n        eprintln!(\"usage: {} SRC\", args.next().unwrap());\n        std::process::exit(1);\n    }\n\n    let _ = args.next().unwrap();\n    let src = args.next().unwrap();\n\n    let r = BufReader::new(File::open(src).expect(\"source path does not exist\"));\n\n    println!(\"{}\", FRONT_MATTER);\n\n    // Skip until the first <h1>.\n    let mut message_lines = Vec::new();\n    let mut comment_lines = Vec::new();\n    for line in r\n        .lines()\n        .skip_while(|s| !s.as_ref().unwrap().starts_with(\"// # \"))\n    {\n        let line = line.unwrap();\n        if message_lines.is_empty() && line.is_empty() {\n            emit_comments(&mut comment_lines);\n            println!();\n        } else if let Some(comment) = line.strip_prefix(\"// \").or_else(|| line.strip_prefix(\"//\")) {\n            emit_message_code_block(&mut message_lines);\n            comment_lines.push(comment.to_owned());\n        } else if !line.contains(\"TODO\") {\n            emit_comments(&mut comment_lines);\n            message_lines.push(line);\n        }\n    }\n\n    emit_comments(&mut comment_lines);\n    emit_message_code_block(&mut message_lines);\n}\n\nfn emit_comments(lines: &mut Vec<String>) {\n    let comment = lines.join(\"\\n\");\n\n    // Add internal links.\n    let comment = regex::Regex::new(r\"`(?s)(\\d+)\\s+-\\s+([\\w\\s]+)`\")\n        .unwrap()\n        .replace_all(&comment, |caps: &regex::Captures<'_>| {\n            let slug = caps[2]\n                .to_lowercase()\n                .split_whitespace()\n                .collect::<Vec<_>>()\n                .join(\"-\");\n\n    
        format!(\"[{}](#{}-{})\", &caps[0], &caps[1], slug)\n        });\n\n    println!(\"{}\", comment);\n    lines.clear();\n}\n\nfn emit_message_code_block(lines: &mut Vec<String>) {\n    if !lines.is_empty() {\n        let message = lines.join(\"\\n\");\n        println!(\"\\n```proto\\n{}\\n```\\n\", message.trim());\n        lines.clear();\n    }\n}\n"
  },
  {
    "path": "mm-protocol/Cargo.toml",
    "content": "# Copyright 2024 Colin Marc <hi@colinmarc.com>\n#\n# SPDX-License-Identifier: MIT\n\n[package]\nname = \"mm-protocol\"\nversion = \"0.3.0\"\nedition = \"2021\"\nlicense = \"MIT\"\n\n[dependencies]\nbytes = \"1\"\noctets = \"0.2\"\nprost = \"0.13\"\nthiserror = \"1\"\nuniffi = { version = \"0.28\", optional = true }\n\n[build-dependencies]\nprost-build = \"0.13\"\n\n[features]\nuniffi = [\"dep:uniffi\"]\n"
  },
  {
    "path": "mm-protocol/build.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nfn main() -> std::io::Result<()> {\n    let mut conf = prost_build::Config::new();\n\n    #[cfg(feature = \"uniffi\")]\n    conf.enum_attribute(\".\", \"#[derive(uniffi::Enum)]\");\n\n    conf.bytes([\".\"])\n        .include_file(\"_include.rs\")\n        .compile_protos(&[\"src/messages.proto\"], &[\"src/\"])?;\n\n    Ok(())\n}\n"
  },
  {
    "path": "mm-protocol/src/lib.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse prost::Message as _;\n\n#[cfg(feature = \"uniffi\")]\nuniffi::setup_scaffolding!();\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/_include.rs\"));\npub use messages::*;\n\nmod timestamp;\n\n#[derive(Debug, thiserror::Error)]\nenum ProtobufError {\n    #[error(transparent)]\n    ProtobufDecode(#[from] prost::DecodeError),\n    #[error(transparent)]\n    ProtobufEncode(#[from] prost::EncodeError),\n}\n\n#[derive(Debug, Clone, thiserror::Error)]\npub enum ProtocolError {\n    #[error(\"protobuf encode error: {0}\")]\n    ProtobufEncode(#[from] prost::EncodeError),\n    #[error(\"protobuf decode error: {0}\")]\n    ProtobufDecode(#[from] prost::DecodeError),\n    #[error(\"short buffer, need {0} bytes\")]\n    ShortBuffer(usize),\n    #[error(\"invalid message\")]\n    InvalidMessage,\n    #[error(\"invalid message type: {0} (len={1})\")]\n    InvalidMessageType(u32, usize),\n}\n\n/// The maximum size of a single message. Note that a lower limit may apply to\n/// messages sent as datagrams, based on the connection MTU and QUIC's overhead.\npub const MAX_MESSAGE_SIZE: usize = 1048576;\n\n/// The current protocol version.\npub const ALPN_PROTOCOL_VERSION: &[u8] = b\"mm00\";\n\n// This is a very simplified version of the enum_dispatch macro.\nmacro_rules! 
message_types {\n    ($($num:expr => $variant:ident),*,) => {\n        /// A protocol message.\n        #[repr(u32)]\n        #[derive(Clone, Debug, PartialEq)]\n        pub enum MessageType {\n            $($variant($variant) = $num),*\n        }\n\n        impl std::fmt::Display for MessageType {\n            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n                match self {\n                    $(MessageType::$variant(_) => write!(f, \"{}:{}\", $num, stringify!($variant))),*\n                }\n            }\n        }\n\n        impl MessageType {\n            fn message_type(&self) -> u32 {\n                match self {\n                    $(MessageType::$variant(_) => $num),*\n                }\n            }\n\n            fn encoded_len(&self) -> usize {\n                match self {\n                    $(MessageType::$variant(v) => v.encoded_len()),*\n                }\n            }\n\n            fn encode<B>(&self, buf: &mut B) -> Result<(), ProtocolError>\n            where\n                B: bytes::BufMut,\n            {\n                let res = match self {\n                    $(MessageType::$variant(v) => v.encode(buf)),*\n                };\n\n                res.map_err(|e| e.into())\n            }\n\n            fn decode<B: bytes::Buf>(msg_type: u32, total_len: usize, buf: B) -> Result<Self, ProtocolError> {\n                match msg_type {\n                    $($num => Ok($variant::decode(buf)?.into())),*,\n                    _ => Err(ProtocolError::InvalidMessageType(msg_type, total_len)),\n                }\n            }\n        }\n\n        $(impl From<$variant> for MessageType {\n            fn from(v: $variant) -> Self {\n                MessageType::$variant(v)\n            }\n        })*\n    };\n}\n\nmessage_types! 
{\n    1 => Error,\n    11 => ListApplications,\n    12 => ApplicationList,\n    13 => LaunchSession,\n    14 => SessionLaunched,\n    15 => UpdateSession,\n    16 => SessionUpdated,\n    17 => ListSessions,\n    18 => SessionList,\n    19 => EndSession,\n    20 => SessionEnded,\n    21 => FetchApplicationImage,\n    22 => ApplicationImage,\n    30 => Attach,\n    31 => Attached,\n    32 => KeepAlive,\n    33 => SessionParametersChanged,\n    35 => Detach,\n    51 => VideoChunk,\n    52 => RequestVideoRefresh,\n    56 => AudioChunk,\n    60 => KeyboardInput,\n    61 => PointerEntered,\n    62 => PointerLeft,\n    63 => PointerMotion,\n    64 => PointerInput,\n    65 => PointerScroll,\n    66 => UpdateCursor,\n    67 => LockPointer,\n    68 => ReleasePointer,\n    69 => RelativePointerMotion,\n    70 => GamepadAvailable,\n    71 => GamepadUnavailable,\n    72 => GamepadMotion,\n    73 => GamepadInput,\n}\n\n/// Reads a header-prefixed message from a byte slice, and returns the number\n/// of bytes consumed. Returns ProtocolError::ShortBuffer if the buffer\n/// contains a partial message.\npub fn decode_message(buf: &[u8]) -> Result<(MessageType, usize), ProtocolError> {\n    if buf.len() < 10 {\n        return Err(ProtocolError::ShortBuffer(10));\n    }\n\n    let (msg_type, data_off, total_len) = {\n        let mut hdr = octets::Octets::with_slice(&buf[..10]);\n\n        let remaining = get_varint32(&mut hdr)? 
as usize;\n        let prefix_off = hdr.off();\n\n        let msg_type = get_varint32(&mut hdr)?;\n        let off = hdr.off();\n\n        (msg_type, off, prefix_off + remaining)\n    };\n\n    if msg_type == 0 || total_len == 0 || total_len > MAX_MESSAGE_SIZE || data_off > total_len {\n        return Err(ProtocolError::InvalidMessage);\n    } else if data_off > buf.len() || total_len > buf.len() {\n        return Err(ProtocolError::ShortBuffer(total_len));\n    }\n\n    let padded_len = total_len.max(10);\n    let msg = MessageType::decode(msg_type, padded_len, &buf[data_off..total_len])?;\n    Ok((msg, padded_len))\n}\n\n/// Writes a header-prefixed message to a byte slice, and returns the number\n/// of bytes used. Returns ProtocolError::ShortBuffer if the slice doesn't have\n/// enough capacity.\npub fn encode_message(msg: &MessageType, buf: &mut [u8]) -> Result<usize, ProtocolError> {\n    let msg_type = msg.message_type();\n    let msg_len =\n        u32::try_from(msg.encoded_len()).map_err(|_| ProtocolError::InvalidMessage)? 
as usize;\n\n    let header_len = encode_header(msg_type, msg_len, buf)?;\n    let total_len = header_len + msg_len;\n\n    let mut msg_buf = &mut buf[header_len..];\n    msg.encode(&mut msg_buf)?;\n\n    if total_len < 10 {\n        buf[total_len..].fill(0);\n        Ok(10)\n    } else {\n        Ok(total_len)\n    }\n}\n\nfn encode_header(msg_type: u32, msg_len: usize, buf: &mut [u8]) -> Result<usize, ProtocolError> {\n    let msg_type_len = octets::varint_len(msg_type as u64);\n    let prefix_len = octets::varint_len((msg_type_len + msg_len) as u64);\n    let total_len = prefix_len + msg_type_len + msg_len;\n\n    if total_len > MAX_MESSAGE_SIZE {\n        return Err(ProtocolError::InvalidMessage);\n    } else if total_len > buf.len() || buf.len() < 10 {\n        return Err(ProtocolError::ShortBuffer(std::cmp::max(total_len, 10)));\n    }\n\n    let off = {\n        let mut hdr = octets::OctetsMut::with_slice(buf);\n        hdr.put_varint((msg_type_len + msg_len) as u64).unwrap();\n        hdr.put_varint(msg_type as u64).unwrap();\n        hdr.off()\n    };\n\n    Ok(off)\n}\n\n// get_varint correctly handles u64 varints, but the protocol specifies u32.\nfn get_varint32(buf: &mut octets::Octets) -> Result<u32, ProtocolError> {\n    let x = match buf.get_varint() {\n        Ok(x) => x,\n        Err(_) => return Err(ProtocolError::InvalidMessage),\n    };\n\n    u32::try_from(x).map_err(|_| ProtocolError::InvalidMessage)\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    macro_rules! 
test_roundtrip {\n        ($name:ident : $value:expr) => {\n            #[test]\n            fn $name() {\n                let msg = $value.into();\n                let mut buf = [0; MAX_MESSAGE_SIZE];\n                let len = encode_message(&msg, &mut buf).unwrap();\n                let (decoded_msg, decoded_len) = decode_message(&buf).unwrap();\n                assert_eq!(msg, decoded_msg);\n                assert_eq!(len, decoded_len);\n            }\n        };\n    }\n\n    test_roundtrip!(test_roundtrip_detach: Detach {});\n\n    test_roundtrip!(test_roundtrip_error: Error {\n        err_code: 1,\n        error_text: \"test\".to_string(),\n    });\n\n    test_roundtrip!(test_roundtrip_smallframe: VideoChunk {\n        attachment_id: 0,\n        session_id: 1,\n        stream_seq: 1,\n        seq: 2,\n        chunk: 3,\n        num_chunks: 4,\n        data: bytes::Bytes::from(vec![9; 52]),\n        timestamp: 1234,\n        ..Default::default()\n    });\n\n    test_roundtrip!(test_roundtrip_frame: VideoChunk {\n        attachment_id: 0,\n        session_id: 1,\n        stream_seq: 1,\n        seq: 2,\n        chunk: 3,\n        num_chunks: 4,\n        data: bytes::Bytes::from(vec![9; 1200]),\n        timestamp: 1234,\n        hierarchical_layer: 0,\n        ..Default::default()\n    });\n\n    #[test]\n    fn invalid_message_type() {\n        let msg_type = 999;\n\n        let msg_buf = [100_u8; 322];\n        let msg_len = msg_buf.len();\n\n        // Create a fake message with a msg_type of 999.\n        let mut buf = [0; MAX_MESSAGE_SIZE];\n        let header_len =\n            encode_header(msg_type, msg_len, &mut buf).expect(\"failed to encode fake message\");\n        let total_len = header_len + msg_len;\n        buf[header_len..total_len].copy_from_slice(&msg_buf);\n\n        match decode_message(&buf) {\n            Err(ProtocolError::InvalidMessageType(t, len)) => {\n                assert_eq!(t, 999);\n                assert_eq!(len, total_len);\n 
           }\n            v => panic!(\"expected InvalidMessageType, got {:?}\", v),\n        }\n    }\n}\n"
  },
  {
    "path": "mm-protocol/src/messages.proto",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nsyntax = \"proto3\";\n\npackage messages;\n\n// # The Magic Mirror Streaming Protocol (MMSP) version 0.3.0\n//\n// This document describes a protocol for remote application streaming. Using\n// this protocol, a client can remotely launch and/or attach an application on a\n// server accessible only via network connection and display the output locally,\n// while sending input commands.\n//\n// ## Differences to traditional remote desktop protocols\n//\n// Individual sessions in the protocol concern applications, rather than desktop\n// environments. Although not part of the protocol itself, server\n// implementations are expected to render applications offscreen, rather than\n// displaying a central, shared desktop either in whole or part. This allows\n// multiple sessions to coexist without interfering with each other.\n//\n// Following from this, session parameters such as framerate and resolution are\n// set by the client, not the server.\n//\n// Note that nothing stops a session from containing an entire desktop\n// environment. The protocol makes no general distinction between desktop\n// environments and other applications, such as GPU-accelerated games. It does,\n// however, provide some built-in support for client-side cursor and\n// wireframe rendering, as well as clipboard operations, in order to improve the\n// remote desktop experience should the server and desktop support it. TODO: no\n// wireframe or clipboard support yet\n//\n// ## Protocol basics\n//\n// At a high level, the protocol consists of messages passed bidirectionally\n// over a QUIC connection.\n//\n// Servers and clients should use the `mm00` ALPN identifier. The number\n// will increase in future revisions of the protocol - see 'Protocol Versioning'\n// below.\n//\n// A message is an arbitrary-length byte blob, beginning with two unsigned\n// varints. 
The first varint holds the total length of the rest of the message,\n// including the bytes required to hold the next varint. The second is a message\n// type. The remaining bytes are a protobuf-encoded message matching the message\n// type and this specification.\n//\n//     [ 1-5(A) bytes: prefixed length (N) ]\n//       [ 1-5(B) bytes: message type ]\n//       [ N - B bytes, protobuf-encoded ]\n//     [ max(0, 10 - N - A) bytes, padding ]\n//\n// The total length of a message must not exceed 1MiB (1048576 bytes), and the\n// message type must fit in an unsigned 32-bit integer. Neither value may be\n// zero. Therefore, the minimum size for each varint is 1 byte, and the maximum\n// is 5 bytes. The length of the message itself may be zero bytes, if the\n// message type has no required fields. However, if a message would be less than\n// 10 bytes, it should be padded with zeroes to ten bytes before being written\n// to the stream. The length N should not include those bytes.\n//\n// The protocol generally uses a single QUIC bidirectional stream for each\n// session attachment, and describes in the documentation for each message type\n// which stream it should use. The protocol also optionally makes use of the\n// [QUIC DATAGRAM extension][quic_datagram], in particular for video\n// and audio frames. In the case that a message is sent in a datagram frame, the\n// max length must not exceed QUIC's maximum datagram size.\n//\n// QUIC streams are, by their nature, ordered, so messages sent in sequence in a\n// stream may be considered ordered. 
However, client and server messages are not\n// considered ordered with respect to each other, and messages sent as datagrams\n// are not inherently considered ordered with respect to any stream messages.\n// Where ordering is important, the protocol includes hints for the client and\n// server.\n//\n// Unless specified in the documentation below, all message fields are optional,\n// and their absence (in the form of empty values) should be handled gracefully\n// by the receiver. A field marked as required may not be empty.\n//\n// ## Protocol Versioning\n//\n// The protocol follows Semantic Versioning rules, as defined by this document:\n// <https://semver.org/>\n//\n// As such, servers should support clients using the same major version of the\n// protocol, and vice versa, with the exception of major version 0 (the current\n// version), for which these rules are relaxed.\n//\n// Compatibility considers the QUIC features used, protobuf wire compatibility\n// (such as changes to field tags, but not field or message naming), and\n// required/optional semantics of messages as documented (in particular, adding\n// a field that is documented as required is a breaking change).\n//\n// # Message types\n//\n// ## Common types\n//\n// Some protobuf messages are reused in multiple messages below.\n\n// ### Timestamp\n//\n// Represents an instant independent of time zone or local calendar, represented\n// as the sum of seconds and nanoseconds since the UNIX epoch of January 1st,\n// 1970.\nmessage Timestamp {\n  int64 seconds = 1; // Required.\n  int64 nanos = 2;   // Required.\n}\n\n// ### Size, Extent\n//\n// A `Size` is a width, height tuple, mainly used to describe areas. An `Extent`\n// includes a starting position. 
The coordinate space depends on where these\n// structs are used, but should always be oriented with [0, 0] in the top left\n// position.\nmessage Size {\n  uint32 width = 1;  // Required.\n  uint32 height = 2; // Required.\n}\n\nmessage Extent {\n  uint32 x = 1;\n  uint32 y = 2;\n  uint32 width = 3;  // Required.\n  uint32 height = 4; // Required.\n}\n\n// ### Pixel Scale\n//\n// Represents a rational number, used in the context of HiDPI displays.\n// Fractions less than one are not allowed. For example, a pixel density of 1.5\n// would be represented as 3/2.\nmessage PixelScale {\n  uint32 numerator = 1;   // Required.\n  uint32 denominator = 2; // Required.\n}\n\n// ### Virtual Output Params\n//\n// Represents the configuration of a virtual display, which is required to\n// launch a session.\nmessage VirtualDisplayParameters {\n  Size resolution = 1;     // Required.\n  uint32 framerate_hz = 2; // Required.\n  PixelScale ui_scale = 3; // Required.\n}\n\n// ### Attachment type\n//\n// This refers to the manner of attachment.\nenum AttachmentType {\n  ATTACHMENT_TYPE_UNKNOWN = 0;\n  ATTACHMENT_TYPE_OPERATOR = 1;\n  ATTACHMENT_TYPE_VIEWER = 2;\n}\n\n// ### Video codec\n//\n// This refers to the codec used for a video stream.\nenum VideoCodec {\n  VIDEO_CODEC_UNKNOWN = 0;\n  VIDEO_CODEC_H264 = 1;\n  VIDEO_CODEC_H265 = 2;\n  VIDEO_CODEC_AV1 = 3;\n}\n\n// ### Video profile\n//\n// This refers to the profile used for a video stream. 
Profiles are fully\n// defined in the output section, below.\nenum VideoProfile {\n  VIDEO_PROFILE_UNKNOWN = 0;\n  VIDEO_PROFILE_HD = 1;\n  VIDEO_PROFILE_HDR10 = 2;\n}\n\n// ### Audio codec\n//\n// This refers to the codec used for an audio stream.\nenum AudioCodec {\n  AUDIO_CODEC_UNKNOWN = 0;\n  AUDIO_CODEC_OPUS = 1;\n}\n\n// ### Audio channels\n//\n// This defines a map of channels to speaker positions.\nmessage AudioChannels {\n  enum Channel {\n    CHANNEL_MONO = 0;\n    CHANNEL_FRONT_LEFT = 1;\n    CHANNEL_FRONT_RIGHT = 2;\n    CHANNEL_FRONT_CENTER = 3;\n    CHANNEL_REAR_CENTER = 4;\n    CHANNEL_REAR_LEFT = 5;\n    CHANNEL_REAR_RIGHT = 6;\n    CHANNEL_LFE = 7;\n    CHANNEL_FRONT_LEFT_OF_CENTER = 8;\n    CHANNEL_FRONT_RIGHT_OF_CENTER = 9;\n    CHANNEL_SIDE_LEFT = 10;\n    CHANNEL_SIDE_RIGHT = 11;\n  }\n\n  repeated Channel channels = 1;\n}\n\n// ### FEC Scheme\n//\n// Indicates a Forward Error Correction scheme used to protect packets,\n// and contains any data needed to locate a chunk within a packet.\n//\n// Definitions for the concepts can be found in RFCs 3452 and 5052.\nmessage FECMetadata {\n  enum FECScheme {\n    FEC_SCHEME_UNKNOWN = 0;\n\n    // Uses the RaptorQ scheme defined in RFC 6330.\n    FEC_SCHEME_RAPTORQ = 6;\n  }\n\n  // Required. Indicates the scheme used.\n  FECScheme fec_scheme = 1;\n\n  // Required. Contains the scheme-specific serialized Payload ID.\n  bytes fec_payload_id = 2;\n\n  // Required. 
Contains the scheme-specific serialized Object Transmission\n  // Information (OTI).\n  bytes fec_oti = 3;\n}\n\n\n\n// ### Gamepad\n//\n// A gamepad ID and metadata.\nmessage Gamepad {\n  enum GamepadLayout {\n    GAMEPAD_LAYOUT_UNKNOWN = 0;\n    GAMEPAD_LAYOUT_GENERIC_DUAL_STICK = 1;\n    GAMEPAD_LAYOUT_SONY_DUALSHOCK = 2;\n  }\n\n  uint64 id = 1;            // Required.\n  GamepadLayout layout = 2; // Required.\n}\n\n// ### Application Image Format\n//\n// Distinguishes between different images associated with an application.\nenum ApplicationImageFormat {\n  APPLICATION_IMAGE_FORMAT_UNKNOWN = 0;\n  // A roughly 400x200 image for display in a list of applications.\n  APPLICATION_IMAGE_FORMAT_HEADER = 1;\n}\n\n// ## Errors and exceptions\n\n// ### 001 - Error\n//\n// This message may be sent by a server or client at any time on any stream.\nmessage Error {\n  enum ErrorCode {\n    ERROR_UNKNOWN = 0;\n    // Used to indicate an unrecoverable error on the server.\n    ERROR_SERVER = 10;\n    // Used to indicate a protocol violation.\n    ERROR_PROTOCOL = 20;\n    ERROR_PROTOCOL_UNEXPECTED_MESSAGE = 21;\n    ERROR_PROTOCOL_INCORRECT_STREAM = 22;\n    ERROR_PROTOCOL_UNKNOWN_MESSAGE_TYPE = 23;\n    ERROR_TIMEOUT = 24;\n    ERROR_APPLICATION_NOT_FOUND = 25;\n    ERROR_APPLICATION_NO_IMAGE = 26;\n    // Used to indicate that the server refuses to launch a session.\n    ERROR_SESSION_LAUNCH_FAILED = 30;\n    ERROR_SESSION_LAUNCH_REFUSED = 31;\n    // Used to indicate the session update couldn't be applied.\n    ERROR_SESSION_UPDATE_FAILED = 32;\n    // Used to indicate that the server refuses to allow the client to attach\n    // to the session.\n    ERROR_ATTACHMENT_REFUSED = 40;\n    ERROR_ATTACHMENT_PARAMS_NOT_SUPPORTED = 41;\n    // Used to indicate that the session has ended.\n    ERROR_SESSION_ENDED = 50;\n    ERROR_SESSION_ENDED_BY_CLIENT = 51;\n    ERROR_SESSION_ENDED_APPLICATION_EXIT = 52;\n    // Used for several session operations.\n    ERROR_SESSION_NOT_FOUND 
= 60;\n    ERROR_SESSION_INVALID_STATE = 61;\n    ERROR_SESSION_PARAMS_NOT_SUPPORTED = 62;\n    // Used to indicate a failed authentication attempt or ignored challenge.\n    ERROR_AUTHENTICATION_FAILED = 100;\n    // Used to indicate missing or insufficient credentials on another request.\n    ERROR_NOT_ALLOWED = 101;\n  }\n\n  ErrorCode err_code = 1; // Required.\n  string error_text = 3;\n}\n\n// ## Sessions and attachments\n//\n// A session represents a running application on the server. Creating a session\n// launches the application in the background. After the client *attaches* to\n// the session, then and only then must the server start sending video and audio\n// frames. These frames may either be on the attachment stream or sent\n// separately as QUIC datagrams.\n//\n// If supported by the server and application, sessions may have multiple\n// attachments, grouped into \"operators\" and \"viewers\".\n//\n// ### Render vs. streaming resolution\n//\n// Sessions are defined by a render resolution (with framerate and scale,\n// collectively referred to as the virtual display parameters), while individual\n// attachments are defined by a streaming resolution. The former results in the\n// resolution of the output texture the application renders to, while the latter\n// refers to the dimensions of the compressed video stream.\n//\n// Servers must support streaming at the exact render resolution, but they may\n// also optionally support different render and streaming resolutions. The most\n// common use case for this would be to render at a \"super resolution\", ie an\n// integer multiple of the streaming resolution, to improve quality in\n// environments with limited bandwidth, or to support \"preview\" attachments\n// which stream at a very low resolution.\n//\n// Servers must either obey the requested render resolution or reject the\n// corresponding `013 - Launch Session` or `015 - Update Session` message with\n// an error. 
Similarly, servers must either obey the requested streaming\n// resolution or reject the corresponding `030 - Attach` message.\n//\n// Servers must always emit encoded frames at the virtual display framerate.\n\n// ### Resolution changes\n//\n// Servers may choose to update the render resolution of a session at any time,\n// for example at the request of a client, or in the case that an app requests a\n// new resolution. Servers must inform existing attachments of the new\n// resolution using the `033 - Session Parameters Changed` message.\n// Additionally, if the streaming resolution of existing attachments is no\n// longer compatible with the new resolution, the server may indicate that in\n// the message.\n//\n// ### HiDPI passthrough\n//\n// Clients on screens with a pixel density higher than one may inform the server\n// at session creation time, or request a change to an existing session with\n// `015 - Update Session`. In any case, the render resolution specified is still\n// the final resolution, not the \"logical\" resolution. For example, a client\n// requesting a `render_resolution` of 2560x1600 with a UI scale of 2 would\n// still result in a render resolution of 2560x1600; the UI scale should be\n// passed as a hint to the application in whatever platform-specific way makes\n// sense. This is important because many applications are able to automatically\n// scale UI elements or make other user-experience improvements subject to UI\n// scale.\n//\n// ### Quality preset\n//\n// Clients can use the `quality_preset` field of the `030 - Attach` message to\n// tune the quality of the stream, which is inversely related to the bandwidth\n// usage. The value ranges from 1 to 10, with 1 indicating that the client\n// wishes the server to optimize for the lowest possible bandwidth usage,\n// and 10 indicating that the client wishes the server to optimize for the\n// highest possible quality. 
How these values are interpreted is determined by\n// the server.\n//\n// ### Concurrent attachments\n//\n// Servers may support multiple concurrent attachments from different clients,\n// for example to support secondary \"viewer\" attachments. If the parameters of\n// the attachments differ, the server may choose to encode multiple streams at\n// different resolutions, or it may simply choose one (the operator's attachment\n// parameters should take precedence) and use that for all attachments.\n\n// TODO: attachments should probably be distinct for audio and video, so that\n// reattaching doesn't cause audio to skip\n\n// ### 011 - List Applications\n//\n// This message, which must originate from the client on a new stream, requests\n// a list of available applications to launch as sessions. The server must\n// either respond with an `012 - Application List` message or an `001 - Error`\n// message on the same stream.\nmessage ListApplications {}\n\n// ### 012 - Application List\n//\n// This message, which must originate from the server on the same stream as a\n// corresponding `011 - List Applications` message, indicates the list of\n// available applications to launch as sessions.\nmessage ApplicationList {\n  message Application {\n    string id = 1; // Required. Must be unique.\n    string description = 2;\n\n    // A list of path components, used to group applications for display.\n    repeated string folder = 3;\n\n    // If set, the image can be fetched with a `021 - Fetch Application Image`\n    // message.\n    repeated ApplicationImageFormat images_available = 4;\n  }\n\n  repeated Application list = 1;\n}\n\n// ### 013 - Launch Session\n//\n// This message, which must originate from the client on a new stream, requests\n// that the server launch the application specified by `id`. 
The id should match\n// the id of an application returned by `012 - Application List`.\n//\n// The server must either launch a session, replying with `014 - Session\n// Launched` once the session has started and is available to attach, or send an\n// `001 - Error` message on the same stream indicating why it refuses to do so.\nmessage LaunchSession {\n  // Required; must match the id of an application returned in \"12 - Application\n  // List\".\n  string application_id = 1;\n\n  VirtualDisplayParameters display_params = 10; // Required.\n\n  // Any gamepads that should be available at the start of the session. This is\n  // sometimes important for applications that don't correctly support\n  // hotplugged devices.\n  //\n  // These gamepads should be considered permanently connected, and\n  // GamepadUnavailable events should be ignored for them.\n  repeated Gamepad permanent_gamepads = 20;\n}\n\n// ### 014 - Session Launched\n//\n// This message, which must originate from the server on the same stream as the\n// corresponding `013 - Launch Session` message, indicates that the session has\n// successfully launched and may be attached.\nmessage SessionLaunched {\n  uint64 id = 1; // Required.\n\n  // Required. Must include at least the `render_resolution` specified in the\n  // corresponding `013 - Launch Session` message.\n  repeated Size supported_streaming_resolutions = 10;\n\n  // TODO supported_sample_rate?\n}\n\n// ### 015 - Update Session\n//\n// This message, which must originate from the client on a new stream, requests\n// that the server update the parameters of a running session. An omitted value\n// indicates that the existing setting should remain. 
The server must respond\n// with either `016 - Session Updated` or `001 - Error` on the same stream.\nmessage UpdateSession {\n  uint64 session_id = 1; // Required.\n\n  VirtualDisplayParameters display_params = 10;\n}\n\n// ### 016 - Session Updated\n//\n// This message, which must originate from the server on the same stream as the\n// corresponding `015 - Update Session` message, indicates that the requested\n// update was successfully applied.\nmessage SessionUpdated {}\n\n// ### 017 - List Sessions\n//\n// This message, which must originate from the client on a new stream, requests\n// a list of attachable sessions. The server must respond with either `018 -\n// Session List` or an `001 - Error` on the same stream.\nmessage ListSessions {}\n\n// ### 018 - Session List\n//\n// This message, which must originate from the server on the same stream as the\n// corresponding `017 - List Sessions` request, indicates a list of attachable\n// sessions to the client.\nmessage SessionList {\n  message Session {\n    uint64 session_id = 1;       // Required.\n    string application_id = 2;   // Required.\n    Timestamp session_start = 3; // Required.\n\n    VirtualDisplayParameters display_params = 10; // Required.\n\n    // Required. Must include at least the `render_resolution` of the session.\n    repeated Size supported_streaming_resolutions = 13;\n\n    // Required if any were set in the original `013 - Launch Session` event.\n    repeated Gamepad permanent_gamepads = 20;\n\n    // TODO attachable type?\n    // TODO existing attachments?\n  }\n\n  repeated Session list = 1;\n}\n\n// ### 019 - End Session\n//\n// This message, which must originate from the client on a new stream, requests\n// that the server end the named session and detach all clients.\n//\n// If a server chooses to comply, it should send `001 - Error` messages to all\n// other attached clients (with ERR_SESSION_ENDED_BY_CLIENT), and an `020 -\n// Session Ended` message on this stream. 
Otherwise, it should send an `001 -\n// Error` message on this stream.\nmessage EndSession {\n  uint64 session_id = 1; // Required.\n}\n\n// ### 020 - Session Ended.\n//\n// This message, which must originate from the server on the same stream as the\n// corresponding `019 - End Session` message, confirms that the session has been\n// ended.\nmessage SessionEnded {}\n\n// ### 021 - Fetch Application Image\n//\n// This message, which must originate from the client on a new stream, requests\n// image metadata for an application. The server must respond with either an\n// `022 - Application Image` message or an `001 - Error` message on the same\n// stream.\nmessage FetchApplicationImage {\n  string application_id = 1;  // Required.\n  ApplicationImageFormat format = 2; // Required.\n}\n\n// ### 022 - Application Image\n//\n// This message, which must originate from the server on the same stream as the\n// corresponding `021 - Fetch Application Image` message, sends the requested\n// image data to the client.\nmessage ApplicationImage {\n  // Required. Must be a complete PNG file and less than 1048576 bytes. Either\n  // restriction may be lifted in the future.\n  bytes image_data = 1;\n}\n\n// ### 030 - Attach\n//\n// This message, which must originate from the client on a new stream, requests\n// that the server attach the client to the named session. 
Upon receipt of this\n// request, the server must either refuse the attachment with an `001 - Error`\n// message, or send an `031 - Attached` message on the same stream and start\n// sending video and audio packets to the client.\n//\n// Omitted fields indicate that the server should choose the parameters.\n//\n// The server may choose to reject the attachment for any reason, including but\n// not limited to:\n//\n//  - The output parameters, such as resolution or codec, are invalid or not\n//    supported.\n//  - The server already has a client attached to that session, and wishes to\n//    limit the number of attachments (or doesn't support multiple attachments).\n//  - The authentication so far provided doesn't grant the client access to that\n//    session with that attachment type.\nmessage Attach {\n  uint64 session_id = 1;              // Required.\n  AttachmentType attachment_type = 2; // Required.\n  string client_name = 3;\n\n  VideoCodec video_codec = 10;\n  Size streaming_resolution = 11;\n  VideoProfile video_profile = 12;\n  uint32 quality_preset = 13; // Must be in the range 1-10.\n\n  AudioCodec audio_codec = 15;\n  AudioChannels channels = 16;\n  uint32 sample_rate_hz = 17;\n}\n\n// ### 031 - Attached\n//\n// This message, which must originate from the server on the same stream as the\n// original `030 - Attach` message, indicates that the\n// server accepts the client and will begin streaming with the client's\n// requested parameters. 
The parameters must match the parameters sent in the\n// original `030 - Attach` message, or represent the server-chosen default if\n// they were omitted.\nmessage Attached {\n  uint64 session_id = 1;    // Required.\n  uint64 attachment_id = 2; // Required.\n\n  VideoCodec video_codec = 10;     // Required.\n  Size streaming_resolution = 11;  // Required.\n  VideoProfile video_profile = 12; // Required.\n  uint32 quality_preset = 13;      // Required.\n\n  AudioCodec audio_codec = 15; // Required.\n  AudioChannels channels = 16; // Required.\n  uint32 sample_rate_hz = 17;  // Required.\n}\n\n// ### 032 - Keep Alive\n//\n// This message, which must originate from the client on the stream where the\n// original `030 - Attach` message was sent, indicates that the client\n// is still attached. The server may take the absence of a regular `Keep Alive`\n// message to indicate that the client has gone away and should be considered\n// detached.\nmessage KeepAlive {}\n\n// ### 033 - Session Parameters Changed\n//\n// This message, which must originate from the server on the same stream as the\n// original `030 - Attach` message, indicates that the parameters of the\n// attached session have changed. If `reattach_required` is set to true, the\n// client should consider the attachment to be ended and reattach with new\n// parameters.\nmessage SessionParametersChanged {\n  bool reattach_required = 1;\n\n  VirtualDisplayParameters display_params = 10;\n\n  // Required. Must include at least the `render_resolution` of the session.\n  repeated Size supported_streaming_resolutions = 13;\n}\n\n// ### 035 - Detach\n//\n// This message, which must originate from the client on the stream where the\n// original `030 - Attach` message was sent, indicates that the client\n// wishes to detach and end streaming. 
Upon receipt of this message, the server\n// must stop streaming frames or accepting input on the attachment stream.\nmessage Detach {}\n\n// ## Output\n//\n// This section pertains to the application output, streamed from server to\n// client.\n//\n// Output packets, whether audio or video, are always part of a session, an\n// attachment, and a stream. A session may have multiple attachments, and an\n// attachment may periodically restart its audio or video stream, resulting in a\n// new stream. As packets may be too large to send in one datagram, they may be\n// chunked by the server. Therefore, a fourth identifier, a packet sequence\n// number, is used to group chunks in a sequence of potentially unordered\n// datagrams.\n//\n// All four identifiers (session, attachment, stream, and packet) should be\n// considered opaque to the client. However, the stream and packet sequence\n// numbers should only increase monotonically as new packets and new streams are\n// created. See the section below for more detail.\n//\n// The contents of each packet are opaque, and depend on the codec being used.\n//\n// Servers should only send packets for one video and one audio stream for one\n// attachment at a time.\n//\n// ### Datagram support\n//\n// If both server and client support the QUIC Datagram extension (RFC 9221),\n// then output packets should be sent as datagrams. If either client or server\n// do not support datagrams, the chunks must be sent on the same stream as the\n// original `030 - Attach` message was sent.\n//\n// Since datagrams are not associated with any particular QUIC stream, the\n// `session_id` and `attachment_id` fields of the below messages may be\n// necessary to disambiguate received chunks. 
However, to reduce overhead, a\n// server may omit both fields if sending chunks on the original attachment\n// stream, rather than as datagrams.\n//\n// ### Multiple attachments\n//\n// To determine video stream parameters in the case of multiple concurrent\n// attachments to the same session, operator streams should take precedence.\n//\n// ### Video compression\n//\n// The following apply to all supported video codecs:\n//\n//  - The server must tag the video bitstream with resolution, framerate, and\n//    YCbCr color space/range using whatever mechanism is supported by the codec\n//    (for example, PPS/VUI frames in H.264). Clients should use this\n//    information to verify that the parameters match the requested attachment\n//    parameters.\n//  - The server must use YCbCr 4:2:0 chroma subsampling for the compressed\n//    stream (this is sometimes called YUV420P, and is the default for most\n//    implementations of H264, H265, and AV1).\n//  - For VIDEO_PROFILE_HD, a bit depth of 8, along with the Rec.709 color space\n//    and limited range must be used. For H.264, H.265, and AV1, this\n//    corresponds to `colour_primaries`, `transfer_characteristics`, and\n//    `matrix_coeffs` all equal to 1, and the `video_full_range_flag` set to 0\n//    (named `color_range` for AV1).\n//  - For VIDEO_PROFILE_HDR10, a bit depth of 10, along with the Rec. 2100 color\n//    space and limited range must be used. For H.264, H.265, and AV1, this\n//    corresponds to `colour_primaries` and `matrix_coeffs` equal to 9,\n//    `transfer_characteristics` equal to 16, and the `video_full_range_flag`\n//    set to 0 (named `color_range` for AV1). 
The server should additionally use\n//    SEI headers (or metadata OBUs for AV1) to communicate HDR metadata such as\n//    mastering display color volume (MDCV) and content light level (CLL)\n//    information.\n//  - The server may reuse an existing compression context for a new attachment,\n//    but in this case the stream must be resumable by the client within a\n//    reasonable time frame. For H.265, for example, this means sending headers\n//    with every keyframe, and a keyframe immediately after the attachment\n//    begins.\n//\n// ### Audio compression\n//\n// The following apply to all supported audio codecs:\n//\n// - The server must use a 10ms or smaller packet size.\n// - Audio streams must use a sample rate of between 16kHz and 48kHz.\n\n// ### 051 - Video Chunk\n//\n// This message, which must originate from the server as a datagram or on the\n// same stream as the original `030 - Attach` message, contains a part of a\n// video packet.\n//\n// Much of the metadata associated with the chunk applies to all chunks of a\n// given packet, but must be repeated on each chunk such that a client\n// receiving the first (potentially out-of-order) chunk has sufficient\n// information to deal with it.\nmessage VideoChunk {\n  // Required unless sent on the same stream as the original attach message.\n  uint64 session_id = 1;\n  uint64 attachment_id = 2;\n\n  // Required. Represents the ordering of packets in a stream and the\n  // association of packets to a video stream.\n  uint64 stream_seq = 10;\n  uint64 seq = 11;\n\n  // Required unless an FEC scheme is used. 
Taken together, these represent the\n  // placement of a chunk within a packet.\n  //\n  // If `fec_metadata` is set, both these fields must be unset.\n  uint32 chunk = 12;\n  uint32 num_chunks = 13;\n\n  // If the encoder is using hierarchical coding (sometimes called SVC), this\n  // field indicates the layer that the packet belongs to.\n  uint32 hierarchical_layer  = 16;\n\n  // Contains FEC metadata to locate the chunk within the overall packet.\n  FECMetadata fec_metadata = 15;\n\n  // Required. A millisecond timestamp with an arbitrary epoch, used to\n  // synchronize audio and video streams.\n  uint64 timestamp = 20;\n\n  // Required. The chunk of the video packet, or, if an FEC scheme is used, a\n  // single symbol from the stream of symbols.\n  bytes data = 99;\n}\n\n// ### 052 - Request Video Refresh\n//\n// This message, which must be sent by the client on the same stream as the\n// original `030 - Attach` message, requests that the server perform an IDR\n// refresh in the current video stream as soon as possible. The server should\n// send stream headers and a full keyframe as soon as possible, unless the\n// indicated stream is ended or restarted.\nmessage RequestVideoRefresh {\n  uint64 stream_seq = 1;\n}\n\n// ### 056 - Audio Chunk\n//\n// This message, which must originate from the server as a datagram or on the\n// same stream as the original `030 - Attach` message, contains a part of an\n// audio packet.\nmessage AudioChunk {\n  // Required unless sent on the same stream as the original attach message.\n  uint64 session_id = 1;\n  uint64 attachment_id = 2;\n\n  // Required. Represents the ordering of packets in a stream and the\n  // association of packets to an audio stream.\n  uint64 stream_seq = 10;\n  uint64 seq = 11;\n\n  // Required unless an FEC scheme is used. 
Taken together, these represent the\n  // placement of a chunk within a packet.\n  //\n  // If `fec_metadata` is set, both these fields must be unset.\n  uint32 chunk = 12;\n  uint32 num_chunks = 13;\n\n  // Contains FEC metadata to locate the chunk within the overall packet.\n  FECMetadata fec_metadata = 15;\n\n  // Required. A millisecond timestamp with an arbitrary epoch, used to\n  // synchronize audio and video streams.\n  uint64 timestamp = 20;\n\n  // Required. The chunk of the audio packet, or, if an FEC scheme is used, a\n  // single symbol from the stream of symbols.\n  bytes data = 99;\n}\n\n// ## Input\n//\n// Input messages are used by the client to indicate user interaction, whether\n// it be via a keyboard, mouse, gamepad, or some other input. Input is always\n// scoped to an attachment and sent on the attachment stream.\n//\n// ### Relative vs absolute cursor motion\n//\n// Clients are responsible for sending both absolute and relative pointer motion\n// events. The two event types are unrelated and do not compound; the former\n// represents the visible location of the cursor, while the latter represents\n// raw motion vectors from the device.\n//\n// Absolute motion is indicated by `063 - Pointer Motion` messages, while\n// relative motion is indicated by `069 - Relative Pointer Motion` messages.\n//\n// Absolute motion events are always necessary. 
Clients may choose to send\n// relative motion events only when the cursor is locked by a `067 - Lock\n// Pointer` event, until the cursor is released by a corresponding `068 -\n// Release Pointer` event.\n\n// ### 060 - Keyboard Input\n//\n// This message, which must originate from the client on the same stream as the\n// original `030 - Attach` message, represents keyboard input from the user.\nmessage KeyboardInput {\n  enum KeyState {\n    KEY_STATE_UNKNOWN = 0;\n    KEY_STATE_PRESSED = 1;\n    KEY_STATE_REPEAT = 2;\n    KEY_STATE_RELEASED = 3;\n  }\n\n  // These map to the keycodes from the W3C \"UI Events\" specification. It\n  // represents the key location, irrespective of keyboard layout or character\n  // output.\n  //\n  // Media and remote control keys are omitted.\n  //\n  // https://w3c.github.io/uievents-code/#code-value-tables\n  enum Key {\n    KEY_UNKNOWN = 0;\n    KEY_BACKQUOTE = 1;\n    KEY_BACKSLASH = 2;\n    KEY_BRACKET_LEFT = 3;\n    KEY_BRACKET_RIGHT = 4;\n    KEY_COMMA = 5;\n    KEY_DIGIT_0 = 10;\n    KEY_DIGIT_1 = 11;\n    KEY_DIGIT_2 = 12;\n    KEY_DIGIT_3 = 13;\n    KEY_DIGIT_4 = 14;\n    KEY_DIGIT_5 = 15;\n    KEY_DIGIT_6 = 16;\n    KEY_DIGIT_7 = 17;\n    KEY_DIGIT_8 = 18;\n    KEY_DIGIT_9 = 19;\n    KEY_EQUAL = 20;\n    KEY_INTL_BACKSLASH = 21;\n    KEY_INTL_RO = 22;\n    KEY_INTL_YEN = 23;\n    KEY_A = 30;\n    KEY_B = 31;\n    KEY_C = 32;\n    KEY_D = 33;\n    KEY_E = 34;\n    KEY_F = 35;\n    KEY_G = 36;\n    KEY_H = 37;\n    KEY_I = 38;\n    KEY_J = 39;\n    KEY_K = 40;\n    KEY_L = 41;\n    KEY_M = 42;\n    KEY_N = 43;\n    KEY_O = 44;\n    KEY_P = 45;\n    KEY_Q = 46;\n    KEY_R = 47;\n    KEY_S = 48;\n    KEY_T = 49;\n    KEY_U = 50;\n    KEY_V = 51;\n    KEY_W = 52;\n    KEY_X = 53;\n    KEY_Y = 54;\n    KEY_Z = 55;\n    KEY_MINUS = 60;\n    KEY_PERIOD = 61;\n    KEY_QUOTE = 62;\n    KEY_SEMICOLON = 63;\n    KEY_SLASH = 64;\n    KEY_ALT_LEFT = 65;\n    KEY_ALT_RIGHT = 66;\n    KEY_BACKSPACE = 67;\n    KEY_CAPS_LOCK = 68;\n    
KEY_CONTEXT_MENU = 69;\n    KEY_CONTROL_LEFT = 70;\n    KEY_CONTROL_RIGHT = 71;\n    KEY_ENTER = 72;\n    KEY_META_LEFT = 73;\n    KEY_META_RIGHT = 74;\n    KEY_SHIFT_LEFT = 75;\n    KEY_SHIFT_RIGHT = 76;\n    KEY_SPACE = 77;\n    KEY_TAB = 78;\n    KEY_CONVERT = 79;\n    KEY_KANA_MODE = 80;\n    KEY_LANG_1 = 81;\n    KEY_LANG_2 = 82;\n    KEY_LANG_3 = 83;\n    KEY_LANG_4 = 84;\n    KEY_LANG_5 = 85;\n    KEY_NON_CONVERT = 86;\n    KEY_DELETE = 87;\n    KEY_END = 88;\n    KEY_HELP = 89;\n    KEY_HOME = 90;\n    KEY_INSERT = 91;\n    KEY_PAGE_DOWN = 92;\n    KEY_PAGE_UP = 93;\n    KEY_ARROW_DOWN = 94;\n    KEY_ARROW_LEFT = 95;\n    KEY_ARROW_RIGHT = 96;\n    KEY_ARROW_UP = 97;\n    KEY_NUM_LOCK = 100;\n    KEY_NUMPAD_0 = 101;\n    KEY_NUMPAD_1 = 102;\n    KEY_NUMPAD_2 = 103;\n    KEY_NUMPAD_3 = 104;\n    KEY_NUMPAD_4 = 105;\n    KEY_NUMPAD_5 = 106;\n    KEY_NUMPAD_6 = 107;\n    KEY_NUMPAD_7 = 108;\n    KEY_NUMPAD_8 = 109;\n    KEY_NUMPAD_9 = 110;\n    KEY_NUMPAD_ADD = 111;\n    KEY_NUMPAD_BACKSPACE = 112;\n    KEY_NUMPAD_CLEAR = 113;\n    KEY_NUMPAD_CLEAR_ENTRY = 114;\n    KEY_NUMPAD_COMMA = 115;\n    KEY_NUMPAD_DECIMAL = 116;\n    KEY_NUMPAD_DIVIDE = 117;\n    KEY_NUMPAD_ENTER = 118;\n    KEY_NUMPAD_EQUAL = 119;\n    KEY_NUMPAD_HASH = 120;\n    KEY_NUMPAD_MEMORY_ADD = 121;\n    KEY_NUMPAD_MEMORY_CLEAR = 122;\n    KEY_NUMPAD_MEMORY_RECALL = 123;\n    KEY_NUMPAD_MEMORY_STORE = 124;\n    KEY_NUMPAD_MEMORY_SUBTRACT = 125;\n    KEY_NUMPAD_MULTIPLY = 126;\n    KEY_NUMPAD_PAREN_LEFT = 127;\n    KEY_NUMPAD_PAREN_RIGHT = 128;\n    KEY_NUMPAD_SUBTRACT = 129;\n    KEY_ESCAPE = 200;\n    KEY_F1 = 201;\n    KEY_F2 = 202;\n    KEY_F3 = 203;\n    KEY_F4 = 204;\n    KEY_F5 = 205;\n    KEY_F6 = 206;\n    KEY_F7 = 207;\n    KEY_F8 = 208;\n    KEY_F9 = 209;\n    KEY_F10 = 210;\n    KEY_F11 = 211;\n    KEY_F12 = 212;\n    KEY_FN = 213;\n    KEY_FN_LOCK = 214;\n    KEY_PRINT_SCREEN = 215;\n    KEY_SCROLL_LOCK = 216;\n    KEY_PAUSE = 217;\n    KEY_HIRAGANA = 218;\n    KEY_KATAKANA = 
219;\n  }\n\n  Key key = 1;        // Required. The physical key that was pressed.\n  KeyState state = 2; // Required.\n\n  // A unicode code point for text input, required unless the keypress would\n  // not result in a character.\n  //\n  // This may be completely unrelated to the physical key, depending on the\n  // software keyboard layout on the client side.\n  uint32 char = 3;\n}\n\n// ### 061 - Pointer Entered\n//\n// This message, which must be sent by the client on the same stream as the\n// original `030 - Attach` message, indicates that the Pointer has entered\n// the window area.\nmessage PointerEntered {}\n\n// ### 062 - Pointer Left\n//\n// This message, which must be sent by the client on the same stream as the\n// original `030 - Attach` message, indicates that the Pointer has left the\n// window area.\nmessage PointerLeft {}\n\n// ### 063 - Pointer Motion\n//\n// This message, which must be sent by the client on the same stream as the\n// original `030 - Attach` message, indicates that the Pointer has moved to a\n// new position.\n//\n// The coordinates should be in the space defined by the `streaming_resolution`\n// field of the `030 - Attach` message.\nmessage PointerMotion {\n  double x = 1; // Required.\n  double y = 2; // Required.\n}\n\n\n// ### 064 - Pointer Input\n//\n// This message, which must be sent by the client on the same stream as the\n// original `030 - Attach` message, indicates a Pointer button event.\nmessage PointerInput {\n  enum ButtonState {\n    BUTTON_STATE_UNKNOWN = 0;\n    BUTTON_STATE_PRESSED = 1;\n    BUTTON_STATE_RELEASED = 2;\n  }\n\n  enum Button {\n    BUTTON_UNKNOWN = 0;\n    BUTTON_LEFT = 1;\n    BUTTON_MIDDLE = 2;\n    BUTTON_RIGHT = 3;\n    BUTTON_BACK = 4;\n    BUTTON_FORWARD = 5;\n  }\n\n  Button button = 1;     // Required.\n  ButtonState state = 2; // Required.\n  double x = 3;          // Required.\n  double y = 4;          // Required.\n}\n\n// ### 065 - Pointer Scroll\n//\n// This message, which must be 
sent by the client on the same stream as the\n// original `030 - Attach` message, indicates that the user has scrolled,\n// either using the mouse wheel, a touchpad, or some other mechanism.\n//\n// The scroll_type determines how the values of x and y are interpreted.\n// `CONTINUOUS` indicates a vector in pixels, in the coordinate space defined\n// by the `resolution` parameter of the `VirtualDisplayParams` set on the\n// session. Discrete indicates individual steps, for example on a clicky\n// scroll wheel.\n//\n// In both cases, positive values indicate that the scrolled content should\n// move right and down, revealing more content to the top and left.\nmessage PointerScroll {\n  enum ScrollType {\n    SCROLL_TYPE_UNKNOWN = 0;\n    SCROLL_TYPE_CONTINUOUS = 1;\n    SCROLL_TYPE_DISCRETE = 2;\n  }\n\n  double x = 1;\n  double y = 2;\n  ScrollType scroll_type = 3;\n}\n\n// ### 066 - Update Cursor\n//\n// This message, which must be sent by the server on the same stream as the\n// original `030 - Attach` message, indicates that the cursor image has changed\n// and the client should use the new one when the cursor is over the window.\nmessage UpdateCursor {\n  // Corresponds to the W3C UI specification.\n  //\n  // https://www.w3.org/TR/css-ui-3/#cursor\n  enum CursorIcon {\n    CURSOR_ICON_UNKNOWN = 0;\n    CURSOR_ICON_AUTO = 1;\n    CURSOR_ICON_DEFAULT = 2;\n    CURSOR_ICON_NONE = 3;\n\n    CURSOR_ICON_CONTEXT_MENU = 4;\n    CURSOR_ICON_HELP = 5;\n    CURSOR_ICON_POINTER = 6;\n    CURSOR_ICON_PROGRESS = 7;\n    CURSOR_ICON_WAIT = 8;\n\n    CURSOR_ICON_CELL = 9;\n    CURSOR_ICON_CROSSHAIR = 10;\n    CURSOR_ICON_TEXT = 11;\n    CURSOR_ICON_VERTICAL_TEXT = 12;\n\n    CURSOR_ICON_ALIAS = 13;\n    CURSOR_ICON_COPY = 14;\n    CURSOR_ICON_MOVE = 15;\n    CURSOR_ICON_NO_DROP = 16;\n    CURSOR_ICON_NOT_ALLOWED = 17;\n    CURSOR_ICON_GRAB = 18;\n    CURSOR_ICON_GRABBING = 19;\n\n    CURSOR_ICON_E_RESIZE = 20;\n    CURSOR_ICON_N_RESIZE = 21;\n    CURSOR_ICON_NE_RESIZE = 22;\n 
   CURSOR_ICON_NW_RESIZE = 23;\n    CURSOR_ICON_S_RESIZE = 24;\n    CURSOR_ICON_SE_RESIZE = 25;\n    CURSOR_ICON_SW_RESIZE = 26;\n    CURSOR_ICON_W_RESIZE = 27;\n    CURSOR_ICON_EW_RESIZE = 28;\n    CURSOR_ICON_NS_RESIZE = 29;\n    CURSOR_ICON_NESW_RESIZE = 30;\n    CURSOR_ICON_NWSE_RESIZE = 31;\n    CURSOR_ICON_COL_RESIZE = 32;\n    CURSOR_ICON_ROW_RESIZE = 33;\n    CURSOR_ICON_ALL_SCROLL = 34;\n\n    CURSOR_ICON_ZOOM_IN = 35;\n    CURSOR_ICON_ZOOM_OUT = 36;\n  }\n\n  // Required.\n  CursorIcon icon = 1;\n\n  // The cursor image, encoded as a PNG file. If set, the client should use\n  // this and use the icon field solely as a fallback.\n  bytes image = 2;\n\n  // Relates to the image, if set.\n  uint32 hotspot_x = 3;\n  uint32 hotspot_y = 4;\n}\n\n// ### 067 - Lock Pointer\n//\n// This message, which must originate from the server on the same stream as the\n// original `030 - Attach` message, indicates that the pointer should be locked\n// to the given location.\n//\n// The coordinates should be in the space defined by the `streaming_resolution`\n// field of the `030 - Attach` message.\nmessage LockPointer {\n  double x = 1;\n  double y = 2;\n}\n\n// ### 068 - Release Pointer\n//\n// This message, which must originate from the server on the same stream as the\n// original `030 - Attach` message, indicates the pointer should be no longer\n// be locked.\nmessage ReleasePointer {}\n\n// ### 069 - Relative Pointer Motion\n//\n// This message, which must originate from the client on the same stream as the\n// original `030 - Attach` message, indicates that the Pointer has moved.\n//\n// The vector should be in the space defined by the `streaming_resolution`\n// field of the `030 - Attach` message.\nmessage RelativePointerMotion {\n  double x = 1; // Required.\n  double y = 2; // Required.\n}\n\n// ### 070 - Gamepad Available\n//\n// This message, which must originate from the client on the same stream as the\n// original `030 - Attach` message, indicates that a 
gamepad is available\n// on the client.\nmessage GamepadAvailable {\n  // Required. The ID should remain stable if the gamepad is unplugged and\n  // replugged.\n  Gamepad gamepad = 1;\n}\n\n// ### 071 - Gamepad Unavailable\n//\n// This message, which must originate from the client on the same stream as the\n// original `030 - Attach` message, indicates that a gamepad is no longer\n// available, for example because it was unplugged.\nmessage GamepadUnavailable {\n  uint64 id = 1; // Required.\n}\n\n// ### 072 - Gamepad Motion\n//\n// This message, which must originate from the client on the same stream as the\n// original `030 - Attach` message, indicates movement on a joystick or trigger.\nmessage GamepadMotion {\n  enum GamepadAxis {\n    GAMEPAD_AXIS_UNKNOWN = 0;\n\n    // The left and right joysticks on a standard two-stick gamepad.\n    GAMEPAD_AXIS_LEFT_X = 1;\n    GAMEPAD_AXIS_LEFT_Y = 2;\n    GAMEPAD_AXIS_RIGHT_X = 3;\n    GAMEPAD_AXIS_RIGHT_Y = 4;\n\n    // The soft triggers on a standard two-stick gamepad, usually called\n    // L2 and R2.\n    GAMEPAD_AXIS_LEFT_TRIGGER = 5;\n    GAMEPAD_AXIS_RIGHT_TRIGGER = 6;\n  }\n\n  uint64 gamepad_id = 1; // Required.\n  GamepadAxis axis = 2;  // Required.\n\n  // Required, with a value from -1.0 (towards the top of the gamepad) to 1.0\n  // (towards the bottom of the gamepad). 
Zero always represents the resting\n  // position, and triggers will therefore usually range from 0.0 to 1.0\n  // (fully pressed).\n  double value = 3;\n}\n\n// ### 073 - Gamepad Input\n//\n// This message, which must originate from the client on the same stream as the\n// original `030 - Attach` message, indicates input from a gamepad button.\nmessage GamepadInput {\n  enum GamepadButtonState {\n    GAMEPAD_BUTTON_STATE_UNKNOWN = 0;\n    GAMEPAD_BUTTON_STATE_PRESSED = 1;\n    GAMEPAD_BUTTON_STATE_RELEASED = 2;\n  }\n\n  enum GamepadButton {\n    GAMEPAD_BUTTON_UNKNOWN = 0;\n    GAMEPAD_BUTTON_DPAD_LEFT = 1;\n    GAMEPAD_BUTTON_DPAD_RIGHT = 2;\n    GAMEPAD_BUTTON_DPAD_UP = 3;\n    GAMEPAD_BUTTON_DPAD_DOWN = 4;\n\n    // X on a DualShock/DualSense, A on an Xbox gamepad, and B on a Nintendo\n    // gamepad.\n    GAMEPAD_BUTTON_SOUTH = 5;\n    GAMEPAD_BUTTON_EAST = 6;\n    GAMEPAD_BUTTON_NORTH = 7;\n    GAMEPAD_BUTTON_WEST = 8;\n\n    // The right and left shoulder buttons, usually called L1 and R1.\n    GAMEPAD_BUTTON_SHOULDER_LEFT = 9;\n    GAMEPAD_BUTTON_SHOULDER_RIGHT = 10;\n\n    // The left and right joystick buttons, usually called L3 and R3.\n    GAMEPAD_BUTTON_JOYSTICK_LEFT = 11;\n    GAMEPAD_BUTTON_JOYSTICK_RIGHT = 12;\n\n    // Assorted buttons on the face of the gamepad.\n    GAMEPAD_BUTTON_START = 13;\n    GAMEPAD_BUTTON_SELECT = 14;\n    GAMEPAD_BUTTON_LOGO = 15;\n    GAMEPAD_BUTTON_SHARE = 16;\n\n    // Occasionally, gamepads will have another two buttons next to the NESW\n    // buttons.\n    GAMEPAD_BUTTON_C = 17;\n    GAMEPAD_BUTTON_Z = 18;\n\n    // Very rarely, gamepads will have another set of buttons rather than\n    // triggers.\n    GAMEPAD_BUTTON_TRIGGER_LEFT = 19;\n    GAMEPAD_BUTTON_TRIGGER_RIGHT = 20;\n  }\n\n  uint64 gamepad_id = 1;        // Required.\n  GamepadButton button = 2;     // Required.\n  GamepadButtonState state = 3; // Required\n}\n"
  },
  {
    "path": "mm-protocol/src/timestamp.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::time;\n\nuse crate::{ProtocolError, Timestamp};\n\nimpl TryFrom<Timestamp> for std::time::SystemTime {\n    type Error = ProtocolError;\n\n    fn try_from(value: Timestamp) -> Result<Self, Self::Error> {\n        if value.seconds <= 0 || value.nanos < 0 {\n            return Err(ProtocolError::InvalidMessage);\n        }\n\n        std::time::SystemTime::UNIX_EPOCH\n            .checked_add(time::Duration::from_secs(value.seconds as u64))\n            .and_then(|ts| ts.checked_add(time::Duration::from_nanos(value.nanos as u64)))\n            .ok_or(ProtocolError::InvalidMessage)\n    }\n}\n\nimpl From<time::SystemTime> for Timestamp {\n    fn from(value: time::SystemTime) -> Self {\n        let d = value.duration_since(time::UNIX_EPOCH).unwrap();\n\n        Self {\n            seconds: d.as_secs() as i64,\n            nanos: d.subsec_nanos() as i64,\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/Cargo.toml",
    "content": "# Copyright 2024 Colin Marc <hi@colinmarc.com>\n#\n# SPDX-License-Identifier: BUSL-1.1\n\n[package]\nname = \"mm-server\"\nversion = \"0.8.4\"\nedition = \"2021\"\npublish = false\n\n[[bin]]\nname = \"mmserver\"\npath = \"src/main.rs\"\n\n[dependencies]\nanyhow = \"1\"\naudiopus_sys = { version = \"0.2\", features = [\"static\"] }\nboring = \"4\"\nbyteorder = \"1\"\nbytes = \"1\"\nclap = { version = \"4\", features = [\"derive\"] }\nclone3 = \"0.2\"\nconverge = \"0.0.5\"\ncrossbeam-channel = \"0.5\"\ncstr = \"0.2\"\nctrlc = \"3\"\ncursor-icon = \"1\"\ndasp = { version = \"0.11\", features = [\n    \"signal\",\n    \"interpolate\",\n    \"interpolate-sinc\",\n    \"ring_buffer\",\n] }\ndrm = \"0.14\"\ndrm-fourcc = \"2\"\neither = \"1\"\ngit-version = \"0.3\"\nglam = \"0.24\"\nhashbrown = \"0.15\"\nimage = { version = \"0.25\", default-features = false, features = [\"png\"] }\nip_rfc = \"0.1\"\nlazy_static = \"1.4\"\nlibc = \"0.2\"\nlibloading = \"0.8\"\nlistenfd = \"1\"\nmdns-sd = \"0.11\"\nmio = { version = \"1\", features = [\"net\", \"os-ext\", \"os-poll\"] }\nmio-timerfd = \"0.2\"\nmktemp = \"0.5\"\nmm-protocol = { path = \"../mm-protocol\" }\nnix = { version = \"0.29\", features = [\"net\", \"socket\", \"uio\"] }\nnum_enum = \"0.7\"\noctets = \"0.2\"\noneshot = { version = \"0.1\", default-features = false, features = [\"std\"] }\nopus = \"0.3\"\npaste = \"1\"\nparking_lot = \"0.12\"\npathsearch = \"0.2\"\nquiche = { version = \"0.23\", features = [\"boringssl-boring-crate\"] }\nrand = \"0.8\"\nraptorq = \"2.0\"\nrcgen = \"0.12\"\nregex = \"1\"\nring = \"0.17\"\nscopeguard = \"1.2\"\nserde = \"1\"\nserde_json = \"1\"\nsimple_moving_average = { version = \"1\" }\nslotmap = \"1\"\nthiserror = \"1\"\nthreadpool = \"1\"\ntiny_id = \"0.1\"\ntoml = \"0.8\"\ntracing = \"0.1\"\ntracing-subscriber = { version = \"0.3\", features = [\"env-filter\"] }\ntracing-tracy = { version = \"0.11\", default-features = false }\ntracy-client = { version = \"0.17\", 
default-features = false }\nuds = \"0.4\"\nuuid = \"1\"\nwayland-protocols = { version = \"0.32\", features = [\n    \"server\",\n    \"staging\",\n    \"unstable\",\n] }\nwayland-scanner = \"0.31\"\nwayland-server = { version = \"0.31\", features = [\"log\"] }\nx11rb = { version = \"0.13\", features = [\"composite\"] }\n\n[dependencies.ash]\ngit = \"https://github.com/ash-rs/ash\"\nrev = \"92084df65f52aa15b704279fb6d8d26a3ee71809\"\n\n[dependencies.fuser]\ngit = \"https://github.com/colinmarc/fuser\"\nrev = \"643facdc1bcc9a3b11d7a88ebfaaaa045c3596c1\"\ndefault-features = false\n\n[dependencies.pulseaudio]\ngit = \"https://github.com/colinmarc/pulseaudio-rs\"\nrev = \"70ddb748f20ceecc20e963e571188124aeb30186\"\n\n[dependencies.rustix]\nversion = \"1.0\"\nfeatures = [\n    \"core\",\n    \"event\",\n    \"fs\",\n    \"mm\",\n    \"mount\",\n    \"net\",\n    \"pipe\",\n    \"time\",\n    \"thread\",\n    \"stdio\",\n    \"system\",\n    \"process\",\n]\n\n[dependencies.southpaw]\ngit = \"https://github.com/colinmarc/southpaw\"\nrev = \"e022f2066b300c9600d69bac73e7d8ef7e19f08c\"\n\n[build-dependencies]\nsystem-deps = \"6\"\nxkbcommon = { version = \"0.7\", default-features = false }\n\n[build-dependencies.slang]\ngit = \"https://github.com/colinmarc/slang-rs\"\nrev = \"075daa4faa8d1ab6d7bfbb5293812b087a527207\"\n# Uses SLANG_DIR if set, otherwise builds slang from source\nfeatures = [\"from-source\"]\n\n[package.metadata.system-deps]\nlibavcodec = { version = \"6\", feature = \"ffmpeg_encode\" }\n\n[features]\ndefault = []\ntracy = [\n    \"tracy-client/enable\",\n    \"tracy-client/broadcast\",\n    \"tracing-tracy/enable\",\n]\n\n[dev-dependencies]\npretty_assertions = \"*\"\ntest-log = { version = \"*\", features = [\"trace\"] }\n\n[patch.crates-io]\nmio-timerfd = { git = \"https://github.com/colinmarc/mio-timerfd.git\" }\n"
  },
  {
    "path": "mm-server/build.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{ffi::CString, path::PathBuf};\n\nuse xkbcommon::xkb;\n\nextern crate slang;\n\nfn main() {\n    system_deps::Config::new().probe().unwrap();\n\n    let mut session = slang::GlobalSession::new();\n    let out_dir = std::env::var(\"OUT_DIR\")\n        .map(std::path::PathBuf::from)\n        .expect(\"OUT_DIR not set\");\n\n    compile_shader(\n        &mut session,\n        \"src/session/video/composite.slang\",\n        out_dir.join(\"shaders/composite_vert.spv\").to_str().unwrap(),\n        \"vert\",\n        slang::Stage::Vertex,\n        [],\n    );\n\n    compile_shader(\n        &mut session,\n        \"src/session/video/composite.slang\",\n        out_dir.join(\"shaders/composite_frag.spv\").to_str().unwrap(),\n        \"frag\",\n        slang::Stage::Fragment,\n        [],\n    );\n\n    compile_shader(\n        &mut session,\n        \"src/session/video/convert.slang\",\n        out_dir\n            .join(\"shaders/convert_multiplanar.spv\")\n            .to_str()\n            .unwrap(),\n        \"main\",\n        slang::Stage::Compute,\n        [],\n    );\n\n    compile_shader(\n        &mut session,\n        \"src/session/video/convert.slang\",\n        out_dir\n            .join(\"shaders/convert_semiplanar.spv\")\n            .to_str()\n            .unwrap(),\n        \"main\",\n        slang::Stage::Compute,\n        [(\"SEMIPLANAR\", \"1\")],\n    );\n\n    // We need a keymap for the compositor, but it shouldn't affect much, since we\n    // operate generally with physical keycodes and so do games. 
If this proves\n    // limiting, we could allow the configuration of other virtual keyboards.\n    let xkb_ctx = xkb::Context::new(0);\n    save_keymap(\n        &xkb_ctx,\n        out_dir.join(\"keymaps/iso_us.txt\").to_str().unwrap(),\n        \"\",\n        \"pc105\",\n        \"us\",\n        \"\",\n        None,\n    );\n}\n\nfn compile_shader<'a>(\n    session: &mut slang::GlobalSession,\n    in_path: &str,\n    out_path: &str,\n    entry_point: &str,\n    stage: slang::Stage,\n    defines: impl IntoIterator<Item = (&'a str, &'a str)>,\n) {\n    std::fs::create_dir_all(PathBuf::from(out_path).parent().unwrap())\n        .expect(\"failed to create output directory\");\n\n    let mut compile_request = session.create_compile_request();\n\n    compile_request\n        .add_search_path(\"../shader-common\")\n        .set_codegen_target(slang::CompileTarget::Spirv)\n        .set_optimization_level(slang::OptimizationLevel::Maximal)\n        .set_target_profile(session.find_profile(\"glsl_460\"));\n\n    for (name, value) in defines {\n        compile_request.add_preprocessor_define(name, value);\n    }\n\n    let entry_point = compile_request\n        .add_translation_unit(slang::SourceLanguage::Slang, None)\n        .add_source_file(in_path)\n        .add_entry_point(entry_point, stage);\n\n    let shader_bytecode = compile_request\n        .compile()\n        .expect(\"Shader compilation failed.\");\n\n    std::fs::write(out_path, shader_bytecode.get_entry_point_code(entry_point))\n        .expect(\"failed to write shader bytecode to file\");\n\n    println!(\"cargo::rerun-if-changed={}\", in_path);\n}\n\nfn save_keymap(\n    ctx: &xkb::Context,\n    out_path: &str,\n    rules: &str,\n    model: &str,\n    layout: &str,\n    variant: &str,\n    options: Option<&str>,\n) {\n    std::fs::create_dir_all(PathBuf::from(out_path).parent().unwrap())\n        .expect(\"failed to create output directory\");\n\n    let keymap = xkb::Keymap::new_from_names(\n        ctx,\n  
      rules,\n        model,\n        layout,\n        variant,\n        options.map(|s| s.to_string()),\n        xkb::KEYMAP_COMPILE_NO_FLAGS,\n    )\n    .expect(\"failed to create keymap\");\n\n    let s = keymap.get_as_string(xkb::FORMAT_TEXT_V1);\n\n    std::fs::write(out_path, CString::new(s).unwrap().to_bytes_with_nul())\n        .expect(\"failed to write keymap bytes to file\");\n}\n"
  },
  {
    "path": "mm-server/deny.toml",
    "content": "[licenses]\nallow = [\n    \"MIT\",\n    \"Apache-2.0\",\n    \"Apache-2.0 WITH LLVM-exception\",\n    \"BSD-2-Clause\",\n    \"BSD-3-Clause\",\n    \"Zlib\",\n    \"ISC\",\n    \"MPL-2.0\",\n    \"Unicode-3.0\",\n    \"Unicode-DFS-2016\",\n]\nconfidence-threshold = 0.8\n\n[licenses.private]\nignore = true\n\n[advisories]\nignore = [\"RUSTSEC-2024-0436\"]\n"
  },
  {
    "path": "mm-server/src/codec.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::sync::Arc;\n\nuse anyhow::anyhow;\nuse mm_protocol as protocol;\n\nuse crate::vulkan::VkContext;\n\n/// A codec used for an attachment video stream.\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum VideoCodec {\n    H264,\n    H265,\n    Av1,\n}\n\n/// A codec used for an attachment audio stream.\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum AudioCodec {\n    Opus,\n}\n\nimpl TryFrom<protocol::VideoCodec> for VideoCodec {\n    type Error = anyhow::Error;\n\n    fn try_from(codec: protocol::VideoCodec) -> anyhow::Result<Self> {\n        match codec {\n            protocol::VideoCodec::Unknown => Err(anyhow!(\"codec unset\")),\n            protocol::VideoCodec::H264 => Ok(Self::H264),\n            protocol::VideoCodec::H265 => Ok(Self::H265),\n            protocol::VideoCodec::Av1 => Ok(Self::Av1),\n        }\n    }\n}\n\nimpl From<VideoCodec> for protocol::VideoCodec {\n    fn from(codec: VideoCodec) -> Self {\n        match codec {\n            VideoCodec::H264 => protocol::VideoCodec::H264,\n            VideoCodec::H265 => protocol::VideoCodec::H265,\n            VideoCodec::Av1 => protocol::VideoCodec::Av1,\n        }\n    }\n}\n\nimpl TryFrom<protocol::AudioCodec> for AudioCodec {\n    type Error = anyhow::Error;\n\n    fn try_from(codec: protocol::AudioCodec) -> anyhow::Result<Self> {\n        match codec {\n            protocol::AudioCodec::Unknown => Err(anyhow!(\"codec unset\")),\n            protocol::AudioCodec::Opus => Ok(Self::Opus),\n        }\n    }\n}\n\nimpl From<AudioCodec> for protocol::AudioCodec {\n    fn from(codec: AudioCodec) -> Self {\n        match codec {\n            AudioCodec::Opus => protocol::AudioCodec::Opus,\n        }\n    }\n}\n\npub fn probe_codec(_vk: Arc<VkContext>, codec: VideoCodec) -> bool {\n    match codec {\n        VideoCodec::H264 if _vk.device_info.supports_h264 => true,\n        
VideoCodec::H265 if _vk.device_info.supports_h265 => true,\n        _ => false,\n    }\n}\n"
  },
  {
    "path": "mm-server/src/color.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\n#![allow(dead_code)]\n\nuse mm_protocol as protocol;\n\n/// A combination of color primaries, white point, and transfer function. We\n/// generally ignore white point, since we deal only with colorspaces using the\n/// D65 white point.\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\npub enum ColorSpace {\n    /// Uses BT.709 primaries and the sRGB transfer function.\n    Srgb,\n    /// Uses BT.709 primaries and a linear transfer function. Usually encoded as\n    /// a float with negative values and values above 1.0 used to represent the\n    /// extended space.\n    LinearExtSrgb,\n    /// Uses BT.2020 primaries and the ST2084 (PQ) transfer function.\n    Hdr10,\n}\n\nimpl ColorSpace {\n    pub fn from_primaries_and_tf(\n        primaries: Primaries,\n        transfer_function: TransferFunction,\n    ) -> Option<Self> {\n        match (primaries, transfer_function) {\n            (Primaries::Srgb, TransferFunction::Srgb) => Some(ColorSpace::Srgb),\n            (Primaries::Srgb, TransferFunction::Linear) => Some(ColorSpace::LinearExtSrgb),\n            (Primaries::Bt2020, TransferFunction::Pq) => Some(ColorSpace::Hdr10),\n            _ => None,\n        }\n    }\n}\n\n// A configuration for a compressed video bitstream.\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\npub enum VideoProfile {\n    // Uses a bit depth of 8, BT.709 primaries and transfer function.\n    Hd,\n    // Uses a bit depth of 10, BT.2020 primaries and the ST2084 (PQ) transfer function.\n    Hdr10,\n}\n\nimpl TryFrom<protocol::VideoProfile> for VideoProfile {\n    type Error = String;\n\n    fn try_from(profile: protocol::VideoProfile) -> Result<Self, Self::Error> {\n        match profile {\n            protocol::VideoProfile::Hd => Ok(VideoProfile::Hd),\n            protocol::VideoProfile::Hdr10 => Ok(VideoProfile::Hdr10),\n            _ => Err(\"invalid video profile\".into()),\n        
}\n    }\n}\n\nimpl From<VideoProfile> for protocol::VideoProfile {\n    fn from(profile: VideoProfile) -> Self {\n        match profile {\n            VideoProfile::Hd => protocol::VideoProfile::Hd,\n            VideoProfile::Hdr10 => protocol::VideoProfile::Hdr10,\n        }\n    }\n}\n\n#[derive(Debug, Clone, Copy)]\npub enum TransferFunction {\n    Linear,\n    Srgb,\n    Pq,\n}\n\n#[derive(Debug, Clone, Copy)]\npub enum Primaries {\n    Srgb,\n    Bt2020,\n}\n"
  },
  {
    "path": "mm-server/src/config.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    collections::BTreeMap,\n    ffi::{OsStr, OsString},\n    net::ToSocketAddrs,\n    num::NonZeroU32,\n    path::{Component, Path, PathBuf},\n    time,\n};\n\nuse anyhow::{bail, Context};\nuse lazy_static::lazy_static;\nuse regex::Regex;\nuse tracing::trace;\n\nlazy_static! {\n    static ref ID_RE: Regex = Regex::new(r\"\\A[a-z][a-z0-9-_]{0,256}\\z\").unwrap();\n    static ref DESCRIPTION_RE: Regex = Regex::new(r\"\\A[A-Za-z0-9-_:() ]{0,256}\\z\").unwrap();\n    static ref PATH_COMPONENT_RE: Regex = Regex::new(r\"\\A[A-Za-z0-9-_  ]{0,64}\\z\").unwrap();\n    static ref DEFAULT_CFG: parsed::Config =\n        toml::from_str(include_str!(\"../../mmserver.default.toml\")).unwrap();\n}\n\nconst MAX_APP_PATH_COMPONENTS: usize = 8;\npub const MAX_IMAGE_SIZE: u64 = 1024 * 1024;\n\n/// Serde representations of the configuration files.\nmod parsed {\n    use std::{collections::BTreeMap, num::NonZeroU32, path::PathBuf};\n\n    use converge::Converge;\n    use serde::Deserialize;\n\n    #[derive(Debug, Clone, PartialEq)]\n    pub(super) enum NonZeroOrInf {\n        Value(NonZeroU32),\n        Infinity,\n    }\n\n    impl<'de> Deserialize<'de> for NonZeroOrInf {\n        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n        where\n            D: serde::Deserializer<'de>,\n        {\n            #[derive(Deserialize)]\n            #[serde(untagged, expecting = \"a positive integer or \\\"inf\\\"\")]\n            enum Variant {\n                Value(NonZeroU32),\n                Infinity(f64),\n            }\n\n            match Deserialize::deserialize(deserializer)? 
{\n                Variant::Value(n) => Ok(NonZeroOrInf::Value(n)),\n                Variant::Infinity(f) => {\n                    if f.is_infinite() {\n                        Ok(NonZeroOrInf::Infinity)\n                    } else {\n                        Err(serde::de::Error::invalid_value(\n                            serde::de::Unexpected::Float(f),\n                            &\"a positive integer or \\\"inf\\\"\",\n                        ))\n                    }\n                }\n            }\n        }\n    }\n\n    #[derive(Debug, Clone, PartialEq, Deserialize, Converge)]\n    pub(super) struct Config {\n        pub(super) include_apps: Option<Vec<PathBuf>>,\n        pub(super) apps: Option<BTreeMap<String, AppConfig>>,\n\n        pub(super) data_home: Option<PathBuf>,\n\n        #[converge(nest)]\n        pub(super) server: Option<ServerConfig>,\n        #[converge(nest)]\n        pub(super) default_app_settings: Option<DefaultAppSettings>,\n    }\n\n    #[derive(Debug, Clone, PartialEq, Deserialize, Converge)]\n    #[serde(deny_unknown_fields)]\n    pub(super) struct ServerConfig {\n        pub(super) bind: Option<String>,\n        pub(super) bind_systemd: Option<bool>,\n        pub(super) tls_cert: Option<PathBuf>,\n        pub(super) tls_key: Option<PathBuf>,\n        pub(super) worker_threads: Option<NonZeroU32>,\n        pub(super) max_connections: Option<NonZeroOrInf>,\n        pub(super) mdns: Option<bool>,\n        pub(super) mdns_hostname: Option<String>,\n        pub(super) mdns_instance_name: Option<String>,\n        pub(super) video_fec_ratios: Option<Vec<f32>>,\n    }\n\n    #[derive(Debug, Clone, PartialEq, Deserialize, Converge)]\n    #[serde(deny_unknown_fields)]\n    pub(super) struct DefaultAppSettings {\n        pub(super) xwayland: Option<bool>,\n        pub(super) force_1x_scale: Option<bool>,\n        pub(super) session_timeout: Option<NonZeroOrInf>,\n        pub(super) isolate_home: Option<bool>,\n        pub(super) 
tmp_home: Option<bool>,\n    }\n\n    #[derive(Debug, Clone, PartialEq, Deserialize)]\n    #[serde(deny_unknown_fields)]\n    pub(super) struct AppConfig {\n        pub(super) app_path: Option<String>,\n        pub(super) description: Option<String>,\n        pub(super) header_image: Option<PathBuf>,\n        pub(super) command: Vec<String>,\n        pub(super) environment: Option<BTreeMap<String, String>>,\n        pub(super) xwayland: Option<bool>,\n        pub(super) force_1x_scale: Option<bool>,\n        pub(super) session_timeout: Option<NonZeroOrInf>,\n        pub(super) isolate_home: Option<bool>,\n        pub(super) shared_home_name: Option<String>,\n        pub(super) tmp_home: Option<bool>,\n    }\n}\n\n#[derive(Debug, Clone, PartialEq)]\npub struct Config {\n    pub server: ServerConfig,\n    pub apps: BTreeMap<String, AppConfig>,\n    pub data_home: PathBuf,\n\n    pub bug_report_dir: Option<PathBuf>,\n}\n\n#[derive(Debug, Clone, PartialEq)]\npub struct ServerConfig {\n    pub bind: String,\n    pub bind_systemd: bool,\n    pub tls_cert: Option<PathBuf>,\n    pub tls_key: Option<PathBuf>,\n    pub worker_threads: NonZeroU32,\n    pub max_connections: Option<NonZeroU32>,\n    pub mdns: bool,\n    pub mdns_hostname: Option<String>,\n    pub mdns_instance_name: Option<String>,\n    pub video_fec_ratios: Vec<f32>,\n}\n\n#[derive(Debug, Clone, PartialEq)]\npub struct AppConfig {\n    pub description: Option<String>,\n    pub path: Vec<String>,\n    pub header_image: Option<PathBuf>,\n    pub command: Vec<OsString>,\n    pub env: BTreeMap<OsString, OsString>,\n    pub xwayland: bool,\n    pub force_1x_scale: bool,\n    pub session_timeout: Option<time::Duration>,\n    pub home_isolation_mode: HomeIsolationMode,\n}\n\n#[derive(Debug, Clone, PartialEq, Eq)]\npub enum HomeIsolationMode {\n    Unisolated,\n    Tmpfs,\n    Permanent(PathBuf),\n}\n\nimpl Config {\n    pub fn new(path: Option<&PathBuf>, includes: &[PathBuf]) -> anyhow::Result<Config> {\n        let 
file = path\n            .map(|p| p.to_owned())\n            .or_else(locate_default_config_file);\n\n        let cfg = if let Some(file) = file {\n            let content = std::fs::read_to_string(&file)?;\n            let parsed: parsed::Config = toml::from_str(&content)\n                .context(format!(\"parsing configuration file {}\", file.display()))?;\n\n            Some(parsed)\n        } else {\n            None\n        };\n\n        let this = Self::build(cfg, includes)?;\n        this.validate()?;\n\n        Ok(this)\n    }\n\n    fn build(cfg: Option<parsed::Config>, includes: &[PathBuf]) -> anyhow::Result<Self> {\n        // This is the parsed mmserver.defaults.toml.\n        let defaults = DEFAULT_CFG.clone();\n\n        let input = if let Some(cfg) = cfg {\n            // Merge the default config with the input config, giving the input\n            // precedence.\n            use converge::Converge;\n            cfg.converge(defaults)\n        } else {\n            defaults\n        };\n\n        let data_home = input.data_home.or_else(|| {\n            if let Ok(xdg_data_home) = std::env::var(\"XDG_DATA_HOME\") {\n                Some(Path::new(&xdg_data_home).join(\"mmserver\"))\n            } else if let Ok(home) = std::env::var(\"HOME\") {\n                Some(\n                    Path::new(&home)\n                        .join(\".local\")\n                        .join(\"share\")\n                        .join(\"mmserver\"),\n                )\n            } else {\n                None\n            }\n        });\n\n        let data_home = data_home.ok_or(anyhow::anyhow!(\n            \"failed to determine `data_home`. Set it explicitly or set one of $HOME or \\\n             $XDG_DATA_HOME\"\n        ))?;\n\n        // We only unwrap values that should have been set in the default\n        // config. 
This is verified by a test.\n        let server = input.server.unwrap();\n        let default_app_settings = input.default_app_settings.unwrap();\n\n        let mut this = Config {\n            server: ServerConfig {\n                bind: server.bind.unwrap(),\n                bind_systemd: server.bind_systemd.unwrap(),\n                tls_cert: server.tls_cert,\n                tls_key: server.tls_key,\n                worker_threads: server.worker_threads.unwrap(),\n                max_connections: match server.max_connections.unwrap() {\n                    parsed::NonZeroOrInf::Value(n) => Some(n),\n                    parsed::NonZeroOrInf::Infinity => None,\n                },\n                mdns: server.mdns.unwrap(),\n                mdns_hostname: server.mdns_hostname,\n                mdns_instance_name: server.mdns_instance_name,\n                video_fec_ratios: server.video_fec_ratios.unwrap(),\n            },\n            data_home: data_home.clone(),\n            apps: BTreeMap::new(), // Handled below.\n            bug_report_dir: None,  // This is only set from the command line.\n        };\n\n        // Collect additional app definitions from app_dirs.\n        let cfg_includes = input.include_apps.unwrap_or_default();\n\n        let includes = cfg_includes.iter().chain(includes);\n        let apps = input.apps.unwrap_or_default();\n\n        let additional_apps = includes\n            .map(|p| collect_includes(p).context(format!(\"searching {}\", p.display())))\n            .collect::<Result<Vec<_>, _>>()?\n            .into_iter()\n            .flatten();\n\n        for (id, app) in apps.into_iter().chain(additional_apps) {\n            if this.apps.contains_key(&id) {\n                bail!(\"duplicate app name: {}\", id);\n            }\n            let app = validate_app(&id, app, &default_app_settings, &data_home)\n                .context(format!(\"failed to load app config for '{}'\", id))?;\n            this.apps.insert(id, app);\n    
    }\n\n        trace!(\"using config: {:#?}\", this);\n\n        Ok(this)\n    }\n\n    /// Performs high-level validation on the final configuration.\n    fn validate(&self) -> anyhow::Result<()> {\n        if self.apps.is_empty() {\n            bail!(\"at least one application must be defined\");\n        }\n\n        for (name, app) in &self.apps {\n            if app.command.is_empty() {\n                bail!(\"empty command for application {name:?}\");\n            }\n        }\n\n        let addr = self\n            .server\n            .bind\n            .to_socket_addrs()\n            .map(|mut addrs| addrs.next().unwrap())\n            .map_err(|_| anyhow::anyhow!(\"invalid address \\\"{}\\\"\", self.server.bind))?;\n\n        // Check that TLS is set up (for non-private addresses).\n        let ip = addr.ip();\n        let tls_required = (ip_rfc::global(&ip) || ip.is_unspecified())\n            && (self.server.tls_cert.is_none() || self.server.tls_key.is_none());\n        if tls_required && (self.server.tls_cert.is_none() || self.server.tls_key.is_none()) {\n            bail!(\"TLS required for non-private address \\\"{}\\\"\", addr);\n        }\n\n        // Validate that the TLS cert and key exist.\n        match self.server.tls_cert {\n            Some(ref cert) if !cert.exists() => {\n                bail!(\"TLS certificate not found at {}\", cert.display());\n            }\n            _ => {}\n        }\n\n        match self.server.tls_key {\n            Some(ref key) if !key.exists() => {\n                bail!(\"TLS private key not found at {}\", key.display());\n            }\n            _ => {}\n        }\n\n        Ok(())\n    }\n}\n\nimpl Default for Config {\n    fn default() -> Self {\n        Config::build(None, &[]).expect(\"failed to build default config\")\n    }\n}\n\nfn collect_includes(p: impl AsRef<Path>) -> anyhow::Result<Vec<(String, parsed::AppConfig)>> {\n    let mut res = Vec::new();\n    let p = p.as_ref();\n\n    if 
!p.is_dir() {\n        return Ok(vec![include_file(p)?]);\n    }\n\n    for entry in p.read_dir()? {\n        let entry = entry?;\n\n        match entry.file_type() {\n            Ok(t) if t.is_file() => {\n                let path = entry.path();\n                let ext = path.extension().and_then(OsStr::to_str);\n                if matches!(ext, Some(\"toml\") | Some(\"json\")) {\n                    res.push(include_file(&path).context(format!(\"reading {}\", path.display()))?)\n                }\n            }\n            _ => continue,\n        }\n    }\n\n    Ok(res)\n}\n\nfn include_file(p: impl AsRef<Path>) -> anyhow::Result<(String, parsed::AppConfig)> {\n    let p = p.as_ref();\n    let name = p\n        .file_stem()\n        .and_then(OsStr::to_str)\n        .ok_or_else(|| anyhow::anyhow!(\"invalid file name\"))?;\n\n    let content = std::fs::read_to_string(p)?;\n\n    let app = match p.extension().and_then(OsStr::to_str) {\n        Some(\"toml\") => toml::from_str(&content)?,\n        Some(\"json\") => serde_json::from_str(&content)?,\n        _ => bail!(\"invalid file extension\"),\n    };\n\n    Ok((name.to_owned(), app))\n}\n\nfn locate_default_config_file() -> Option<PathBuf> {\n    const BASENAME: &str = \"/etc/magic-mirror/mmserver\";\n\n    for ext in &[\"toml\", \"json\"] {\n        let path = PathBuf::from(BASENAME).with_extension(ext);\n        if path.exists() {\n            return Some(path);\n        }\n    }\n\n    None\n}\n\nfn validate_app(\n    id: &str,\n    app: parsed::AppConfig,\n    defaults: &parsed::DefaultAppSettings,\n    data_home: &Path,\n) -> anyhow::Result<AppConfig> {\n    if !ID_RE.is_match(id) {\n        bail!(\"invalid name: {}\", id);\n    }\n\n    if app\n        .description\n        .as_ref()\n        .is_some_and(|desc| !DESCRIPTION_RE.is_match(desc))\n    {\n        bail!(\"invalid description: {}\", app.description.unwrap())\n    }\n\n    let path = match app.app_path {\n        None => Vec::new(),\n        
Some(p) => validate_app_path(p)?,\n    };\n\n    if let Some(p) = &app.header_image {\n        let len = p.metadata()?.len();\n        if len > MAX_IMAGE_SIZE {\n            bail!(\n                \"image is {} bytes, over the maximum of {}: {}\",\n                len,\n                MAX_IMAGE_SIZE,\n                p.display()\n            );\n        }\n    }\n\n    let session_timeout = match app\n        .session_timeout\n        .or(defaults.session_timeout.clone())\n        .unwrap()\n    {\n        parsed::NonZeroOrInf::Value(v) => Some(time::Duration::from_secs(v.get() as u64)),\n        parsed::NonZeroOrInf::Infinity => None,\n    };\n\n    let isolate_home = app.isolate_home.or(defaults.isolate_home).unwrap();\n    let tmp_home = app.tmp_home.or(defaults.tmp_home).unwrap();\n    let home_isolation_mode = match (isolate_home, tmp_home) {\n        (false, true) => bail!(\"if isolate_home = false, tmp_home must also be false\"),\n        (false, false) => HomeIsolationMode::Unisolated,\n        (true, true) => HomeIsolationMode::Tmpfs,\n        (true, false) => {\n            if let Some(s) = app.shared_home_name {\n                if !ID_RE.is_match(&s) {\n                    bail!(\"invalid shared_home_name: {s}\",)\n                }\n\n                HomeIsolationMode::Permanent(data_home.join(\"homes\").join(s))\n            } else {\n                HomeIsolationMode::Permanent(data_home.join(\"homes\").join(id))\n            }\n        }\n    };\n\n    Ok(AppConfig {\n        path,\n        description: app.description,\n        header_image: app.header_image,\n        command: app.command.into_iter().map(OsString::from).collect(),\n        env: app\n            .environment\n            .unwrap_or_default()\n            .into_iter()\n            .map(|(k, v)| (OsString::from(k), OsString::from(v)))\n            .collect(),\n        xwayland: app.xwayland.or(defaults.xwayland).unwrap(),\n        force_1x_scale: 
app.force_1x_scale.or(defaults.force_1x_scale).unwrap(),\n        session_timeout,\n        home_isolation_mode,\n    })\n}\n\nfn validate_app_path(p: String) -> anyhow::Result<Vec<String>> {\n    let components = Path::new(&p).components();\n    let mut out = Vec::new();\n\n    for component in components {\n        if let Some(s) = validate_app_path_component(component) {\n            out.push(s);\n        } else {\n            bail!(\"invalid path compontent: {:?}\", component.as_os_str());\n        }\n    }\n\n    if out.len() > MAX_APP_PATH_COMPONENTS {\n        bail!(\"app_path has too many components\");\n    }\n\n    Ok(out)\n}\n\nfn validate_app_path_component(component: Component) -> Option<String> {\n    match component {\n        Component::Normal(s) => {\n            let comp = s.to_str()?;\n            if !PATH_COMPONENT_RE.is_match(comp) {\n                None\n            } else {\n                Some(comp.trim().to_owned())\n            }\n        }\n        _ => None,\n    }\n}\n\n#[cfg(test)]\nmod test {\n    use pretty_assertions::assert_eq;\n\n    use super::*;\n\n    lazy_static! 
{\n        static ref EXAMPLE_APP: AppConfig = AppConfig {\n            path: Vec::new(),\n            description: None,\n            header_image: None,\n            command: vec![\"echo\".to_owned().into(), \"hello\".to_owned().into()],\n            env: Default::default(),\n            xwayland: true,\n            force_1x_scale: false,\n            session_timeout: Some(time::Duration::from_secs(3600)),\n            home_isolation_mode: HomeIsolationMode::Unisolated,\n        };\n    }\n\n    fn config_from_str(s: &str) -> anyhow::Result<Config> {\n        let input: parsed::Config = toml::from_str(s)?;\n        Config::build(Some(input), &[])\n    }\n\n    #[test]\n    fn test_default() {\n        let mut config = Config::default();\n        config\n            .apps\n            .insert(\"example\".to_string(), EXAMPLE_APP.clone());\n\n        config.validate().expect(\"default config is valid\");\n        assert_eq!(config.server.bind, \"localhost:9599\");\n    }\n\n    #[test]\n    fn test_only_app() {\n        let config = config_from_str(\n            r#\"\n            [apps.example]\n            command = [\"echo\", \"hello\"]\n            isolate_home = false\n            \"#,\n        )\n        .unwrap();\n\n        config.validate().expect(\"empty config is valid\");\n\n        let mut expected = Config::default();\n        expected\n            .apps\n            .insert(\"example\".to_string(), EXAMPLE_APP.clone());\n\n        assert_eq!(config, expected);\n    }\n\n    #[test]\n    fn tls_required_for_global_addr() {\n        let config = config_from_str(\n            r#\"\n            [server]\n            bind = \"8.8.8.8:9599\"\n            [apps.example]\n            command = [\"echo\", \"hello\"]\n            \"#,\n        )\n        .unwrap();\n\n        eprintln!(\"{:?}\", config.server);\n\n        match config.validate() {\n            Err(e) => {\n                assert_eq!(\n                    e.to_string(),\n                    
\"TLS required for non-private address \\\"8.8.8.8:9599\\\"\"\n                )\n            }\n            _ => panic!(\"expected error\"),\n        }\n    }\n\n    #[test]\n    fn tls_required_for_unspecified() {\n        let config = config_from_str(\n            r#\"\n            [server]\n            bind = \"[::]:9599\"\n            [apps.example]\n            command = [\"echo\", \"hello\"]\n            \"#,\n        )\n        .unwrap();\n\n        match config.validate() {\n            Err(e) => {\n                assert_eq!(\n                    e.to_string(),\n                    \"TLS required for non-private address \\\"[::]:9599\\\"\"\n                )\n            }\n            _ => panic!(\"expected error\"),\n        }\n    }\n\n    #[test]\n    fn tls_not_required_for_tailscale() {\n        let config = config_from_str(\n            r#\"\n            [server]\n            bind = \"100.64.123.45:9599\"\n            [apps.example]\n            command = [\"echo\", \"hello\"]\n            \"#,\n        )\n        .unwrap();\n\n        config\n            .validate()\n            .expect(\"TLS not required for shared NAT address\");\n    }\n\n    #[test]\n    fn app_paths() {\n        assert!(validate_app_path(\"foo!\".into()).is_err());\n        assert!(validate_app_path(\"C:\\\\\\\\foo\\\\bar\".into()).is_err());\n\n        let expected: Vec<String> = vec![\"Foo Bar\".into(), \"Baz\".into(), \"Qux\".into()];\n        assert_eq!(\n            expected,\n            validate_app_path(\"Foo Bar/ Baz/Qux \".into()).unwrap()\n        )\n    }\n}\n"
  },
  {
    "path": "mm-server/src/container/ipc.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::os::fd::{AsFd, AsRawFd, FromRawFd, OwnedFd};\nuse std::{io, time};\n\nuse rustix::event::{eventfd, poll, EventfdFlags, PollFd, PollFlags};\nuse rustix::io::{read, write, Errno};\n\n/// An IPC barrier using eventfd(2).\npub struct EventfdBarrier {\n    a: OwnedFd,\n    b: OwnedFd,\n    other: bool,\n}\n\nimpl EventfdBarrier {\n    pub fn new() -> io::Result<(Self, Self)> {\n        let a = eventfd(0, EventfdFlags::NONBLOCK)?;\n        let b = eventfd(0, EventfdFlags::NONBLOCK)?;\n\n        let a2 = a.try_clone()?;\n        let b2 = b.try_clone()?;\n\n        Ok((\n            Self { a, b, other: false },\n            Self {\n                a: a2,\n                b: b2,\n                other: true,\n            },\n        ))\n    }\n\n    // Waits at the barrier, timing out after the given duration.\n    pub fn sync(&self, timeout: time::Duration) -> rustix::io::Result<()> {\n        if self.other {\n            wait_eventfd(&self.a, timeout)?;\n            signal_eventfd(&self.b)?;\n        } else {\n            signal_eventfd(&self.a)?;\n            wait_eventfd(&self.b, timeout)?;\n        }\n\n        Ok(())\n    }\n}\n\n/// Creates an IPC channel for sending a file descriptor.\npub fn fd_oneshot() -> io::Result<(FdSender, FdReceiver)> {\n    let (sender, receiver) = uds::UnixSeqpacketConn::pair()?;\n    Ok((FdSender(sender), FdReceiver(receiver)))\n}\n\npub struct FdSender(uds::UnixSeqpacketConn);\n\nimpl FdSender {\n    pub fn send_timeout(self, fd: OwnedFd, timeout: time::Duration) -> io::Result<()> {\n        self.0.set_write_timeout(Some(timeout))?;\n\n        let raw_fd = fd.as_raw_fd();\n        self.0.send_fds(&[], &[raw_fd])?;\n\n        // The FD gets dropped here, along with our end of the connection.\n        Ok(())\n    }\n}\n\npub struct FdReceiver(uds::UnixSeqpacketConn);\n\nimpl FdReceiver {\n    pub fn recv_timeout(self, 
timeout: time::Duration) -> io::Result<OwnedFd> {\n        self.0.set_read_timeout(Some(timeout))?;\n\n        let mut fds = [-1];\n\n        self.0.recv_fds(&mut [], &mut fds)?;\n        if fds[0] <= 0 {\n            return Err(io::Error::new(\n                io::ErrorKind::InvalidData,\n                \"unexpected message received\",\n            ));\n        }\n\n        let fd = unsafe { OwnedFd::from_raw_fd(fds[0]) };\n        Ok(fd)\n    }\n}\n\nfn signal_eventfd(fd: impl AsFd) -> rustix::io::Result<()> {\n    loop {\n        match write(&fd, &1_u64.to_ne_bytes()).map(|_| ()) {\n            Err(Errno::INTR) => continue,\n            v => return v,\n        }\n    }\n}\n\nfn wait_eventfd(fd: impl AsFd, timeout: time::Duration) -> rustix::io::Result<()> {\n    let mut pollfd = [PollFd::new(&fd, PollFlags::IN)];\n    let mut buf = [0; 8];\n    let timespec = timeout.try_into().expect(\"invalid duration\");\n    loop {\n        match poll(&mut pollfd, Some(&timespec)) {\n            Ok(0) => return Err(Errno::TIMEDOUT),\n            Ok(_) => return read(fd, &mut buf).map(|_| ()),\n            Err(Errno::INTR) => continue,\n            Err(e) => return Err(e),\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/container/runtime.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    ffi::{CStr, CString, OsStr, OsString},\n    fs::OpenOptions,\n    io,\n    os::{\n        fd::{AsFd, AsRawFd as _, BorrowedFd, FromRawFd as _, OwnedFd},\n        unix::process::CommandExt as _,\n    },\n    path::{Path, PathBuf},\n    process::Command,\n    str::FromStr as _,\n    time,\n};\n\nuse anyhow::{anyhow, bail, Context as _};\nuse pathsearch::{find_executable_in_path, unix::is_executable};\nuse rand::distributions::{Alphanumeric, DistString as _};\nuse rustix::{\n    fs::{mkdirat, openat, symlinkat, FileType, Gid, Mode, OFlags, Uid, CWD as AT_FDCWD},\n    io::{fcntl_dupfd_cloexec, write, Errno},\n    mount::{\n        fsconfig_create, fsconfig_set_flag, fsconfig_set_string, fsmount, fsopen, move_mount,\n        open_tree, FsMountFlags, FsOpenFlags, MountAttrFlags, MoveMountFlags, OpenTreeFlags,\n    },\n    process::{getgid, getuid, set_parent_process_death_signal, waitpid, Pid, Signal, WaitOptions},\n    thread::{move_into_link_name_space, LinkNameSpaceType},\n};\nuse tracing::debug;\n\nuse super::ipc;\nuse crate::config::HomeIsolationMode;\n\n// In CPU-constrained testing environments, we sometimes need to wait\n// to get scheduled.\n#[cfg(test)]\nconst SYNC_TIMEOUT: time::Duration = time::Duration::from_secs(5);\n\n#[cfg(not(test))]\nconst SYNC_TIMEOUT: time::Duration = time::Duration::from_secs(1);\n\n#[derive(Debug, Clone, Copy)]\nstruct DevBindMount {\n    path: &'static str,\n    is_dir: bool,\n}\n\nconst DEV_BIND_MOUNTS: &[DevBindMount] = &[\n    DevBindMount {\n        path: \"/dev/null\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/zero\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/full\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/tty\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/random\",\n        
is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/urandom\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/dri\",\n        is_dir: true,\n    },\n    DevBindMount {\n        path: \"/dev/fuse\",\n        is_dir: false,\n    },\n    // Needed for NVIDIA proprietary drivers.\n    DevBindMount {\n        path: \"/dev/nvidiactl\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/nvidia0\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/nvidia-modeset\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/nvidia-uvm\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/nvidia-uvm-tools\",\n        is_dir: false,\n    },\n    DevBindMount {\n        path: \"/dev/nvidia-caps\",\n        is_dir: true,\n    },\n];\n\n#[cfg(debug_assertions)]\nstruct UnbufferedStderr<'a>(BorrowedFd<'a>);\n\n#[cfg(debug_assertions)]\nimpl std::fmt::Write for UnbufferedStderr<'_> {\n    fn write_str(&mut self, s: &str) -> std::fmt::Result {\n        write(self.0, s.as_bytes()).map_err(|_| std::fmt::Error)?;\n        Ok(())\n    }\n}\n\n#[cfg(debug_assertions)]\nmacro_rules! preexec_debug {\n    ($($arg:tt)+) => {\n        #[allow(unused_imports)]\n        use std::fmt::Write as _;\n\n        let mut stderr = UnbufferedStderr(rustix::stdio::stderr());\n        let _ = std::write!(stderr, \"[PRE-EXEC] \");\n        let _ = std::writeln!(stderr, $($arg)*);\n    }\n}\n\n#[cfg(not(debug_assertions))]\nmacro_rules! 
preexec_debug {\n    ($($arg:tt)*) => {};\n}\n\nunsafe fn _must<T>(_op: &str, res: rustix::io::Result<T>) -> T {\n    loop {\n        match res {\n            Ok(v) => return v,\n            Err(Errno::INTR) => continue,\n            Err(_e) => {\n                #[cfg(debug_assertions)]\n                {\n                    use std::fmt::Write as _;\n                    let mut stderr = UnbufferedStderr(rustix::stdio::stderr());\n\n                    let _ = std::writeln!(stderr, \"[PRE-EXEC] {_op}: {_e}\");\n                    let _ = std::writeln!(stderr);\n                }\n\n                libc::_exit(1);\n            }\n        }\n    }\n}\n\nmacro_rules! must {\n    ($n:ident( $($args:tt)* )) => {{\n        let res = $n( $($args)* );\n        _must(stringify!($n), res)\n    }};\n}\n\ntype SetupHook = Box<dyn FnOnce(&mut super::ContainerHandle) -> anyhow::Result<()>>;\n\n/// A lightweight linux container. Currently we use the following namespaces:\n///  - A mount namespace, to mount tmpfs on /dev, /tmp, /run, etc, and\n///    potentially to isolate home as well. We don't pivot_root/chroot.\n///  - A PID namespace, so that processes get cleaned up when a session ends.\n///    Note that we currently don't use a \"stub init\" process to handle\n///    reparenting or reaping, since we don't expect to spawn lots of grandchild\n///    processes.\n///  - A user namespace, to enable the above. We just map the current user to\n///    itself.\n///\n/// IMPORTANT: This container is not a secure container. Under NO CIRCUMSTANCES\n/// should you use it to run untrusted code. 
Any security benefits are purely\n/// incidental; this is more about containing mess (I'm looking at you, Steam).\npub struct Container {\n    child_cmd: Command,\n\n    // Note: we don't use Command::env or Command::env_clear, because those\n    // cause Command::exec to allocate, which we don't want to do after forking.\n    envs: Vec<CString>,\n\n    tmp_stderr: Option<OwnedFd>,\n\n    extern_home_path: Option<PathBuf>,\n    intern_home_path: PathBuf,\n    clear_home: bool,\n\n    intern_run_path: PathBuf,\n    extern_run_path: PathBuf,\n\n    additional_bind_mounts: Vec<(PathBuf, PathBuf)>,\n    internal_bind_mounts: Vec<(PathBuf, PathBuf, bool)>,\n\n    // Stores a closure to run before unfreeze.\n    setup_hooks: Vec<SetupHook>,\n\n    uid: Uid,\n    gid: Gid,\n}\n\nimpl Container {\n    pub fn new(\n        mut args: Vec<OsString>,\n        home_isolation_mode: HomeIsolationMode,\n    ) -> anyhow::Result<Self> {\n        let exe_path = validate_exe(args.remove(0))?;\n        let mut envs = Vec::new();\n\n        for key in [\n            \"PATH\",\n            \"USER\",\n            \"SHELL\",\n            \"EDITOR\",\n            \"LANG\",\n            \"LC_ALL\",\n            \"LC_ADDRESS\",\n            \"LC_IDENTIFICATION\",\n            \"LC_MEASUREMENT\",\n            \"LC_MONETARY\",\n            \"LC_NAME\",\n            \"LC_NUMERIC\",\n            \"LC_PAPER\",\n            \"LC_TELEPHONE\",\n            \"LC_TIME\",\n        ] {\n            if let Some(value) = std::env::var_os(key) {\n                envs.push(make_putenv(key, value));\n            }\n        }\n\n        let uid = getuid();\n        let gid = getgid();\n\n        let intern_run_path: OsString = format!(\"/run/user/{}\", uid.as_raw()).try_into().unwrap();\n        envs.push(make_putenv(\"XDG_RUNTIME_DIR\", intern_run_path.clone()));\n\n        let extern_run_path = std::env::temp_dir().join(format!(\n            \"mm.{}\",\n            Alphanumeric.sample_string(&mut 
rand::thread_rng(), 16),\n        ));\n        std::fs::create_dir_all(&extern_run_path)?;\n\n        let intern_home_path: OsString = std::env::var_os(\"HOME\").unwrap_or(\"/home/mm\".into());\n        envs.push(make_putenv(\"HOME\", intern_home_path.clone()));\n\n        debug!(home_mode = ?home_isolation_mode, \"using home mode\");\n        let (extern_home_path, clear_home) = match home_isolation_mode {\n            HomeIsolationMode::Unisolated => (None, false),\n            HomeIsolationMode::Tmpfs => (None, true),\n            HomeIsolationMode::Permanent(path) => {\n                std::fs::create_dir_all(&path).context(format!(\n                    \"failed to create home directory {}\",\n                    path.display()\n                ))?;\n\n                (Some(path), true)\n            }\n        };\n\n        if clear_home && exe_path.starts_with(&intern_home_path) {\n            bail!(\n                \"command {:?} will be unavailable in container (set isolate_home = false to avoid \\\n                 this error)\",\n                exe_path.display(),\n            );\n        }\n\n        let mut child_cmd = Command::new(exe_path);\n        child_cmd.current_dir(\"/\");\n        child_cmd.args(args);\n\n        Ok(Self {\n            child_cmd,\n            envs,\n            tmp_stderr: None,\n\n            intern_home_path: intern_home_path.into(),\n            extern_home_path,\n            clear_home,\n            intern_run_path: intern_run_path.into(),\n            extern_run_path,\n\n            additional_bind_mounts: Vec::new(),\n            internal_bind_mounts: Vec::new(),\n\n            setup_hooks: Vec::new(),\n\n            uid,\n            gid,\n        })\n    }\n\n    pub fn intern_run_path(&self) -> &Path {\n        &self.intern_run_path\n    }\n\n    pub fn extern_run_path(&self) -> &Path {\n        &self.extern_run_path\n    }\n\n    pub fn bind_mount(&mut self, src: impl AsRef<Path>, dst: impl AsRef<Path>) {\n        
self.additional_bind_mounts\n            .push((src.as_ref().to_owned(), dst.as_ref().to_owned()));\n    }\n\n    pub fn internal_bind_mount(&mut self, src: impl AsRef<Path>, dst: impl AsRef<Path>) {\n        self.internal_bind_mounts\n            .push((src.as_ref().to_owned(), dst.as_ref().to_owned(), true));\n    }\n\n    pub fn setup_hook(\n        &mut self,\n        f: impl FnOnce(&mut super::ContainerHandle) -> anyhow::Result<()> + 'static,\n    ) {\n        self.setup_hooks.push(Box::new(f))\n    }\n\n    pub unsafe fn pre_exec(&mut self, f: impl FnMut() -> io::Result<()> + Send + Sync + 'static) {\n        self.child_cmd.pre_exec(f);\n    }\n\n    pub fn set_env<K, V>(&mut self, key: K, val: V)\n    where\n        K: AsRef<OsStr>,\n        V: AsRef<OsStr>,\n    {\n        self.envs.push(make_putenv(key, val))\n    }\n\n    pub fn set_stdout<T: AsFd>(&mut self, stdio: T) -> anyhow::Result<()> {\n        let stdout = fcntl_dupfd_cloexec(&stdio, 0)?;\n        self.child_cmd.stdout(stdout);\n\n        Ok(())\n    }\n\n    pub fn set_stderr<T: AsFd>(&mut self, stdio: T) -> anyhow::Result<()> {\n        let stderr = fcntl_dupfd_cloexec(&stdio, 0)?;\n        let tmp_stderr = fcntl_dupfd_cloexec(&stdio, 0)?;\n\n        self.child_cmd.stderr(stderr);\n        self.tmp_stderr = Some(tmp_stderr);\n\n        Ok(())\n    }\n\n    pub fn spawn(mut self) -> anyhow::Result<super::ContainerHandle> {\n        // Prepare bind mounts.\n        let mut mounts = DEV_BIND_MOUNTS\n            .iter()\n            .map(|m| {\n                Ok((\n                    PathBuf::from_str(m.path).unwrap(),\n                    PathBuf::from_str(m.path).unwrap(),\n                    m.is_dir,\n                    None,\n                ))\n            })\n            .collect::<anyhow::Result<Vec<_>>>()?;\n\n        for (src, dst) in self.additional_bind_mounts.drain(..) 
{\n            let is_dir = std::fs::metadata(&src)\n                .context(\"failed to stat bind mount\")?\n                .is_dir();\n\n            mounts.push((src, dst, is_dir, None))\n        }\n\n        let mut child_pidfd = -1;\n        let mut args = clone3::Clone3::default();\n        args.flag_pidfd(&mut child_pidfd)\n            .exit_signal(libc::SIGCHLD as _)\n            .flag_newuser()\n            .flag_newns()\n            .flag_newpid();\n\n        debug!(cmd = ?self.child_cmd, \"spawning child process\");\n\n        let (barrier, child_barrier) = ipc::EventfdBarrier::new()?;\n\n        // clone off a child process, which does some setup before execing the\n        // app.\n        let child_stderr = self.tmp_stderr.take();\n        let child_pid = match unsafe { args.call().context(\"clone3\")? } {\n            0 => unsafe {\n                self.child_after_fork(child_stderr.as_ref(), child_barrier, &mut mounts)\n            },\n            pid => pid,\n        };\n\n        let child_pidfd = unsafe { OwnedFd::from_raw_fd(child_pidfd) };\n\n        set_uid_map(child_pid, self.uid, self.gid).context(\"failed to set uid/gid map\")?;\n\n        // Wait for the child to signal that it's ready.\n        barrier\n            .sync(SYNC_TIMEOUT)\n            .context(\"timed out waiting for forked child (phase 1)\")?;\n\n        let mut handle = super::ContainerHandle {\n            pid: Pid::from_raw(child_pid).unwrap(),\n            pidfd: child_pidfd,\n            run_path: self.extern_run_path,\n        };\n\n        for hook in self.setup_hooks.drain(..) {\n            hook(&mut handle)?;\n        }\n\n        // Unfreeze the child.\n        barrier\n            .sync(SYNC_TIMEOUT)\n            .context(\"timed out waiting for forked child (phase 2)\")?;\n\n        Ok(handle)\n    }\n\n    // Signal safety dictates what we can do here, and it's not a lot. 
The main\n    // thing we avoid is allocations.\n    unsafe fn child_after_fork<FD>(\n        mut self,\n        stderr: Option<FD>,\n        barrier: ipc::EventfdBarrier,\n        bind_mounts: &mut [(PathBuf, PathBuf, bool, Option<OwnedFd>)],\n    ) -> !\n    where\n        FD: AsFd,\n    {\n        // See above for how logging is implemented to avoid the possibility of\n        // allocation.\n        if let Some(fd) = &stderr {\n            let _ = rustix::stdio::dup2_stderr(fd.as_fd()); // Replace stderr.\n        }\n\n        // Tell the kernel to SIGKILL us when our parent (mmserver) dies. this\n        // is particularly important because we're PID 1, so the kernel won't\n        // kill on SIGINT/SIGQUIT/etc if the child process doesn't have a signal\n        // handler set up for them.\n        must!(set_parent_process_death_signal(Some(Signal::KILL)));\n\n        preexec_debug!(\"starting container setup\");\n\n        // Mount /proc first.\n        must!(mount_fs(\n            c\"proc\",\n            c\"/proc\",\n            MountAttrFlags::MOUNT_ATTR_NOEXEC\n                | MountAttrFlags::MOUNT_ATTR_NOSUID\n                | MountAttrFlags::MOUNT_ATTR_NODEV,\n            &[],\n        ));\n\n        // Collect detached mounts we want to bind-mount later. 
We can't\n        // allocate a vec, so we fill in the Options in the passed-in vec\n        // instead.\n        preexec_debug!(\"collecting detached bind mounts\");\n        for (src_path, _, _, ref mut device_fd) in bind_mounts.iter_mut() {\n            if src_path.exists() {\n                let fd = must!(detach_mount(src_path,));\n                *device_fd = Some(fd)\n            }\n        }\n\n        // Grab a detached mount for the temporary dir we're going to mount as\n        // XDG_RUNTIME_DIR.\n        let detached_run_fd = must!(detach_mount(&self.extern_run_path,));\n\n        // Grab a detached mount for home, if we're using one.\n        let detached_home = self\n            .extern_home_path\n            .as_ref()\n            .map(|p| must!(detach_mount(p)));\n\n        // Mount /dev and a few other filesystems.\n        must!(mount_fs(\n            c\"tmpfs\",\n            c\"/dev\",\n            MountAttrFlags::MOUNT_ATTR_NOEXEC | MountAttrFlags::MOUNT_ATTR_STRICTATIME,\n            &[(c\"mode\", c\"0755\")],\n        ));\n\n        must!(mount_fs(\n            c\"tmpfs\",\n            c\"/dev/shm\",\n            MountAttrFlags::MOUNT_ATTR_NOEXEC\n                | MountAttrFlags::MOUNT_ATTR_NOSUID\n                | MountAttrFlags::MOUNT_ATTR_NODEV,\n            &[(c\"mode\", c\"1777\"), (c\"size\", c\"512m\")],\n        ));\n\n        // TODO: this errors with EPERM.\n        // must!(mount_fs(\n        //     \"mqueue\",\n        //     \"/dev/mqueue\",\n        //     MountAttrFlags::MOUNT_ATTR_NOEXEC\n        //         | MountAttrFlags::MOUNT_ATTR_NOSUID\n        //         | MountAttrFlags::MOUNT_ATTR_NODEV,\n        //     &[],\n        // ));\n\n        must!(mount_fs(\n            c\"devpts\",\n            c\"/dev/pts\",\n            MountAttrFlags::MOUNT_ATTR_NOEXEC | MountAttrFlags::MOUNT_ATTR_NOSUID,\n            &[\n                (c\"newinstance\", c\"\"),\n                (c\"ptmxmode\", c\"0666\"),\n                
(c\"mode\", c\"0620\"),\n                // TODO: do we need to add a tty group?\n                // (\"gid\", \"5\"),\n            ],\n        ));\n\n        // Symlink /dev/fd -> /proc/self/fd, etc.\n        must!(symlinkat(c\"/proc/self/fd\", AT_FDCWD, c\"/dev/fd\"));\n        must!(symlinkat(c\"/proc/self/fd/0\", AT_FDCWD, c\"/dev/stdin\"));\n        must!(symlinkat(c\"/proc/self/fd/1\", AT_FDCWD, c\"/dev/stdout\"));\n        must!(symlinkat(c\"/proc/self/fd/2\", AT_FDCWD, c\"/dev/stderr\"));\n\n        // Prepare /dev/input.\n        must!(mkdirat(\n            AT_FDCWD,\n            \"/dev/input\",\n            Mode::from_bits(0o755).unwrap()\n        ));\n\n        must!(mount_fs(\n            c\"tmpfs\",\n            c\"/run/user\",\n            MountAttrFlags::MOUNT_ATTR_NOSUID\n                | MountAttrFlags::MOUNT_ATTR_NODEV\n                | MountAttrFlags::MOUNT_ATTR_RELATIME,\n            &[(c\"mode\", c\"0700\"), (c\"size\", c\"1g\")],\n        ));\n\n        must!(mount_fs(\n            c\"tmpfs\",\n            c\"/tmp\",\n            MountAttrFlags::MOUNT_ATTR_NOSUID | MountAttrFlags::MOUNT_ATTR_NOATIME,\n            &[(c\"mode\", c\"0777\"), (c\"size\", c\"1g\")],\n        ));\n\n        if self.clear_home {\n            must!(mount_fs(\n                c\"tmpfs\",\n                c\"/home\",\n                MountAttrFlags::MOUNT_ATTR_NOSUID\n                    | MountAttrFlags::MOUNT_ATTR_NOEXEC\n                    | MountAttrFlags::MOUNT_ATTR_NOATIME,\n                &[(c\"mode\", c\"0777\"), (c\"size\", c\"1g\")],\n            ));\n\n            must!(mkdirat(\n                AT_FDCWD,\n                &self.intern_home_path,\n                Mode::from_bits(0o700).unwrap()\n            ));\n        }\n\n        // Mount XDG_RUNTIME_DIR.\n        preexec_debug!(\n            \"bind-mounting {} to {}\",\n            self.extern_run_path.display(),\n            self.intern_run_path.display()\n        );\n\n        must!(mkdirat(AT_FDCWD, 
&self.intern_run_path, Mode::empty()));\n        must!(reattach_mount(detached_run_fd, &self.intern_run_path));\n\n        // Mount HOME.\n        if let Some(fd) = detached_home {\n            preexec_debug!(\n                \"bind-mounting {} to {}\",\n                self.extern_home_path.as_ref().unwrap().display(),\n                self.intern_home_path.display()\n            );\n\n            must!(reattach_mount(fd, &self.intern_home_path));\n        }\n\n        // Attach detached bind mounts, now that the filesystem is prepared.\n        for (_src_path, dst_path, is_dir, mount_fd) in bind_mounts {\n            if let Some(detached_mount_fd) = mount_fd.take() {\n                preexec_debug!(\n                    \"bind-mounting {} (outside) to {} (inside)\",\n                    _src_path.display(),\n                    dst_path.display()\n                );\n\n                if *is_dir {\n                    let _ = mkdirat(AT_FDCWD, &*dst_path, Mode::empty());\n                } else {\n                    must!(touch(&*dst_path, Mode::empty()));\n                }\n\n                must!(reattach_mount(detached_mount_fd, dst_path));\n            }\n        }\n\n        preexec_debug!(\"finished initial setup, waiting for mmserver\");\n\n        // Sync with mmserver.\n        must!(sync_barrier(&barrier));\n        must!(sync_barrier(&barrier));\n\n        // Finally, internal bind mounts. 
We do this after syncing with mmserver\n        // in case mmserver wants us to bind-mount something it just mounted.\n        for (src_path, dst_path, is_dir) in &self.internal_bind_mounts {\n            preexec_debug!(\n                \"bind-mounting {} to {}\",\n                src_path.display(),\n                dst_path.display()\n            );\n\n            let fd = must!(detach_mount(src_path));\n\n            if *is_dir {\n                let _ = mkdirat(AT_FDCWD, dst_path, Mode::empty());\n            } else {\n                must!(touch(dst_path, Mode::empty()));\n            }\n\n            must!(reattach_mount(fd, dst_path));\n        }\n\n        // TODO: Install seccomp handlers here.\n\n        // We don't trust std::os::Command's env handling, because sometimes\n        // it allocates.\n        libc::clearenv();\n        for v in &mut self.envs {\n            libc::putenv(v.as_ptr() as *mut _);\n        }\n\n        // If successful, this never returns.\n        let _e = self.child_cmd.exec();\n\n        preexec_debug!(\"execve failed: {_e}\");\n        libc::_exit(1);\n    }\n}\n\nfn set_uid_map(child_pid: i32, uid: rustix::fs::Uid, gid: rustix::fs::Gid) -> anyhow::Result<()> {\n    let uid = uid.as_raw();\n    let gid = gid.as_raw();\n\n    write(\n        OpenOptions::new()\n            .write(true)\n            .open(format!(\"/proc/{}/setgroups\", child_pid))?,\n        b\"deny\",\n    )\n    .context(\"failed to write setgroups=deny\")?;\n\n    write(\n        OpenOptions::new()\n            .write(true)\n            .open(format!(\"/proc/{}/uid_map\", child_pid))\n            .context(\"open failed\")?,\n        format!(\"{uid} {uid} 1\\n\").as_bytes(),\n    )\n    .context(\"failed to write uid_map\")?;\n\n    write(\n        OpenOptions::new()\n            .write(true)\n            .open(format!(\"/proc/{}/gid_map\", child_pid))\n            .context(\"open failed\")?,\n        format!(\"{gid} {gid} 1\\n\").as_bytes(),\n    )\n    
.context(\"failed to write gid_map\")?;\n\n    Ok(())\n}\n\nfn run_in_container<F>(ns_pidfd: impl AsFd, stderr: Option<BorrowedFd<'_>>, f: F) -> io::Result<()>\nwhere\n    F: FnOnce() -> io::Result<()>,\n{\n    let child_pid = unsafe { libc::fork() };\n    if child_pid == -1 {\n        return Err(io::Error::last_os_error());\n    } else if child_pid == 0 {\n        unsafe {\n            if let Some(fd) = &stderr {\n                let _ = rustix::stdio::dup2_stderr(fd.as_fd()); // Replace stderr.\n            }\n\n            must!(set_parent_process_death_signal(Some(Signal::KILL)));\n\n            must!(move_into_link_name_space(\n                ns_pidfd.as_fd(),\n                Some(LinkNameSpaceType::User)\n            ));\n\n            must!(move_into_link_name_space(\n                ns_pidfd.as_fd(),\n                Some(LinkNameSpaceType::Mount)\n            ));\n\n            if let Err(_e) = f() {\n                preexec_debug!(\"run_in_container: {_e}\");\n                libc::_exit(1);\n            }\n\n            libc::_exit(0);\n        }\n    }\n\n    loop {\n        match waitpid(\n            Some(Pid::from_raw(child_pid).unwrap()),\n            WaitOptions::empty(),\n        ) {\n            Ok(st) => match st {\n                Some((_, st)) if st.as_raw() == 0 => return Ok(()),\n                _ => return Err(io::Error::other(\"forked process exited with error\")),\n            },\n            Err(Errno::INTR) => continue,\n            Err(e) => return Err(e.into()),\n        }\n    }\n}\n\npub(super) fn fs_mount_into(\n    ns_pidfd: impl AsFd,\n    dst: impl AsRef<Path>,\n    fsname: String,\n    attr: MountAttrFlags,\n    options: &[(&CStr, &CStr)],\n) -> io::Result<()> {\n    debug!(\"mounting {fsname} to {}\", dst.as_ref().display());\n\n    let fsname = CString::new(fsname).unwrap();\n    let dst = CString::new(dst.as_ref().as_os_str().as_encoded_bytes()).unwrap();\n\n    run_in_container(ns_pidfd, None, move || {\n        
mount_fs(&fsname, &dst, attr, options)?;\n        Ok(())\n    })?;\n\n    Ok(())\n}\n\npub(super) fn fuse_mount_into(\n    ns_pidfd: impl AsFd,\n    dst: impl AsRef<Path>,\n    fsname: String,\n    st_mode: u32,\n) -> io::Result<OwnedFd> {\n    debug!(\"mounting {fsname} to {}\", dst.as_ref().display());\n\n    let (fd_tx, fd_rx) = ipc::fd_oneshot()?;\n    let uid = CString::new(format!(\"{}\", getuid().as_raw())).unwrap();\n    let gid = CString::new(format!(\"{}\", getgid().as_raw())).unwrap();\n    let rootmode = CString::new(format!(\"{st_mode:o}\")).unwrap();\n\n    let is_dir = FileType::from_raw_mode(st_mode) == FileType::Directory;\n\n    run_in_container(ns_pidfd, None, move || {\n        let fuse_device_fd = openat(\n            AT_FDCWD,\n            \"/dev/fuse\",\n            OFlags::RDWR | OFlags::CLOEXEC,\n            Mode::empty(),\n        )?;\n\n        // Send the fd back to mmserver.\n        fd_tx.send_timeout(fuse_device_fd.try_clone()?, SYNC_TIMEOUT)?;\n\n        // format! 
allocates.\n        let mut fd_buf = [0_u8; 32];\n        let fd_str = {\n            use std::io::Write;\n            write!(\n                &mut io::Cursor::new(&mut fd_buf[..]),\n                \"{}\",\n                fuse_device_fd.as_raw_fd()\n            )?;\n\n            CStr::from_bytes_until_nul(&fd_buf[..])\n                .map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, \"invalid FD\"))?\n        };\n\n        if is_dir {\n            let _ = mkdirat(AT_FDCWD, dst.as_ref(), Mode::from_raw_mode(st_mode));\n        } else {\n            let _ = touch(dst.as_ref(), Mode::from_raw_mode(st_mode));\n        }\n\n        let fsfd = fsopen(c\"fuse\", FsOpenFlags::FSOPEN_CLOEXEC)?;\n        fsconfig_set_string(fsfd.as_fd(), c\"fd\", fd_str)?;\n        fsconfig_set_string(fsfd.as_fd(), c\"user_id\", &uid)?;\n        fsconfig_set_string(fsfd.as_fd(), c\"group_id\", &gid)?;\n        fsconfig_set_string(fsfd.as_fd(), c\"rootmode\", &rootmode)?;\n        fsconfig_create(fsfd.as_fd())?;\n\n        let mount_fd = fsmount(\n            fsfd.as_fd(),\n            FsMountFlags::FSMOUNT_CLOEXEC,\n            MountAttrFlags::MOUNT_ATTR_NOEXEC\n                | MountAttrFlags::MOUNT_ATTR_NOSUID\n                | MountAttrFlags::MOUNT_ATTR_NODEV,\n        )?;\n\n        move_mount(\n            mount_fd.as_fd(),\n            c\"\",\n            AT_FDCWD,\n            dst.as_ref(),\n            MoveMountFlags::MOVE_MOUNT_F_EMPTY_PATH | MoveMountFlags::MOVE_MOUNT_T_SYMLINKS,\n        )?;\n\n        Ok(())\n    })?;\n\n    fd_rx.recv_timeout(SYNC_TIMEOUT)\n}\n\nfn touch(path: impl AsRef<Path>, mode: impl Into<Mode>) -> rustix::io::Result<()> {\n    let _ = openat(\n        AT_FDCWD,\n        path.as_ref(),\n        OFlags::WRONLY | OFlags::CREATE | OFlags::CLOEXEC,\n        mode.into(),\n    )?;\n\n    Ok(())\n}\n\nfn detach_mount(path: impl AsRef<Path>) -> rustix::io::Result<OwnedFd> {\n    open_tree(\n        AT_FDCWD,\n        path.as_ref(),\n        
OpenTreeFlags::OPEN_TREE_CLONE\n            | OpenTreeFlags::AT_RECURSIVE\n            | OpenTreeFlags::OPEN_TREE_CLOEXEC,\n    )\n}\n\nfn reattach_mount(fd: OwnedFd, path: impl AsRef<Path>) -> rustix::io::Result<()> {\n    move_mount(\n        fd.as_fd(),\n        \"\",\n        AT_FDCWD,\n        path.as_ref(),\n        MoveMountFlags::MOVE_MOUNT_F_EMPTY_PATH | MoveMountFlags::MOVE_MOUNT_T_SYMLINKS,\n    )\n}\n\nfn mount_fs(\n    fstype: &CStr,\n    dst: &CStr,\n    options: MountAttrFlags,\n    configs: &[(&CStr, &CStr)],\n) -> rustix::io::Result<()> {\n    preexec_debug!(\"mounting {fstype:?} on {dst:?}\");\n\n    let fsfd = fsopen(fstype, FsOpenFlags::FSOPEN_CLOEXEC)?;\n\n    for (k, v) in configs {\n        if v.is_empty() {\n            fsconfig_set_flag(fsfd.as_fd(), *k)?;\n        } else {\n            fsconfig_set_string(fsfd.as_fd(), *k, *v)?;\n        }\n    }\n\n    fsconfig_create(fsfd.as_fd())?;\n    let mount_fd = fsmount(fsfd.as_fd(), FsMountFlags::FSMOUNT_CLOEXEC, options)?;\n\n    let _ = mkdirat(AT_FDCWD, dst, Mode::empty());\n    move_mount(\n        mount_fd.as_fd(),\n        c\"\",\n        AT_FDCWD,\n        dst,\n        MoveMountFlags::MOVE_MOUNT_F_EMPTY_PATH | MoveMountFlags::MOVE_MOUNT_T_SYMLINKS,\n    )?;\n\n    Ok(())\n}\n\n// Wrapped in a function for compatibility with the must! 
macro.\nfn sync_barrier(barrier: &ipc::EventfdBarrier) -> rustix::io::Result<()> {\n    barrier.sync(SYNC_TIMEOUT)\n}\n\n/// Generates a CString in the format key=value, for putenv(3).\nfn make_putenv(k: impl AsRef<OsStr>, v: impl AsRef<OsStr>) -> CString {\n    CString::new(format!(\n        \"{}={}\",\n        k.as_ref().to_str().unwrap(),\n        v.as_ref().to_str().unwrap()\n    ))\n    .unwrap()\n}\n\n/// Validates an executable path, and returns the canonical version.\nfn validate_exe(p: impl AsRef<Path>) -> anyhow::Result<PathBuf> {\n    let p = p.as_ref();\n    if p.components().count() == 1 {\n        return find_executable_in_path(p)\n            .ok_or(anyhow!(\"command {:?} is not in PATH\", p.display()));\n    }\n\n    if !p.is_absolute() {\n        bail!(\"path {:?} must be absolute\", p.display());\n    } else if !p.exists() {\n        bail!(\"path {:?} does not exist\", p.display());\n    } else if !is_executable(p)? {\n        bail!(\"path {:?} is not executable\", p.display());\n    }\n\n    match p.canonicalize() {\n        Ok(p) => Ok(p),\n        Err(_) => bail!(\"invalid path: {:?}\", p.display()),\n    }\n}\n\n#[cfg(test)]\nmod test {\n    use std::{fs::File, io::Read as _};\n\n    use rustix::pipe::{pipe_with, PipeFlags};\n\n    use super::validate_exe;\n    use crate::{config::HomeIsolationMode, container::Container};\n\n    #[test_log::test]\n    fn echo() -> anyhow::Result<()> {\n        let mut container =\n            Container::new(vec![\"echo\".into(), \"done\".into()], HomeIsolationMode::Tmpfs)?;\n        let (pipe_rx, pipe_tx) = pipe_with(PipeFlags::CLOEXEC)?;\n        container.set_stdout(pipe_tx)?;\n\n        let mut child = container.spawn()?;\n        child.wait()?;\n\n        let mut buf = String::new();\n        File::from(pipe_rx).read_to_string(&mut buf)?;\n\n        pretty_assertions::assert_eq!(buf, \"done\\n\");\n        Ok(())\n    }\n\n    #[test]\n    fn test_validate_exe() {\n        let cat = 
pathsearch::find_executable_in_path(\"cat\").unwrap();\n        assert_eq!(cat, validate_exe(\"cat\").unwrap());\n\n        assert_eq!(\n            validate_exe(\"nonexistent\").unwrap_err().to_string(),\n            \"command \\\"nonexistent\\\" is not in PATH\",\n        );\n\n        assert_eq!(\n            validate_exe(\"foo/../bar\").unwrap_err().to_string(),\n            \"path \\\"foo/../bar\\\" must be absolute\",\n        );\n\n        assert_eq!(\n            validate_exe(\"/nonexistent\").unwrap_err().to_string(),\n            \"path \\\"/nonexistent\\\" does not exist\",\n        );\n\n        let f = mktemp::Temp::new_file().unwrap();\n        assert_eq!(\n            validate_exe(&f).unwrap_err().to_string(),\n            format!(\"path {:?} is not executable\", f.as_path().display())\n        )\n    }\n}\n"
  },
  {
    "path": "mm-server/src/container.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    ffi::CStr,\n    os::fd::{AsFd, BorrowedFd, OwnedFd},\n    path::{Path, PathBuf},\n};\n\nuse anyhow::{bail, Context as _};\nuse rustix::{\n    mount::MountAttrFlags,\n    process::{Pid, Signal, WaitId, WaitIdOptions},\n};\nuse tracing::{debug, info};\n\nmod ipc;\nmod runtime;\npub use runtime::Container;\n\n/// A handle to a running container.\npub struct ContainerHandle {\n    pid: Pid,\n    pidfd: OwnedFd,\n\n    run_path: PathBuf,\n}\n\nimpl AsFd for ContainerHandle {\n    fn as_fd(&self) -> BorrowedFd<'_> {\n        self.pidfd()\n    }\n}\n\nimpl ContainerHandle {\n    pub fn pid(&self) -> Pid {\n        self.pid\n    }\n\n    pub(crate) fn pidfd(&self) -> BorrowedFd<'_> {\n        self.pidfd.as_fd()\n    }\n\n    pub fn signal(&mut self, signal: Signal) -> anyhow::Result<()> {\n        debug!(?signal, pid = self.pid.as_raw_nonzero(), \"signaling child\");\n\n        rustix::process::pidfd_send_signal(self, signal).context(\"pidfd_send_signal\")?;\n        Ok(())\n    }\n\n    pub fn wait(&mut self) -> anyhow::Result<()> {\n        let exit_status =\n            rustix::process::waitid(WaitId::PidFd(self.as_fd()), WaitIdOptions::EXITED)\n                .context(\"waitid\")?\n                .and_then(|x| x.exit_status())\n                .unwrap_or_default();\n\n        info!(exit_status, \"child process exited\");\n        if exit_status != 0 {\n            bail!(\"child process exited with status: {exit_status}\");\n        }\n\n        Ok(())\n    }\n\n    /// Mounts a named filesystem inside the container at the given path.\n    pub fn fs_mount<S>(\n        &self,\n        dst: impl AsRef<Path>,\n        fstype: impl AsRef<str>,\n        attr: MountAttrFlags,\n        options: impl AsRef<[(S, S)]>,\n    ) -> anyhow::Result<()>\n    where\n        S: AsRef<CStr>,\n    {\n        let options = options\n            .as_ref()\n            
.iter()\n            .map(|(k, v)| (k.as_ref(), v.as_ref()))\n            .collect::<Vec<_>>();\n\n        runtime::fs_mount_into(&self.pidfd, dst, fstype.as_ref().to_owned(), attr, &options)?;\n        Ok(())\n    }\n\n    /// Opens /dev/fuse inside the container, mounts it to the given path,\n    /// and returns the FD for use in a FUSE daemon.\n    pub fn fuse_mount(\n        &self,\n        dst: impl AsRef<Path>,\n        fsname: impl AsRef<str>,\n        st_mode: u32,\n    ) -> anyhow::Result<OwnedFd> {\n        let fd = runtime::fuse_mount_into(&self.pidfd, &dst, fsname.as_ref().to_owned(), st_mode)?;\n\n        Ok(fd)\n    }\n}\n\nimpl Drop for ContainerHandle {\n    fn drop(&mut self) {\n        let _ = std::fs::remove_dir_all(&self.run_path);\n    }\n}\n"
  },
  {
    "path": "mm-server/src/encoder/dpb.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{collections::BTreeMap, sync::Arc};\n\nuse ash::vk;\n\nuse crate::vulkan::*;\n\n#[derive(Debug, Copy, Clone)]\npub struct DpbPicture {\n    pub image: vk::Image,\n    pub picture_resource_info: vk::VideoPictureResourceInfoKHR<'static>,\n    pub index: usize,\n    pub currently_active: bool,\n\n    free: bool,\n}\n\npub struct DpbPool {\n    _store: Vec<VkImage>,\n    slots: Vec<DpbPicture>,\n    ids: BTreeMap<u32, usize>,\n}\n\nimpl DpbPool {\n    /// Creates a DPB pool using one layer of a shared image for each picture.\n    /// Guaranteed to be supported, where distinct images are not, but otherwise\n    /// unoptimal and awkward.\n    pub fn new(\n        vk: Arc<VkContext>,\n        format: vk::Format,\n        width: u32,\n        height: u32,\n        profile: &mut vk::VideoProfileInfoKHR,\n        size: usize,\n    ) -> anyhow::Result<Self> {\n        let image = create_dpb_image(vk.clone(), profile, format, width, height, size as u32)?;\n\n        // Each array layer of the image is used as a separate slot, with a\n        // one-to-one correspondence between the layer index and the slot index.\n        let mut slots = Vec::with_capacity(size);\n        for i in 0..size {\n            slots.push(DpbPicture {\n                image: image.image,\n                picture_resource_info: vk::VideoPictureResourceInfoKHR::default()\n                    .image_view_binding(image.view)\n                    .coded_extent(vk::Extent2D { width, height })\n                    .base_array_layer(i as u32),\n                index: i,\n                currently_active: false,\n                free: true,\n            });\n        }\n\n        Ok(Self {\n            _store: vec![image],\n            slots,\n            ids: BTreeMap::new(),\n        })\n    }\n\n    /// Creates a dpb pool using separate images for each slot.\n    pub fn 
new_separate_images(\n        vk: Arc<VkContext>,\n        format: vk::Format,\n        width: u32,\n        height: u32,\n        profile: &mut vk::VideoProfileInfoKHR,\n        size: usize,\n    ) -> anyhow::Result<Self> {\n        let mut store = Vec::with_capacity(size);\n        let mut slots = Vec::with_capacity(size);\n        for i in 0..size {\n            let image = create_dpb_image(vk.clone(), profile, format, width, height, 1)?;\n\n            slots.push(DpbPicture {\n                image: image.image,\n                picture_resource_info: vk::VideoPictureResourceInfoKHR::default()\n                    .image_view_binding(image.view)\n                    .coded_extent(vk::Extent2D { width, height })\n                    .base_array_layer(0),\n                index: i,\n                currently_active: false,\n                free: true,\n            });\n\n            store.push(image);\n        }\n\n        Ok(Self {\n            _store: store,\n            slots,\n            ids: BTreeMap::new(),\n        })\n    }\n\n    /// Returns the index of a free slot and the backing picture resource for\n    /// it. Note that this does not mark the slot as active, or retain an\n    /// association between a picture ID and the slot. After the setup pic is\n    /// used in an encode operation, it should be marked as active if the\n    /// picture is a reference with `mark_active`.\n    pub fn setup_pic(&self) -> DpbPicture {\n        for slot in &self.slots {\n            if slot.free {\n                return *slot;\n            }\n        }\n\n        panic!(\"no free slots in the dpb\");\n    }\n\n    /// Retrieves the picture, along with its slot index, for a picture ID that\n    /// was previously passed to `mark_active`.\n    pub fn get_pic(&self, id: u32) -> Option<DpbPicture> {\n        self.ids.get(&id).map(|&slot| self.slots[slot])\n    }\n\n    /// Marks a slot as active, with the picture referenced by `id` stored in\n    /// it. 
Active slots are reserved until marked inactive, and will\n    /// not be overwritten.\n    ///\n    /// The pool maintains a mapping of IDs to slots, so that the slot can be\n    /// retrieved by ID. If an ID is reused, the previous slot is automatically\n    /// marked as free for re-use.\n    pub fn mark_active(&mut self, slot: usize, id: u32) {\n        self.slots[slot].currently_active = true;\n        self.slots[slot].free = false;\n        if let Some(old_slot) = self.ids.insert(id, slot) {\n            self.slots[old_slot].free = true;\n        }\n    }\n\n    /// Mark a slot as inactive. Inactive slots are always considered free.\n    pub fn mark_inactive(&mut self, slot: usize) {\n        self.slots[slot].currently_active = false;\n        self.slots[slot].free = true;\n    }\n\n    /// Mark all slots inactive.\n    pub fn clear(&mut self) {\n        self.ids.clear();\n        for slot in &mut self.slots {\n            slot.currently_active = false;\n            slot.free = true;\n        }\n    }\n}\n\nfn create_dpb_image(\n    vk: Arc<VkContext>,\n    profile: &mut vk::VideoProfileInfoKHR,\n    format: vk::Format,\n    width: u32,\n    height: u32,\n    layers: u32,\n) -> anyhow::Result<VkImage> {\n    let image = {\n        let mut profile_list_info = super::single_profile_list_info(profile);\n        let create_info = vk::ImageCreateInfo::default()\n            .image_type(vk::ImageType::TYPE_2D)\n            .format(format)\n            .extent(vk::Extent3D {\n                width,\n                height,\n                depth: 1,\n            })\n            .mip_levels(1)\n            .array_layers(layers)\n            .samples(vk::SampleCountFlags::TYPE_1)\n            .tiling(vk::ImageTiling::OPTIMAL)\n            .usage(vk::ImageUsageFlags::VIDEO_ENCODE_DPB_KHR)\n            .sharing_mode(vk::SharingMode::EXCLUSIVE)\n            .initial_layout(vk::ImageLayout::UNDEFINED)\n            .push_next(&mut profile_list_info);\n\n        unsafe { 
vk.device.create_image(&create_info, None)? }\n    };\n\n    let memory = unsafe { bind_memory_for_image(&vk.device, &vk.device_info.memory_props, image)? };\n\n    let view = {\n        let create_info = vk::ImageViewCreateInfo::default()\n            .image(image)\n            .view_type(vk::ImageViewType::TYPE_2D_ARRAY)\n            .format(format)\n            .components(vk::ComponentMapping {\n                r: vk::ComponentSwizzle::IDENTITY,\n                g: vk::ComponentSwizzle::IDENTITY,\n                b: vk::ComponentSwizzle::IDENTITY,\n                a: vk::ComponentSwizzle::IDENTITY,\n            })\n            .subresource_range(vk::ImageSubresourceRange {\n                aspect_mask: vk::ImageAspectFlags::COLOR,\n                base_mip_level: 0,\n                level_count: vk::REMAINING_MIP_LEVELS,\n                base_array_layer: 0,\n                layer_count: vk::REMAINING_ARRAY_LAYERS,\n            });\n\n        unsafe { vk.device.create_image_view(&create_info, None)? }\n    };\n\n    Ok(VkImage::wrap(\n        vk.clone(),\n        image,\n        view,\n        memory,\n        format,\n        width,\n        height,\n    ))\n}\n"
  },
  {
    "path": "mm-server/src/encoder/gop_structure.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct GopFrame {\n    pub stream_position: u64,\n    pub gop_position: u64,\n\n    pub id: u32,\n    /// The frame IDs this frame references.\n    pub ref_ids: Vec<u32>,\n    pub is_keyframe: bool,\n    /// The number of frames referencing this one.\n    pub forward_ref_count: u32,\n}\n\n/// This implements hierarchical P-coding, which looks like this:\n/// https://eymenkurdoglu.github.io/2016/07/01/hierp-one.html\n///\n/// This is also called a \"dyadic\" structure by the Vulkan spec (42.17.11. H.264\n/// Encode Rate Control).\n///\n/// Each frame references at most one other frame. The pattern repeats every\n/// (2^(layers-1)) frames, but an intra frame is only used once per GOP. Note\n/// that a 1-layer structure is equivalent to a flat P structure, with each\n/// frame referencing the one before.\npub struct HierarchicalP {\n    pub layers: u32,\n    pub gop_size: u32,\n    pub mini_gop_size: u32,\n\n    frame_num: u64,\n    gop_position: u64,\n    needs_refresh: bool,\n}\n\nimpl HierarchicalP {\n    pub fn new(layers: u32, gop_size: u32) -> Self {\n        assert!(layers > 0);\n        assert!(layers <= 5);\n\n        let mini_gop_size = 2_u32.pow(layers - 1);\n        assert_eq!(gop_size % mini_gop_size, 0);\n\n        Self {\n            layers,\n            gop_size,\n            mini_gop_size,\n            frame_num: 0,\n            gop_position: 0,\n            needs_refresh: true,\n        }\n    }\n\n    pub fn next_frame(&mut self) -> GopFrame {\n        let mini_gop_pos = (self.frame_num % self.mini_gop_size as u64) as u32;\n        let (layer, ref_layer) = if mini_gop_pos == 0 {\n            (0, 0)\n        } else {\n            let ref_pos = mini_gop_pos ^ (1 << mini_gop_pos.trailing_zeros());\n\n            (\n                temporal_layer(mini_gop_pos, self.layers),\n                
temporal_layer(ref_pos, self.layers),\n            )\n        };\n\n        let is_keyframe;\n        if self.needs_refresh && mini_gop_pos == 0 {\n            self.needs_refresh = false;\n\n            // Close the GOP, and start a new one.\n            self.gop_position = 0;\n            is_keyframe = true;\n        } else {\n            is_keyframe = false;\n        }\n\n        let ref_ids = if is_keyframe { vec![] } else { vec![ref_layer] };\n\n        let forward_ref_count = if layer == 0 {\n            // One for each layer above, plus the next mini-GOP.\n            self.layers\n        } else {\n            // One for each layer above.\n            self.layers - layer - 1\n        };\n\n        // We use the layer as the frame ID.\n        let frame = GopFrame {\n            stream_position: self.frame_num,\n            gop_position: self.gop_position,\n\n            id: layer,\n            ref_ids,\n            is_keyframe,\n            forward_ref_count,\n        };\n\n        self.frame_num += 1;\n        self.gop_position = (self.gop_position + 1) % (self.gop_size as u64);\n        frame\n    }\n\n    /// Causes a keyframe to be generated at the start of the next mini-GOP.\n    pub fn request_refresh(&mut self) {\n        self.needs_refresh = true\n    }\n\n    pub fn required_dpb_size(&self) -> usize {\n        // We should have one slot for each layer.\n        std::cmp::max(self.layers as usize, 2)\n    }\n\n    /// Returns the number of frames per second belonging to a particular layer\n    /// as a fractional number, given the layer and the total framerate.\n    pub fn layer_framerate(&self, layer: u32, base_framerate: u32) -> (u32, u32) {\n        if self.layers == 1 {\n            return (base_framerate, 1);\n        }\n\n        let frames_per_mini_gop = 2_u32.pow(layer.saturating_sub(1)); // 1, 1, 2, 4, 8, 16...\n        assert!(frames_per_mini_gop <= self.mini_gop_size / 2);\n\n        (base_framerate * frames_per_mini_gop, 
self.mini_gop_size)\n    }\n}\n\nfn temporal_layer(frame: u32, layers: u32) -> u32 {\n    if frame == 0 {\n        return 0;\n    }\n\n    layers - frame.trailing_zeros() - 1\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    #[test]\n    fn test_temporal_layer_4_layers() {\n        assert_eq!(temporal_layer(0, 4), 0);\n        assert_eq!(temporal_layer(1, 4), 3);\n        assert_eq!(temporal_layer(2, 4), 2);\n        assert_eq!(temporal_layer(3, 4), 3);\n        assert_eq!(temporal_layer(4, 4), 1);\n        assert_eq!(temporal_layer(5, 4), 3);\n        assert_eq!(temporal_layer(6, 4), 2);\n        assert_eq!(temporal_layer(7, 4), 3);\n    }\n\n    #[test]\n    fn test_gop() {\n        let mut structure = HierarchicalP::new(3, 60);\n        assert_eq!(structure.gop_size, 60);\n        assert_eq!(structure.mini_gop_size, 4);\n\n        let expected = [\n            GopFrame {\n                stream_position: 0,\n                gop_position: 0,\n                id: 0,\n                ref_ids: vec![],\n                is_keyframe: true,\n                forward_ref_count: 3,\n            },\n            GopFrame {\n                stream_position: 1,\n                gop_position: 1,\n                id: 2,\n                ref_ids: vec![0],\n                is_keyframe: false,\n                forward_ref_count: 0,\n            },\n            GopFrame {\n                stream_position: 2,\n                gop_position: 2,\n                id: 1,\n                ref_ids: vec![0],\n                is_keyframe: false,\n                forward_ref_count: 1,\n            },\n            GopFrame {\n                stream_position: 3,\n                gop_position: 3,\n                id: 2,\n                ref_ids: vec![1],\n                is_keyframe: false,\n                forward_ref_count: 0,\n            },\n            GopFrame {\n                stream_position: 4,\n                gop_position: 4,\n                id: 0,\n                ref_ids: 
vec![0],\n                is_keyframe: false,\n                forward_ref_count: 3,\n            },\n            GopFrame {\n                stream_position: 5,\n                gop_position: 5,\n                id: 2,\n                ref_ids: vec![0],\n                is_keyframe: false,\n                forward_ref_count: 0,\n            },\n            GopFrame {\n                stream_position: 6,\n                gop_position: 6,\n                id: 1,\n                ref_ids: vec![0],\n                is_keyframe: false,\n                forward_ref_count: 1,\n            },\n            GopFrame {\n                stream_position: 7,\n                gop_position: 7,\n                id: 2,\n                ref_ids: vec![1],\n                is_keyframe: false,\n                forward_ref_count: 0,\n            },\n        ];\n\n        for (i, frame) in expected.iter().enumerate() {\n            assert_eq!(structure.next_frame(), *frame, \"Frame {}\", i);\n        }\n    }\n\n    #[test]\n    fn test_flat() {\n        let mut structure = HierarchicalP::new(1, 60);\n\n        let expected = [\n            GopFrame {\n                stream_position: 0,\n                gop_position: 0,\n                id: 0,\n                ref_ids: vec![],\n                is_keyframe: true,\n                forward_ref_count: 1,\n            },\n            GopFrame {\n                stream_position: 1,\n                gop_position: 1,\n                id: 0,\n                ref_ids: vec![0],\n                is_keyframe: false,\n                forward_ref_count: 1,\n            },\n            GopFrame {\n                stream_position: 2,\n                gop_position: 2,\n                id: 0,\n                ref_ids: vec![0],\n                is_keyframe: false,\n                forward_ref_count: 1,\n            },\n            GopFrame {\n                stream_position: 3,\n                gop_position: 3,\n                id: 0,\n                
ref_ids: vec![0],\n                is_keyframe: false,\n                forward_ref_count: 1,\n            },\n        ];\n\n        for (i, frame) in expected.iter().enumerate() {\n            assert_eq!(structure.next_frame(), *frame, \"Frame {}\", i);\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/encoder/h264.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::sync::Arc;\n\nuse anyhow::{bail, Context};\nuse ash::vk;\nuse ash::vk::native::{\n    StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_420,\n    StdVideoH264PictureParameterSet, StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_0,\n    StdVideoH264SequenceParameterSet, StdVideoH264SequenceParameterSetVui,\n};\nuse bytes::Bytes;\nuse tracing::{debug, trace};\n\nuse super::gop_structure::HierarchicalP;\nuse super::rate_control::{self, RateControlMode};\nuse crate::codec::VideoCodec;\nuse crate::{color::VideoProfile, session::control::VideoStreamParams, vulkan::*};\n\nvk_chain! {\n    pub struct H264EncodeProfile<'a> {\n        pub profile_info: vk::VideoProfileInfoKHR<'a>,\n        pub encode_usage_info: vk::VideoEncodeUsageInfoKHR<'a>,\n        pub h264_profile: vk::VideoEncodeH264ProfileInfoEXT<'a>,\n    }\n}\n\nvk_chain! {\n    pub struct H264EncodeCapabilities<'a> {\n        pub video_caps: vk::VideoCapabilitiesKHR<'a>,\n        pub encode_caps: vk::VideoEncodeCapabilitiesKHR<'a>,\n        pub h264_caps: vk::VideoEncodeH264CapabilitiesEXT<'a>,\n    }\n}\n\nvk_chain! 
{\n    pub struct H264QualityLevelProperties<'a> {\n        pub props: vk::VideoEncodeQualityLevelPropertiesKHR<'a>,\n        pub h264_props: vk::VideoEncodeH264QualityLevelPropertiesEXT<'a>,\n    }\n}\n\n#[derive(Debug, Default, Clone, Copy)]\nstruct H264Metadata {\n    frame_num: u32,\n    pic_order_cnt: i32,\n}\n\npub struct H264Encoder {\n    inner: super::EncoderInner,\n    profile: H264EncodeProfile,\n    rc_mode: RateControlMode,\n\n    structure: HierarchicalP,\n    pic_metadata: Vec<H264Metadata>, // Indexed by layer.\n    idr_num: u32,\n    frame_num: u32,\n\n    headers: Bytes,\n}\n\nimpl H264Encoder {\n    pub fn new(\n        vk: Arc<VkContext>,\n        params: VideoStreamParams,\n        framerate: u32,\n        sink: impl super::Sink,\n    ) -> anyhow::Result<Self> {\n        let (video_loader, encode_loader) = vk.video_apis.as_ref().unwrap();\n\n        let op = vk::VideoCodecOperationFlagsKHR::ENCODE_H264_EXT;\n        let (profile, profile_idc) = match params.profile {\n            VideoProfile::Hd => (super::default_profile(op), 100),\n            VideoProfile::Hdr10 => (super::default_hdr10_profile(op), 110),\n        };\n\n        let h264_profile_info =\n            vk::VideoEncodeH264ProfileInfoEXT::default().std_profile_idc(profile_idc);\n\n        let mut profile = H264EncodeProfile::new(\n            profile,\n            super::default_encode_usage(vk.device_info.driver_version.clone()),\n            h264_profile_info,\n        );\n\n        let mut caps = H264EncodeCapabilities::default();\n\n        unsafe {\n            video_loader\n                .get_physical_device_video_capabilities(\n                    vk.device_info.pdevice,\n                    &profile.profile_info,\n                    caps.as_mut(),\n                )\n                .context(\"vkGetPhysicalDeviceVideoCapabilitiesKHR\")?;\n        };\n\n        trace!(\"video capabilities: {:#?}\", caps.video_caps);\n        trace!(\"encode capabilities: {:#?}\", 
caps.encode_caps);\n        trace!(\"h264 capabilities: {:#?}\", caps.h264_caps);\n\n        // unsafe {\n        //     let get_info =\n        // vk::PhysicalDeviceVideoEncodeQualityLevelInfoKHR::default()\n        //         .video_profile(&profile.profile_info)\n        //         .quality_level(quality_level);\n\n        //     encode_loader.get_physical_device_video_encode_quality_level_properties(\n        //         vk.device_info.pdevice,\n        //         &get_info,\n        //         quality_props.as_mut(),\n        //     )?;\n        // }\n\n        // trace!(\"quality level properties: {:#?}\", quality_props.props);\n        // trace!(\n        //     \"h264 quality level properties: {:#?}\",\n        //     quality_props.h264_props\n        // );\n\n        let structure = super::default_structure(\n            VideoCodec::H264,\n            caps.h264_caps\n                .max_temporal_layer_count\n                .min(caps.encode_caps.max_rate_control_layers),\n            caps.video_caps.max_dpb_slots,\n        )?;\n\n        let rc_mode = rate_control::select_rc_mode(\n            params,\n            &caps.encode_caps,\n            caps.h264_caps.min_qp.try_into().unwrap_or(17),\n            caps.h264_caps.max_qp.try_into().unwrap_or(50),\n            &structure,\n        );\n        debug!(?rc_mode, \"selected rate control mode\");\n\n        // TODO check more caps\n        // TODO autoselect level\n        let level_idc = vk::native::StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_5_2;\n        if caps.h264_caps.max_level_idc != 0 && caps.h264_caps.max_level_idc < level_idc {\n            bail!(\"video resolution too large for hardware\");\n        }\n\n        assert_eq!(\n            caps.video_caps.picture_access_granularity.width,\n            caps.video_caps.picture_access_granularity.height\n        );\n\n        let mb_width = caps.video_caps.picture_access_granularity.width;\n        let mb_height = 
caps.video_caps.picture_access_granularity.height;\n        trace!(\"mb size: {mb_width}x{mb_height}\");\n\n        let aligned_width = params.width.next_multiple_of(mb_width);\n        let aligned_height = params.height.next_multiple_of(mb_height);\n\n        trace!(\n            \"aligned width: {}, height: {}\",\n            aligned_width,\n            aligned_height\n        );\n\n        // Divide by two because of chroma subsampling, I guess?\n        let crop_right = (aligned_width - params.width) / 2;\n        let crop_bottom = (aligned_height - params.height) / 2;\n\n        trace!(\"crop right: {}, bottom: {}\", crop_right, crop_bottom);\n\n        let (colour_primaries, transfer_characteristics, matrix_coefficients) = match params.profile\n        {\n            VideoProfile::Hd => (1, 1, 1),\n            VideoProfile::Hdr10 => (9, 16, 9),\n        };\n\n        let mut vui = StdVideoH264SequenceParameterSetVui {\n            colour_primaries,\n            transfer_characteristics,\n            matrix_coefficients,\n            // Unspecified.\n            video_format: 5,\n            ..unsafe { std::mem::zeroed() }\n        };\n\n        vui.flags.set_video_signal_type_present_flag(1);\n        vui.flags.set_video_full_range_flag(0); // Narrow range.\n        vui.flags.set_color_description_present_flag(1);\n\n        let log2_max_frame_num_minus4 = structure\n            .gop_size\n            .next_power_of_two()\n            .ilog2()\n            .saturating_sub(4) as u8;\n\n        let bit_depth = match params.profile {\n            VideoProfile::Hd => 8,\n            VideoProfile::Hdr10 => 10,\n        };\n\n        let mut sps = StdVideoH264SequenceParameterSet {\n            profile_idc,\n            level_idc,\n            chroma_format_idc: StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_420,\n\n            bit_depth_chroma_minus8: bit_depth - 8,\n            bit_depth_luma_minus8: bit_depth - 8,\n\n            max_num_ref_frames: 
1,\n            pic_order_cnt_type: StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_0,\n            log2_max_pic_order_cnt_lsb_minus4: log2_max_frame_num_minus4,\n            log2_max_frame_num_minus4,\n            pic_width_in_mbs_minus1: (aligned_width / mb_width) - 1,\n            pic_height_in_map_units_minus1: (aligned_height / mb_height) - 1,\n            frame_crop_right_offset: crop_right,\n            frame_crop_bottom_offset: crop_bottom,\n\n            pSequenceParameterSetVui: <*const _>::cast(&vui),\n            ..unsafe { std::mem::zeroed() }\n        };\n\n        sps.flags.set_vui_parameters_present_flag(1);\n        sps.flags.set_frame_mbs_only_flag(1);\n        if crop_right > 0 || crop_bottom > 0 {\n            sps.flags.set_frame_cropping_flag(1);\n        }\n\n        let pps = StdVideoH264PictureParameterSet {\n            ..unsafe { std::mem::zeroed() }\n        };\n\n        let sps = [sps];\n        let pps = [pps];\n\n        let h264_add_info = vk::VideoEncodeH264SessionParametersAddInfoEXT::default()\n            .std_sp_ss(&sps)\n            .std_pp_ss(&pps);\n        let mut session_params = vk::VideoEncodeH264SessionParametersCreateInfoEXT::default()\n            .parameters_add_info(&h264_add_info)\n            .max_std_pps_count(1)\n            .max_std_sps_count(1);\n\n        let inner = super::EncoderInner::new(\n            vk.clone(),\n            params.width,\n            params.height,\n            framerate,\n            structure.required_dpb_size(),\n            profile.as_mut(),\n            caps.video_caps,\n            &mut session_params,\n            sink,\n        )?;\n\n        // Generate encoded stream headers.\n        let headers = unsafe {\n            let mut h264_get_info = vk::VideoEncodeH264SessionParametersGetInfoEXT::default()\n                .write_std_sps(true)\n                .write_std_pps(true);\n\n            let mut h264_feedback_info =\n                
vk::VideoEncodeH264SessionParametersFeedbackInfoEXT::default();\n\n            let mut feedback_info = vk::VideoEncodeSessionParametersFeedbackInfoKHR::default()\n                .push_next(&mut h264_feedback_info);\n\n            let get_info = vk::VideoEncodeSessionParametersGetInfoKHR::default()\n                .video_session_parameters(inner.session_params)\n                .push_next(&mut h264_get_info);\n\n            encode_loader\n                .get_encoded_video_session_parameters(&get_info, &mut feedback_info)\n                .context(\"vkGetEncodedVideoSessionParametersKHR\")?\n        };\n\n        if headers.is_empty() {\n            bail!(\"failed to generate sps/pps\");\n        } else {\n            trace!(\"generated {} bytes of h264 headers\", headers.len());\n        }\n\n        let pic_metadata = vec![H264Metadata::default(); structure.layers as usize];\n\n        Ok(Self {\n            inner,\n            profile,\n            rc_mode,\n            structure,\n            pic_metadata,\n            idr_num: 0,\n            frame_num: 0,\n            headers: Bytes::copy_from_slice(&headers),\n        })\n    }\n\n    pub unsafe fn submit_encode(\n        &mut self,\n        input: &VkImage,\n        tp_acquire: VkTimelinePoint,\n        tp_release: VkTimelinePoint,\n    ) -> anyhow::Result<()> {\n        let frame_state = self.structure.next_frame();\n        if frame_state.is_keyframe {\n            self.idr_num += 1;\n        }\n\n        if frame_state.gop_position == 0 {\n            self.frame_num = 0;\n        }\n\n        let pattern = if self.structure.layers > 1 {\n            vk::VideoEncodeH264RateControlFlagsEXT::TEMPORAL_LAYER_PATTERN_DYADIC\n        } else {\n            vk::VideoEncodeH264RateControlFlagsEXT::REFERENCE_PATTERN_FLAT\n        };\n\n        let mut h264_rc_layers = Vec::new();\n        let mut rc_layers = Vec::new();\n\n        if let RateControlMode::Vbr(vbr) = self.rc_mode {\n            let layer_settings = 
(0..self.structure.layers)\n                .map(|layer| vbr.layer(layer))\n                .collect::<Vec<_>>();\n\n            for settings in &layer_settings {\n                h264_rc_layers.push(\n                    vk::VideoEncodeH264RateControlLayerInfoEXT::default()\n                        .use_min_qp(true)\n                        .use_max_qp(true)\n                        .min_qp(vk::VideoEncodeH264QpEXT {\n                            qp_i: settings.min_qp as i32,\n                            qp_p: settings.min_qp as i32,\n                            qp_b: settings.min_qp as i32,\n                        })\n                        .max_qp(vk::VideoEncodeH264QpEXT {\n                            qp_i: settings.max_qp as i32,\n                            qp_p: settings.max_qp as i32,\n                            qp_b: settings.max_qp as i32,\n                        }),\n                );\n            }\n\n            // We can't do this in one step because the borrow checker doesn't\n            // like the way push_next borrows.\n            // TODO: Ash 0.39 may make this easier.\n            for (layer, (settings, h264)) in layer_settings\n                .iter()\n                .zip(h264_rc_layers.iter_mut())\n                .enumerate()\n            {\n                let (fps_numerator, fps_denominator) = self\n                    .structure\n                    .layer_framerate(layer as u32, self.inner.framerate);\n\n                rc_layers.push(\n                    vk::VideoEncodeRateControlLayerInfoKHR::default()\n                        .max_bitrate(settings.peak_bitrate)\n                        .average_bitrate(settings.average_bitrate)\n                        .frame_rate_numerator(fps_numerator)\n                        .frame_rate_denominator(fps_denominator)\n                        .push_next(h264),\n                );\n            }\n        }\n\n        let mut h264_rc_info = vk::VideoEncodeH264RateControlInfoEXT::default()\n     
       .gop_frame_count(self.structure.gop_size)\n            .idr_period(self.structure.gop_size)\n            .consecutive_b_frame_count(0)\n            .temporal_layer_count(rc_layers.len() as u32)\n            .flags(vk::VideoEncodeH264RateControlFlagsEXT::REGULAR_GOP | pattern);\n\n        let vbv_size = match self.rc_mode {\n            RateControlMode::Vbr(vbr) => vbr.vbv_size_ms,\n            _ => 0,\n        };\n\n        let mut rc_info = vk::VideoEncodeRateControlInfoKHR::default()\n            .rate_control_mode(self.rc_mode.as_vk_flags())\n            .virtual_buffer_size_in_ms(vbv_size)\n            .layers(&rc_layers);\n\n        // Doesn't have a push_next method, because we're supposed to call it on the\n        // parent struct.\n        rc_info.p_next = <*mut _>::cast(&mut h264_rc_info);\n\n        let weight_table: vk::native::StdVideoEncodeH264WeightTable = std::mem::zeroed();\n\n        let slice_type = if frame_state.is_keyframe {\n            vk::native::StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_I\n        } else {\n            vk::native::StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_P\n        };\n\n        let primary_pic_type = if frame_state.is_keyframe {\n            vk::native::StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_IDR\n        } else {\n            vk::native::StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_P\n        };\n\n        let mut std_slice_header = vk::native::StdVideoEncodeH264SliceHeader {\n            slice_type,\n            pWeightTable: &weight_table,\n            ..std::mem::zeroed()\n        };\n\n        // Per the spec, this indicates that all slices in the picture are the same.\n        std_slice_header.slice_type += 5;\n\n        let nalu_slice_entries = [vk::VideoEncodeH264NaluSliceInfoEXT::default()\n            .std_slice_header(&std_slice_header)\n            .constant_qp(if let RateControlMode::ConstantQp(qp) = self.rc_mode {\n                qp.layer(frame_state.id) as i32\n       
     } else {\n                0\n            })];\n\n        let list0_mod_ops = std::mem::zeroed();\n        let list1_mod_ops = std::mem::zeroed();\n        let marking_ops = std::mem::zeroed();\n\n        let mut ref_lists_info = vk::native::StdVideoEncodeH264ReferenceListsInfo {\n            pRefList0ModOperations: &list0_mod_ops,\n            pRefList1ModOperations: &list1_mod_ops,\n            pRefPicMarkingOperations: &marking_ops,\n            RefPicList0: [u8::MAX; 32],\n            RefPicList1: [u8::MAX; 32],\n            ..std::mem::zeroed()\n        };\n\n        // Point to the references.\n        for (idx, id) in frame_state.ref_ids.iter().enumerate() {\n            let slot = self\n                .inner\n                .dpb\n                .get_pic(*id)\n                .ok_or(anyhow::anyhow!(\"ref pic {id} missing from dpb\",))?;\n            ref_lists_info.RefPicList0[idx] = slot.index as u8;\n        }\n\n        let mut std_pic_info = vk::native::StdVideoEncodeH264PictureInfo {\n            flags: std::mem::zeroed(),\n            seq_parameter_set_id: 0,\n            pic_parameter_set_id: 0,\n            idr_pic_id: self.idr_num as u16,\n            primary_pic_type,\n            frame_num: self.frame_num,\n            PicOrderCnt: frame_state.gop_position as i32,\n            temporal_id: frame_state.id as u8,\n            pRefLists: &ref_lists_info,\n            ..std::mem::zeroed()\n        };\n\n        std_pic_info\n            .flags\n            .set_IdrPicFlag(frame_state.is_keyframe as u32);\n        std_pic_info\n            .flags\n            .set_is_reference((frame_state.forward_ref_count > 0) as u32);\n\n        let mut h264_pic_info = vk::VideoEncodeH264PictureInfoEXT::default()\n            .nalu_slice_entries(&nalu_slice_entries)\n            .std_picture_info(&std_pic_info);\n\n        let mut std_ref_infos = frame_state\n            .ref_ids\n            .iter()\n            .map(|id| 
vk::native::StdVideoEncodeH264ReferenceInfo {\n                FrameNum: self.pic_metadata[*id as usize].frame_num,\n                PicOrderCnt: self.pic_metadata[*id as usize].pic_order_cnt,\n                temporal_id: *id as u8,\n                ..std::mem::zeroed()\n            })\n            .collect::<Vec<_>>();\n\n        let mut ref_info = std_ref_infos\n            .iter_mut()\n            .map(|info| vk::VideoEncodeH264DpbSlotInfoEXT::default().std_reference_info(info))\n            .collect::<Vec<_>>();\n\n        let setup_std_ref_info = vk::native::StdVideoEncodeH264ReferenceInfo {\n            FrameNum: self.frame_num,\n            PicOrderCnt: frame_state.gop_position as i32,\n            temporal_id: frame_state.id as u8,\n            ..std::mem::zeroed()\n        };\n\n        trace!(\n            frame_num = self.frame_num,\n            pic_order_cnt = frame_state.gop_position,\n            \"setting up h264 pic\"\n        );\n\n        let mut setup_info =\n            vk::VideoEncodeH264DpbSlotInfoEXT::default().std_reference_info(&setup_std_ref_info);\n\n        let insert = if frame_state.is_keyframe {\n            Some(self.headers.clone())\n        } else {\n            None\n        };\n\n        self.inner.submit_encode(\n            input,\n            tp_acquire,\n            tp_release,\n            &frame_state,\n            &mut rc_info,\n            &mut h264_pic_info,\n            &mut setup_info,\n            &mut ref_info,\n            insert,\n        )?;\n\n        // Save the reference info for the DPB slot we just wrote.\n        self.pic_metadata[frame_state.id as usize] = H264Metadata {\n            frame_num: self.frame_num,\n            pic_order_cnt: frame_state.gop_position as i32,\n        };\n\n        // This is supposed to increment only for reference frames.\n        if frame_state.forward_ref_count > 0 {\n            self.frame_num += 1;\n        }\n\n        Ok(())\n    }\n\n    pub fn input_format(&self) -> 
vk::Format {\n        self.inner.input_format\n    }\n\n    pub fn create_input_image(&mut self) -> anyhow::Result<VkImage> {\n        self.inner.create_input_image(self.profile.as_mut())\n    }\n\n    pub fn request_refresh(&mut self) {\n        self.structure.request_refresh()\n    }\n}\n"
  },
  {
    "path": "mm-server/src/encoder/h265.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::sync::Arc;\n\nuse anyhow::{bail, Context};\nuse ash::vk;\nuse bytes::Bytes;\nuse tracing::{debug, trace};\n\nuse super::gop_structure::HierarchicalP;\nuse super::rate_control::{self, RateControlMode};\nuse crate::codec::VideoCodec;\nuse crate::color::VideoProfile;\nuse crate::{session::control::VideoStreamParams, vulkan::*};\n\nvk_chain! {\n    pub struct H265EncodeProfile<'a> {\n        pub profile_info: vk::VideoProfileInfoKHR<'a>,\n        pub encode_usage_info: vk::VideoEncodeUsageInfoKHR<'a>,\n        pub h265_profile: vk::VideoEncodeH265ProfileInfoEXT<'a>,\n    }\n}\n\nvk_chain! {\n    pub struct H265EncodeCapabilities<'a> {\n        pub video_caps: vk::VideoCapabilitiesKHR<'a>,\n        pub encode_caps: vk::VideoEncodeCapabilitiesKHR<'a>,\n        pub h265_caps: vk::VideoEncodeH265CapabilitiesEXT<'a>,\n    }\n}\n\nvk_chain! {\n    pub struct H265QualityLevelProperties<'a> {\n        pub props: vk::VideoEncodeQualityLevelPropertiesKHR<'a>,\n        pub h265_props: vk::VideoEncodeH265QualityLevelPropertiesEXT<'a>,\n    }\n}\n\n#[derive(Debug, Default, Clone, Copy)]\nstruct H265Metadata {\n    pic_type: u32,\n    pic_order_cnt: i32,\n    ref_count: u32,\n}\n\npub struct H265Encoder {\n    inner: super::EncoderInner,\n    profile: H265EncodeProfile,\n    rc_mode: super::rate_control::RateControlMode,\n\n    structure: HierarchicalP,\n    pic_metadata: Vec<H265Metadata>, // Indexed by layer.\n    idr_num: u32,\n    frame_num: u32,\n\n    headers: Bytes,\n}\n\nimpl H265Encoder {\n    pub fn new(\n        vk: Arc<VkContext>,\n        params: VideoStreamParams,\n        framerate: u32,\n        sink: impl super::Sink,\n    ) -> anyhow::Result<Self> {\n        let (video_loader, encode_loader) = vk.video_apis.as_ref().unwrap();\n\n        let op = vk::VideoCodecOperationFlagsKHR::ENCODE_H265_EXT;\n        let (profile, profile_idc) = match 
params.profile {\n            VideoProfile::Hd => (super::default_profile(op), 1), // Main\n            VideoProfile::Hdr10 => (super::default_hdr10_profile(op), 2), // Main10\n        };\n\n        let h265_profile_info =\n            vk::VideoEncodeH265ProfileInfoEXT::default().std_profile_idc(profile_idc);\n\n        let mut profile = H265EncodeProfile::new(\n            profile,\n            super::default_encode_usage(vk.device_info.driver_version.clone()),\n            h265_profile_info,\n        );\n\n        let mut caps = H265EncodeCapabilities::default();\n\n        unsafe {\n            video_loader\n                .get_physical_device_video_capabilities(\n                    vk.device_info.pdevice,\n                    &profile.profile_info,\n                    caps.as_mut(),\n                )\n                .context(\"vkGetPhysicalDeviceVideoCapabilitiesKHR\")?;\n        };\n\n        trace!(\"video capabilities: {:#?}\", caps.video_caps);\n        trace!(\"encode capabilities: {:#?}\", caps.encode_caps);\n        trace!(\"h265 capabilities: {:#?}\", caps.h265_caps);\n\n        let quality_level = caps.encode_caps.max_quality_levels - 1;\n        let mut quality_props = H265QualityLevelProperties::default();\n\n        unsafe {\n            let get_info = vk::PhysicalDeviceVideoEncodeQualityLevelInfoKHR::default()\n                .video_profile(&profile.profile_info)\n                .quality_level(quality_level);\n\n            encode_loader.get_physical_device_video_encode_quality_level_properties(\n                vk.device_info.pdevice,\n                &get_info,\n                quality_props.as_mut(),\n            )?;\n        }\n\n        trace!(\"quality level properties: {:#?}\", quality_props.props);\n        trace!(\n            \"h265 quality level properties: {:#?}\",\n            quality_props.h265_props\n        );\n\n        let structure = super::default_structure(\n            VideoCodec::H265,\n            caps.h265_caps\n     
           .max_sub_layer_count\n                .min(caps.encode_caps.max_rate_control_layers),\n            caps.video_caps.max_dpb_slots,\n        )?;\n\n        let rc_mode = rate_control::select_rc_mode(\n            params,\n            &caps.encode_caps,\n            caps.h265_caps.min_qp.try_into().unwrap_or(17),\n            caps.h265_caps.max_qp.try_into().unwrap_or(50),\n            &structure,\n        );\n\n        debug!(?rc_mode, \"selected rate control mode\");\n\n        // TODO check more caps\n        // TODO autoselect level\n        let level_idc = vk::native::StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_5_2;\n        if caps.h265_caps.max_level_idc != 0 && caps.h265_caps.max_level_idc < level_idc {\n            bail!(\"video resolution too large for hardware\");\n        }\n\n        const CTB_SIZES: [(vk::VideoEncodeH265CtbSizeFlagsEXT, usize); 3] = [\n            (vk::VideoEncodeH265CtbSizeFlagsEXT::TYPE_16, 16),\n            (vk::VideoEncodeH265CtbSizeFlagsEXT::TYPE_32, 32),\n            (vk::VideoEncodeH265CtbSizeFlagsEXT::TYPE_64, 64),\n        ];\n\n        let min_ctb = CTB_SIZES\n            .iter()\n            .filter(|(flag, _)| caps.h265_caps.ctb_sizes.contains(*flag))\n            .map(|(_, size)| *size)\n            .min()\n            .expect(\"no ctb size found\");\n\n        let max_ctb = CTB_SIZES\n            .iter()\n            .filter(|(flag, _)| caps.h265_caps.ctb_sizes.contains(*flag))\n            .map(|(_, size)| *size)\n            .max()\n            .expect(\"no ctb size found\");\n\n        const TBS_SIZES: [(vk::VideoEncodeH265TransformBlockSizeFlagsEXT, usize); 4] = [\n            (vk::VideoEncodeH265TransformBlockSizeFlagsEXT::TYPE_4, 4),\n            (vk::VideoEncodeH265TransformBlockSizeFlagsEXT::TYPE_8, 8),\n            (vk::VideoEncodeH265TransformBlockSizeFlagsEXT::TYPE_16, 16),\n            (vk::VideoEncodeH265TransformBlockSizeFlagsEXT::TYPE_32, 32),\n        ];\n\n        let min_tbs = TBS_SIZES\n      
      .iter()\n            .filter(|(flag, _)| caps.h265_caps.transform_block_sizes.contains(*flag))\n            .map(|(_, size)| *size)\n            .min()\n            .expect(\"no tbs size found\");\n\n        let max_tbs = TBS_SIZES\n            .iter()\n            .filter(|(flag, _)| caps.h265_caps.transform_block_sizes.contains(*flag))\n            .map(|(_, size)| *size)\n            .max()\n            .expect(\"no tbs size found\");\n\n        let aligned_width = params\n            .width\n            .next_multiple_of(caps.encode_caps.encode_input_picture_granularity.width);\n        let aligned_height = params\n            .height\n            .next_multiple_of(caps.encode_caps.encode_input_picture_granularity.height);\n\n        trace!(\n            min_ctb,\n            max_ctb,\n            min_tbs,\n            max_tbs,\n            aligned_width,\n            aligned_height,\n            \"block sizes\",\n        );\n\n        let crop_right = (aligned_width - params.width) / 2;\n        let crop_bottom = (aligned_height - params.height) / 2;\n\n        trace!(\"crop right: {}, bottom: {}\", crop_right, crop_bottom);\n\n        let (colour_primaries, transfer_characteristics, matrix_coeffs) = match params.profile {\n            VideoProfile::Hd => (1, 1, 1),\n            VideoProfile::Hdr10 => (9, 16, 9),\n        };\n\n        let mut vui = vk::native::StdVideoH265SequenceParameterSetVui {\n            colour_primaries,\n            transfer_characteristics,\n            matrix_coeffs,\n            // Unspecified.\n            video_format: 5,\n            ..unsafe { std::mem::zeroed() }\n        };\n\n        vui.flags.set_video_signal_type_present_flag(1);\n        vui.flags.set_colour_description_present_flag(1);\n        vui.flags.set_video_full_range_flag(0); // Narrow range.\n\n        let ptl = vk::native::StdVideoH265ProfileTierLevel {\n            general_profile_idc: profile_idc,\n            general_level_idc: level_idc,\n            
..unsafe { std::mem::zeroed() }\n        };\n\n        // ptl.flags.set_general_progressive_source_flag(1);\n        // ptl.flags.set_general_interlaced_source_flag(0);\n\n        let layers_minus_1 = (structure.layers - 1) as u8;\n        let mut pbm: vk::native::StdVideoH265DecPicBufMgr = unsafe { std::mem::zeroed() };\n        pbm.max_dec_pic_buffering_minus1[layers_minus_1 as usize] =\n            (structure.required_dpb_size() - 1) as u8;\n        // No picture reordering.\n        pbm.max_num_reorder_pics[layers_minus_1 as usize] = 0;\n        pbm.max_latency_increase_plus1[layers_minus_1 as usize] = 0;\n\n        let mut vps = vk::native::StdVideoH265VideoParameterSet {\n            vps_max_sub_layers_minus1: layers_minus_1,\n            pDecPicBufMgr: &pbm,\n            pHrdParameters: std::ptr::null(),\n            pProfileTierLevel: &ptl,\n            ..unsafe { std::mem::zeroed() }\n        };\n\n        vps.flags.set_vps_sub_layer_ordering_info_present_flag(1);\n        vps.flags.set_vps_temporal_id_nesting_flag(1);\n\n        let min_cb = 8_u8;\n        let max_cb = max_ctb;\n\n        let max_transform_hierarchy_depth = (max_ctb.ilog2() - min_tbs.ilog2()) as u8;\n\n        let bit_depth = match params.profile {\n            VideoProfile::Hd => 8,\n            VideoProfile::Hdr10 => 10,\n        };\n\n        let mut sps = vk::native::StdVideoH265SequenceParameterSet {\n            chroma_format_idc:\n                vk::native::StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_420,\n            pic_width_in_luma_samples: aligned_width,\n            pic_height_in_luma_samples: aligned_height,\n            sps_max_sub_layers_minus1: layers_minus_1,\n            bit_depth_luma_minus8: bit_depth - 8,\n            bit_depth_chroma_minus8: bit_depth - 8,\n            log2_max_pic_order_cnt_lsb_minus4: 4,\n            log2_min_luma_coding_block_size_minus3: (min_cb.ilog2() - 3) as u8,\n            log2_diff_max_min_luma_coding_block_size: 
(max_cb.ilog2() - min_cb.ilog2()) as u8,\n            log2_min_luma_transform_block_size_minus2: (min_tbs.ilog2() - 2) as u8,\n            log2_diff_max_min_luma_transform_block_size: (max_tbs.ilog2() - min_tbs.ilog2()) as u8,\n            max_transform_hierarchy_depth_inter: max_transform_hierarchy_depth,\n            max_transform_hierarchy_depth_intra: max_transform_hierarchy_depth,\n            conf_win_right_offset: crop_right,\n            conf_win_bottom_offset: crop_bottom,\n            pProfileTierLevel: &ptl,\n            pDecPicBufMgr: &pbm,\n            pSequenceParameterSetVui: &vui,\n            ..unsafe { std::mem::zeroed() }\n        };\n\n        sps.flags.set_conformance_window_flag(1);\n        sps.flags.set_vui_parameters_present_flag(1);\n        sps.flags.set_sps_temporal_id_nesting_flag(1);\n        sps.flags.set_sps_sub_layer_ordering_info_present_flag(1);\n\n        if caps\n            .h265_caps\n            .std_syntax_flags\n            .contains(vk::VideoEncodeH265StdFlagsEXT::SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET)\n        {\n            sps.flags.set_sample_adaptive_offset_enabled_flag(1);\n        }\n\n        if caps\n            .h265_caps\n            .std_syntax_flags\n            .contains(vk::VideoEncodeH265StdFlagsEXT::TRANSFORM_SKIP_ENABLED_FLAG_SET)\n        {\n            sps.flags.set_transform_skip_context_enabled_flag(1);\n        }\n\n        let pps = vk::native::StdVideoH265PictureParameterSet {\n            ..unsafe { std::mem::zeroed() }\n        };\n\n        let sps = [sps];\n        let pps = [pps];\n        let vps = [vps];\n\n        let h265_add_info = vk::VideoEncodeH265SessionParametersAddInfoEXT::default()\n            .std_vp_ss(&vps)\n            .std_sp_ss(&sps)\n            .std_pp_ss(&pps);\n        let mut session_params = vk::VideoEncodeH265SessionParametersCreateInfoEXT::default()\n            .parameters_add_info(&h265_add_info)\n            .max_std_vps_count(1)\n            
.max_std_pps_count(1)\n            .max_std_sps_count(1);\n\n        let inner = super::EncoderInner::new(\n            vk.clone(),\n            params.width,\n            params.height,\n            framerate,\n            structure.required_dpb_size(),\n            profile.as_mut(),\n            caps.video_caps,\n            &mut session_params,\n            sink,\n        )?;\n\n        // Generate encoded stream headers.\n        let headers = unsafe {\n            let mut h265_get_info = vk::VideoEncodeH265SessionParametersGetInfoEXT::default()\n                .write_std_vps(true)\n                .write_std_sps(true)\n                .write_std_pps(true);\n\n            let mut h265_feedback_info =\n                vk::VideoEncodeH265SessionParametersFeedbackInfoEXT::default();\n\n            let mut feedback_info = vk::VideoEncodeSessionParametersFeedbackInfoKHR::default()\n                .push_next(&mut h265_feedback_info);\n\n            let get_info = vk::VideoEncodeSessionParametersGetInfoKHR::default()\n                .video_session_parameters(inner.session_params)\n                .push_next(&mut h265_get_info);\n\n            encode_loader\n                .get_encoded_video_session_parameters(&get_info, &mut feedback_info)\n                .context(\"vkGetEncodedVideoSessionParametersKHR\")?\n        };\n\n        if headers.is_empty() {\n            bail!(\"failed to generate sps/pps/vps\");\n        } else {\n            trace!(\"generated {} bytes of h265 headers\", headers.len());\n        }\n\n        let pic_metadata = vec![H265Metadata::default(); structure.layers as usize];\n\n        Ok(Self {\n            inner,\n            profile,\n            rc_mode,\n            structure,\n            pic_metadata,\n            idr_num: 0,\n            frame_num: 0,\n            headers: Bytes::copy_from_slice(&headers),\n        })\n    }\n\n    pub unsafe fn submit_encode(\n        &mut self,\n        input: &VkImage,\n        tp_acquire: 
VkTimelinePoint,\n        tp_release: VkTimelinePoint,\n    ) -> anyhow::Result<()> {\n        let frame_state = self.structure.next_frame();\n        if frame_state.is_keyframe {\n            self.idr_num += 1;\n            self.frame_num = 0;\n        }\n\n        let pattern = if self.structure.layers > 1 {\n            vk::VideoEncodeH265RateControlFlagsEXT::TEMPORAL_SUB_LAYER_PATTERN_DYADIC\n        } else {\n            vk::VideoEncodeH265RateControlFlagsEXT::REFERENCE_PATTERN_FLAT\n        };\n\n        let mut h265_rc_layers = Vec::new();\n        let mut rc_layers = Vec::new();\n\n        if let RateControlMode::Vbr(vbr) = self.rc_mode {\n            let layer_settings = (0..self.structure.layers)\n                .map(|layer| vbr.layer(layer))\n                .collect::<Vec<_>>();\n\n            for settings in &layer_settings {\n                h265_rc_layers.push(\n                    vk::VideoEncodeH265RateControlLayerInfoEXT::default()\n                        .use_min_qp(true)\n                        .use_max_qp(true)\n                        .min_qp(vk::VideoEncodeH265QpEXT {\n                            qp_i: settings.min_qp as i32,\n                            qp_p: settings.min_qp as i32,\n                            qp_b: settings.min_qp as i32,\n                        })\n                        .max_qp(vk::VideoEncodeH265QpEXT {\n                            qp_i: settings.max_qp as i32,\n                            qp_p: settings.max_qp as i32,\n                            qp_b: settings.max_qp as i32,\n                        }),\n                );\n            }\n\n            for (layer, (settings, h265_rc_layer)) in layer_settings\n                .iter()\n                .zip(h265_rc_layers.iter_mut())\n                .enumerate()\n            {\n                let (fps_numerator, fps_denominator) = self\n                    .structure\n                    .layer_framerate(layer as u32, self.inner.framerate);\n\n                
rc_layers.push(\n                    vk::VideoEncodeRateControlLayerInfoKHR::default()\n                        .max_bitrate(settings.peak_bitrate)\n                        .average_bitrate(settings.average_bitrate)\n                        .frame_rate_numerator(fps_numerator)\n                        .frame_rate_denominator(fps_denominator)\n                        .push_next(h265_rc_layer),\n                );\n            }\n        }\n\n        let mut h265_rc_info = vk::VideoEncodeH265RateControlInfoEXT::default()\n            .gop_frame_count(self.structure.gop_size)\n            .idr_period(self.structure.gop_size)\n            .consecutive_b_frame_count(0)\n            .sub_layer_count(rc_layers.len() as u32)\n            .flags(vk::VideoEncodeH265RateControlFlagsEXT::REGULAR_GOP | pattern);\n\n        let vbv_size = match self.rc_mode {\n            RateControlMode::Vbr(settings) => settings.vbv_size_ms,\n            _ => 0,\n        };\n\n        let mut rc_info = vk::VideoEncodeRateControlInfoKHR::default()\n            .rate_control_mode(self.rc_mode.as_vk_flags())\n            .virtual_buffer_size_in_ms(vbv_size);\n\n        if !rc_layers.is_empty() {\n            rc_info = rc_info.layers(&rc_layers);\n        }\n\n        // Doesn't have a push_next method, because we're supposed to call it on\n        // the parent struct.\n        rc_info.p_next = <*mut _>::cast(&mut h265_rc_info);\n\n        let weight_table: vk::native::StdVideoEncodeH265WeightTable = std::mem::zeroed();\n\n        let slice_type = if frame_state.is_keyframe {\n            vk::native::StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_I\n        } else {\n            vk::native::StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_P\n        };\n\n        let pic_type = if frame_state.is_keyframe {\n            vk::native::StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_IDR\n        } else {\n            vk::native::StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_P\n        };\n\n 
       let std_slice_header = vk::native::StdVideoEncodeH265SliceSegmentHeader {\n            slice_type,\n            pWeightTable: &weight_table,\n            MaxNumMergeCand: 5, // Decoders complain if this is zero. The max value is 5.\n            ..std::mem::zeroed()\n        };\n\n        let slice_segment_info = [vk::VideoEncodeH265NaluSliceSegmentInfoEXT::default()\n            .std_slice_segment_header(&std_slice_header)\n            .constant_qp(if let RateControlMode::ConstantQp(qp) = self.rc_mode {\n                qp.layer(frame_state.id) as i32\n            } else {\n                0\n            })];\n\n        let mut ref_lists_info = vk::native::StdVideoEncodeH265ReferenceListsInfo {\n            RefPicList0: [u8::MAX; 15],\n            RefPicList1: [u8::MAX; 15],\n            ..std::mem::zeroed()\n        };\n\n        for (idx, id) in frame_state.ref_ids.iter().enumerate() {\n            let pic = self\n                .inner\n                .dpb\n                .get_pic(*id)\n                .ok_or(anyhow::anyhow!(\"ref pic {id} missing from dpb\"))?;\n\n            ref_lists_info.RefPicList0[idx] = pic.index as u8;\n        }\n\n        // For each frame, we have to tell the decoder which pictures will be\n        // used as references in the future, in addition to those that are\n        // references for this frame.\n        let mut ref_ids = if frame_state.is_keyframe {\n            // If we're outputting a keyframe, clear the forward reference counts.\n            for md in &mut self.pic_metadata {\n                md.ref_count = 0;\n            }\n\n            Vec::new()\n        } else {\n            self.pic_metadata\n                .iter_mut()\n                .enumerate()\n                .filter_map(|(id, md)| {\n                    let id = id as u32;\n                    if md.ref_count == 0 {\n                        None\n                    } else if frame_state.ref_ids.contains(&id) {\n                        md.ref_count 
-= 1;\n                        Some((id, true))\n                    } else {\n                        Some((id, false))\n                    }\n                })\n                .collect::<Vec<_>>()\n        };\n\n        // Sort in descending order of POC.\n        ref_ids.sort_by_key(|(id, _)| {\n            std::cmp::Reverse(self.pic_metadata[*id as usize].pic_order_cnt)\n        });\n\n        let mut short_term_refs = vk::native::StdVideoH265ShortTermRefPicSet {\n            used_by_curr_pic_s0_flag: 0,\n            num_negative_pics: ref_ids.len() as u8,\n            // No forward refs.\n            used_by_curr_pic_s1_flag: 0,\n            num_positive_pics: 0,\n            ..std::mem::zeroed()\n        };\n\n        let pic_order_cnt = frame_state.gop_position as i32;\n        let mut delta_poc = 0;\n        for (idx, (id, is_direct_ref)) in ref_ids.into_iter().enumerate() {\n            // delta_poc accumulates for each step backwards in time we take.\n            // So if a frame references the immediately preceding one and then a\n            // frame four frames ago, the delta_poc values are 1 and 3.\n            //\n            // Taking the modulo allows us to reference frames across a GOP\n            // boundary.\n            delta_poc = (pic_order_cnt - self.pic_metadata[id as usize].pic_order_cnt)\n                .rem_euclid(self.structure.gop_size as i32)\n                - delta_poc;\n\n            short_term_refs.delta_poc_s0_minus1[idx] = (delta_poc - 1) as u16;\n            if is_direct_ref {\n                short_term_refs.used_by_curr_pic_s0_flag |= 1 << idx;\n            }\n        }\n\n        let mut std_pic_info = vk::native::StdVideoEncodeH265PictureInfo {\n            pic_type,\n            sps_video_parameter_set_id: 0,\n            pps_seq_parameter_set_id: 0,\n            pps_pic_parameter_set_id: 0,\n            PicOrderCntVal: frame_state.gop_position as i32,\n            TemporalId: frame_state.id as u8,\n            
pRefLists: &ref_lists_info,\n            pShortTermRefPicSet: &short_term_refs,\n            ..std::mem::zeroed()\n        };\n\n        std_pic_info\n            .flags\n            .set_IrapPicFlag(frame_state.is_keyframe as u32);\n        std_pic_info\n            .flags\n            .set_is_reference((frame_state.forward_ref_count > 0) as u32);\n\n        if frame_state.is_keyframe {\n            std_pic_info.flags.set_pic_output_flag(1);\n            std_pic_info.flags.set_no_output_of_prior_pics_flag(1);\n        }\n\n        let mut h265_pic_info = vk::VideoEncodeH265PictureInfoEXT::default()\n            .std_picture_info(&std_pic_info)\n            .nalu_slice_segment_entries(&slice_segment_info);\n\n        let mut std_ref_infos = frame_state\n            .ref_ids\n            .iter()\n            .map(|id| vk::native::StdVideoEncodeH265ReferenceInfo {\n                pic_type: self.pic_metadata[*id as usize].pic_type,\n                PicOrderCntVal: self.pic_metadata[*id as usize].pic_order_cnt,\n                TemporalId: *id as u8,\n                ..std::mem::zeroed()\n            })\n            .collect::<Vec<_>>();\n\n        let mut ref_info = std_ref_infos\n            .iter_mut()\n            .map(|info| vk::VideoEncodeH265DpbSlotInfoEXT::default().std_reference_info(info))\n            .collect::<Vec<_>>();\n\n        let setup_std_ref_info = vk::native::StdVideoEncodeH265ReferenceInfo {\n            pic_type,\n            PicOrderCntVal: pic_order_cnt,\n            TemporalId: frame_state.id as u8,\n            ..std::mem::zeroed()\n        };\n\n        let mut setup_info =\n            vk::VideoEncodeH265DpbSlotInfoEXT::default().std_reference_info(&setup_std_ref_info);\n\n        let insert = if frame_state.is_keyframe {\n            Some(self.headers.clone())\n        } else {\n            None\n        };\n\n        self.inner.submit_encode(\n            input,\n            tp_acquire,\n            tp_release,\n            
&frame_state,\n            &mut rc_info,\n            &mut h265_pic_info,\n            &mut setup_info,\n            &mut ref_info,\n            insert,\n        )?;\n\n        // Save the reference info for the DPB slot we just wrote.\n        self.pic_metadata[frame_state.id as usize] = H265Metadata {\n            pic_type,\n            pic_order_cnt,\n            ref_count: frame_state.forward_ref_count,\n        };\n\n        // This is supposed to increment only for reference frames.\n        if frame_state.forward_ref_count > 0 {\n            self.frame_num += 1;\n        }\n\n        Ok(())\n    }\n\n    pub fn input_format(&self) -> vk::Format {\n        self.inner.input_format\n    }\n\n    pub fn create_input_image(&mut self) -> anyhow::Result<VkImage> {\n        self.inner.create_input_image(self.profile.as_mut())\n    }\n\n    pub fn request_refresh(&mut self) {\n        self.structure.request_refresh()\n    }\n}\n"
  },
  {
    "path": "mm-server/src/encoder/rate_control.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse ash::vk;\nuse tracing::warn;\n\nuse crate::session::control::VideoStreamParams;\n\n// Bitrate is defined here in terms of 1080p, and scaled nonlinearly to the\n// target resolution. Values are indexed by quality preset. Values 7/8/9 are\n// only used if CRF is unsupported by the driver.\nconst BASELINE_AVG_BITRATE_MBPS: [f32; 10] = [2.5, 3.0, 4.0, 5.0, 6.0, 8.0, 10.0, 12.0, 25.0, 50.0];\nconst BASELINE_PEAK_BITRATE_MBPS: [f32; 10] =\n    [5.0, 8.0, 10.0, 15.0, 20.0, 30.0, 40.0, 60.0, 80.0, 100.0];\nconst BASELINE_DIMS: f32 = 1920.0 * 1080.0;\nconst VBV_SIZE: u32 = 2500;\n\n#[derive(Debug, Clone)]\npub enum RateControlMode {\n    ConstantQp(CascadingQp),\n    Vbr(LayeredVbr),\n    Defaults,\n}\n\nimpl RateControlMode {\n    pub fn as_vk_flags(&self) -> vk::VideoEncodeRateControlModeFlagsKHR {\n        match self {\n            Self::ConstantQp(_) => vk::VideoEncodeRateControlModeFlagsKHR::DISABLED,\n            Self::Vbr(_) => vk::VideoEncodeRateControlModeFlagsKHR::VBR,\n            Self::Defaults => vk::VideoEncodeRateControlModeFlagsKHR::DEFAULT,\n        }\n    }\n}\n\n#[derive(Debug, Clone, Copy)]\npub struct CascadingQp {\n    target: u32,\n    max: u32,\n}\n\nimpl CascadingQp {\n    pub fn layer(&self, layer: u32) -> u32 {\n        layer_qp(self.target, layer).min(self.max)\n    }\n}\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\npub struct VbrSettings {\n    pub average_bitrate: u64,\n    pub peak_bitrate: u64,\n    pub max_qp: u32,\n    pub min_qp: u32,\n}\n\n#[derive(Debug, Clone, Copy)]\npub struct LayeredVbr {\n    pub vbv_size_ms: u32,\n\n    base: VbrSettings,\n    num_layers: u32,\n}\n\nimpl LayeredVbr {\n    pub fn layer(&self, layer: u32) -> VbrSettings {\n        if self.num_layers <= 1 {\n            return self.base;\n        }\n\n        let bitrate_denominator = 2_u64.pow(layer + 1);\n        let max_qp = layer_qp(self.base.max_qp, 
layer).clamp(self.base.min_qp, self.base.max_qp);\n\n        VbrSettings {\n            average_bitrate: self.base.average_bitrate / bitrate_denominator,\n            peak_bitrate: self.base.peak_bitrate / bitrate_denominator,\n            max_qp,\n            min_qp: self.base.min_qp,\n        }\n    }\n}\n\npub fn select_rc_mode(\n    params: VideoStreamParams,\n    caps: &vk::VideoEncodeCapabilitiesKHR,\n    min_qp: u32,\n    max_qp: u32,\n    structure: &super::gop_structure::HierarchicalP,\n) -> RateControlMode {\n    assert!(params.preset <= 9);\n\n    let min_qp = 17.max(min_qp);\n    let target_qp = 40 - (2 * params.preset); // 22 - 40;\n\n    let supports_crf = caps\n        .rate_control_modes\n        .contains(vk::VideoEncodeRateControlModeFlagsKHR::DISABLED);\n    let supports_vbr = caps\n        .rate_control_modes\n        .contains(vk::VideoEncodeRateControlModeFlagsKHR::VBR);\n\n    if params.preset >= 7 && supports_crf {\n        // Presets 7/8/9 use a very low constant QP.\n        RateControlMode::ConstantQp(CascadingQp {\n            target: target_qp.clamp(min_qp, max_qp),\n            max: max_qp,\n        })\n    } else if supports_vbr {\n        // 6 and lower use VBR, starting with a high peak and reducing as the\n        // presets get lower.\n        let scale = ((params.width * params.height) as f32 / BASELINE_DIMS).sqrt();\n\n        const MBPS: f32 = 1_000_000.0;\n        let average_bitrate =\n            (BASELINE_AVG_BITRATE_MBPS[params.preset as usize] * MBPS * scale).round() as u64;\n        let peak_bitrate =\n            (BASELINE_PEAK_BITRATE_MBPS[params.preset as usize] * MBPS * scale).round() as u64;\n\n        RateControlMode::Vbr(LayeredVbr {\n            vbv_size_ms: VBV_SIZE,\n            base: VbrSettings {\n                average_bitrate,\n                peak_bitrate,\n                min_qp,\n                max_qp: target_qp.clamp(min_qp, max_qp),\n            },\n            num_layers: structure.layers,\n        
})\n    } else if supports_crf {\n        // Fall back to CRF with a high QP.\n        RateControlMode::ConstantQp(CascadingQp {\n            target: target_qp.clamp(min_qp, max_qp),\n            max: max_qp,\n        })\n    } else {\n        warn!(\"no rate control modes available, using driver defaults!\");\n        RateControlMode::Defaults\n    }\n}\n\n/// Determines the constant QP for a layer given the target QP.\nfn layer_qp(target_qp: u32, layer: u32) -> u32 {\n    // Example: for a target QP of 22, the QP for each layer is:\n    //   22, 27, 29, 31...\n    target_qp + (3 * layer.min(1)) + (layer * 2)\n}\n"
  },
  {
    "path": "mm-server/src/encoder/stats.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{sync::Arc, time};\n\nuse parking_lot::Mutex;\n\n#[derive(Default, Clone)]\npub struct EncodeStats {\n    inner: Arc<Mutex<Inner>>,\n}\n\nstruct Inner {\n    start: time::Instant,\n    stream_stats: LayerStats,\n    keyframe_stats: LayerStats,\n    layer_stats: Vec<LayerStats>,\n}\n\nimpl Default for Inner {\n    fn default() -> Self {\n        let start = time::Instant::now();\n\n        Self {\n            start,\n            stream_stats: LayerStats::new(start),\n            keyframe_stats: LayerStats::new(start),\n            layer_stats: Vec::new(),\n        }\n    }\n}\n\nstruct LayerStats {\n    start: time::Instant,\n    min: usize,\n    max: usize,\n    total: u64,\n}\n\nimpl LayerStats {\n    fn new(start: time::Instant) -> Self {\n        Self {\n            start,\n            min: 0,\n            max: 0,\n            total: 0,\n        }\n    }\n\n    fn record_frame_size(&mut self, len: usize) {\n        self.total += len as u64;\n\n        if self.min == 0 || len < self.min {\n            self.min = len;\n        }\n\n        if len > self.max {\n            self.max = len;\n        }\n    }\n}\n\nimpl std::fmt::Debug for LayerStats {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        let period = self.start.elapsed();\n\n        let mut f = f.debug_struct(\"EncodeStats\");\n\n        f.field(\"frame_min\", &self.min);\n        f.field(\"frame_max\", &self.max);\n        f.field(\"rate\", &calculate_rate(period, self.total));\n\n        f.finish()\n    }\n}\n\nimpl EncodeStats {\n    pub fn record_frame_size(&self, is_keyframe: bool, layer: u32, len: usize) {\n        let mut inner = self.inner.lock();\n\n        inner.stream_stats.record_frame_size(len);\n        if is_keyframe {\n            inner.keyframe_stats.record_frame_size(len);\n        } else {\n            let layer = layer as usize;\n       
     let layers = (layer + 1).max(inner.layer_stats.len());\n\n            let start = inner.start;\n            inner\n                .layer_stats\n                .resize_with(layers, || LayerStats::new(start));\n\n            inner.layer_stats[layer].record_frame_size(len);\n        }\n    }\n}\n\nimpl std::fmt::Debug for EncodeStats {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        let inner = self.inner.lock();\n\n        let mut f = f.debug_struct(\"EncodeStats\");\n\n        f.field(\"duration\", &inner.start.elapsed());\n        f.field(\"totals\", &inner.stream_stats);\n        f.field(\"IDR\", &inner.keyframe_stats);\n        for (layer, stats) in inner.layer_stats.iter().enumerate() {\n            f.field(&format!(\"P{layer}\"), &stats);\n        }\n\n        f.finish()\n    }\n}\n\nfn calculate_rate(dur: time::Duration, total: u64) -> f32 {\n    // Total is in bytes, we want mbit/s.\n    let total_mbits = total as f32 / (1024.0 * 1024.0) * 8.0;\n    total_mbits / dur.as_secs_f32()\n}\n"
  },
  {
    "path": "mm-server/src/encoder.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\n// It's not me, it's vulkan.\n#![allow(clippy::too_many_arguments)]\n\nuse std::sync::Arc;\nuse std::time;\n\nuse anyhow::{anyhow, bail, Context};\nuse ash::vk;\nuse bytes::Bytes;\nuse crossbeam_channel as crossbeam;\nuse tracing::{debug, error, instrument, trace, trace_span};\n\nuse self::gop_structure::HierarchicalP;\nuse crate::codec::VideoCodec;\nuse crate::session::control::VideoStreamParams;\nuse crate::vulkan::video::VideoQueueExt;\nuse crate::vulkan::*;\n\nmod dpb;\nmod gop_structure;\nmod rate_control;\nmod stats;\n\nmod h264;\nuse h264::H264Encoder;\n\nmod h265;\nuse h265::H265Encoder;\n\npub enum Encoder {\n    H264(H264Encoder),\n    H265(H265Encoder),\n}\n\nimpl Encoder {\n    pub fn new(\n        vk: Arc<VkContext>,\n        params: VideoStreamParams,\n        framerate: u32,\n        sink: impl Sink,\n    ) -> anyhow::Result<Self> {\n        match params.codec {\n            VideoCodec::H264 => Ok(Self::H264(H264Encoder::new(vk, params, framerate, sink)?)),\n            VideoCodec::H265 => Ok(Self::H265(H265Encoder::new(vk, params, framerate, sink)?)),\n            _ => bail!(\"unsupported codec\"),\n        }\n    }\n\n    pub unsafe fn submit_encode(\n        &mut self,\n        image: &VkImage,\n        acquire: VkTimelinePoint,\n        release: VkTimelinePoint,\n    ) -> anyhow::Result<()> {\n        match self {\n            Self::H264(encoder) => encoder.submit_encode(image, acquire, release),\n            Self::H265(encoder) => encoder.submit_encode(image, acquire, release),\n        }\n    }\n\n    pub fn input_format(&self) -> vk::Format {\n        match self {\n            Self::H264(encoder) => encoder.input_format(),\n            Self::H265(encoder) => encoder.input_format(),\n        }\n    }\n\n    pub fn create_input_image(&mut self) -> anyhow::Result<VkImage> {\n        match self {\n            Self::H264(encoder) => 
encoder.create_input_image(),\n            Self::H265(encoder) => encoder.create_input_image(),\n        }\n    }\n\n    pub fn request_refresh(&mut self) {\n        match self {\n            Encoder::H264(encoder) => encoder.request_refresh(),\n            Encoder::H265(encoder) => encoder.request_refresh(),\n        }\n    }\n}\n\nstruct EncoderInner {\n    session: vk::VideoSessionKHR,\n    session_memory: Vec<vk::DeviceMemory>,\n\n    session_params: vk::VideoSessionParametersKHR,\n\n    writer_thread_handle: Option<std::thread::JoinHandle<anyhow::Result<()>>>,\n    submitted_frames: Option<crossbeam::Sender<EncoderOutputFrame>>,\n    done_frames: crossbeam::Receiver<EncoderOutputFrame>,\n\n    dpb: dpb::DpbPool,\n\n    width: u32,\n    height: u32,\n    framerate: u32,\n    input_format: vk::Format,\n\n    stats: stats::EncodeStats,\n\n    vk: Arc<VkContext>,\n}\n\nimpl EncoderInner {\n    pub fn new(\n        vk: Arc<VkContext>,\n        width: u32,\n        height: u32,\n        framerate: u32,\n        required_dpb_size: usize,\n        profile: &mut vk::VideoProfileInfoKHR,\n        capabilities: vk::VideoCapabilitiesKHR,\n        session_params: &mut impl vk::ExtendsVideoSessionParametersCreateInfoKHR,\n        sink: impl Sink,\n    ) -> anyhow::Result<Self> {\n        if vk.encode_queue.is_none() {\n            bail!(\"no vulkan video support\")\n        }\n\n        let (video_loader, _encode_loader) = vk.video_apis.as_ref().unwrap();\n        let encode_family = vk.device_info.encode_family.unwrap();\n\n        if capabilities.max_coded_extent.width < width\n            || capabilities.max_coded_extent.height < height\n        {\n            bail!(\n                \"video resolution too large: (max {}x{})\",\n                capabilities.max_coded_extent.width,\n                capabilities.max_coded_extent.height\n            );\n        }\n\n        let format_info = list_format_props(\n            video_loader,\n            
vk.device_info.pdevice,\n            profile,\n            vk::ImageUsageFlags::VIDEO_ENCODE_SRC_KHR,\n        )?;\n\n        for format in &format_info {\n            trace!(?format, \"available input format\");\n        }\n\n        let input_format = match format_info.first() {\n            Some(format) => format.format,\n            None => bail!(\"unable to determine supported ENCODE_SRC format\"),\n        };\n\n        trace!(?input_format, width, height, \"using input format\");\n\n        let buffer_size_alignment = capabilities.min_bitstream_buffer_size_alignment as usize;\n\n        let session = {\n            let create_info = vk::VideoSessionCreateInfoKHR::default()\n                .queue_family_index(encode_family)\n                .flags(vk::VideoSessionCreateFlagsKHR::ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS)\n                .video_profile(profile)\n                .picture_format(input_format)\n                .reference_picture_format(input_format)\n                .max_coded_extent(capabilities.max_coded_extent)\n                .max_dpb_slots(capabilities.max_dpb_slots)\n                .max_active_reference_pictures(capabilities.max_active_reference_pictures)\n                .std_header_version(&capabilities.std_header_version);\n\n            unsafe {\n                video_loader\n                    .create_video_session(&create_info, None)\n                    .context(\"vkCreateVideoSessionKHR\")?\n            }\n        };\n\n        let session_memory =\n            bind_session_memory(video_loader, &vk.device, &vk.device_info, session)?;\n\n        let session_params = {\n            let create_info = vk::VideoSessionParametersCreateInfoKHR::default()\n                .video_session(session)\n                .push_next(session_params);\n\n            unsafe {\n                video_loader\n                    .create_video_session_parameters(&create_info, None)\n                    .context(\"vkCreateVideoSessionParametersKHR\")?\n      
      }\n        };\n\n        let dpb = if capabilities\n            .flags\n            .contains(vk::VideoCapabilityFlagsKHR::SEPARATE_REFERENCE_IMAGES)\n        {\n            trace!(\"using separate images for DPB pool\");\n\n            dpb::DpbPool::new_separate_images(\n                vk.clone(),\n                input_format,\n                width.next_multiple_of(capabilities.picture_access_granularity.width),\n                height.next_multiple_of(capabilities.picture_access_granularity.height),\n                profile,\n                required_dpb_size,\n            )?\n        } else {\n            trace!(\"using shared image for DPB pool\");\n\n            dpb::DpbPool::new(\n                vk.clone(),\n                input_format,\n                width.next_multiple_of(capabilities.picture_access_granularity.width),\n                height.next_multiple_of(capabilities.picture_access_granularity.height),\n                profile,\n                required_dpb_size,\n            )?\n        };\n\n        let stats = stats::EncodeStats::default();\n\n        let (submitted_frames_tx, submitted_frames_rx) = crossbeam::bounded(1);\n        let (done_frames_tx, done_frames_rx) = crossbeam::unbounded();\n\n        for _frame in 0..2 {\n            // We need a frame name for each swapframe.\n            #[cfg(feature = \"tracy\")]\n            let frame_name = [\n                tracy_client::frame_name!(\"composite + encode 1\"),\n                tracy_client::frame_name!(\"composite + encode 2\"),\n            ][_frame];\n\n            done_frames_tx\n                .send(EncoderOutputFrame::new(\n                    vk.clone(),\n                    width,\n                    height,\n                    buffer_size_alignment,\n                    profile,\n                    #[cfg(feature = \"tracy\")]\n                    frame_name,\n                )?)\n                .unwrap();\n        }\n\n        let vk_clone = vk.clone();\n        
let stats_clone = stats.clone();\n        let handle = std::thread::Builder::new()\n            .name(\"encoder writer\".to_owned())\n            .spawn(move || {\n                writer_thread(\n                    vk_clone,\n                    submitted_frames_rx,\n                    done_frames_tx,\n                    sink,\n                    stats_clone,\n                )\n            })?;\n\n        Ok(Self {\n            session,\n            session_params,\n            session_memory,\n\n            writer_thread_handle: Some(handle),\n            submitted_frames: Some(submitted_frames_tx),\n            done_frames: done_frames_rx,\n\n            dpb,\n\n            width,\n            height,\n            framerate,\n            input_format,\n\n            stats,\n\n            vk,\n        })\n    }\n\n    fn create_input_image(&self, profile: &mut vk::VideoProfileInfoKHR) -> anyhow::Result<VkImage> {\n        let image = {\n            let mut profile_list_info = single_profile_list_info(profile);\n\n            let create_info = vk::ImageCreateInfo::default()\n                .image_type(vk::ImageType::TYPE_2D)\n                .format(self.input_format)\n                .extent(vk::Extent3D {\n                    width: self.width,\n                    height: self.height,\n                    depth: 1,\n                })\n                .mip_levels(1)\n                .array_layers(1)\n                .samples(vk::SampleCountFlags::TYPE_1)\n                .tiling(vk::ImageTiling::OPTIMAL)\n                .usage(vk::ImageUsageFlags::VIDEO_ENCODE_SRC_KHR | vk::ImageUsageFlags::STORAGE)\n                .sharing_mode(vk::SharingMode::EXCLUSIVE)\n                .initial_layout(vk::ImageLayout::UNDEFINED)\n                .flags(vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE)\n                .push_next(&mut profile_list_info);\n\n            unsafe {\n                self.vk\n                    .device\n          
          .create_image(&create_info, None)\n                    .context(\"VkCreateImage\")?\n            }\n        };\n\n        let memory = unsafe {\n            bind_memory_for_image(&self.vk.device, &self.vk.device_info.memory_props, image)?\n        };\n\n        let view = unsafe {\n            let mut usage_info = vk::ImageViewUsageCreateInfo::default()\n                .usage(vk::ImageUsageFlags::VIDEO_ENCODE_SRC_KHR);\n\n            let create_info = vk::ImageViewCreateInfo::default()\n                .image(image)\n                .view_type(vk::ImageViewType::TYPE_2D)\n                .format(self.input_format)\n                .subresource_range(vk::ImageSubresourceRange {\n                    aspect_mask: vk::ImageAspectFlags::COLOR,\n                    base_mip_level: 0,\n                    level_count: 1,\n                    base_array_layer: 0,\n                    layer_count: 1,\n                })\n                .push_next(&mut usage_info);\n\n            self.vk\n                .device\n                .create_image_view(&create_info, None)\n                .context(\"VkCreateImageView\")?\n        };\n\n        Ok(VkImage::wrap(\n            self.vk.clone(),\n            image,\n            view,\n            memory,\n            self.input_format,\n            self.width,\n            self.height,\n        ))\n    }\n\n    #[instrument(skip_all)]\n    pub unsafe fn submit_encode(\n        &mut self,\n        input: &VkImage,\n        tp_acquire: VkTimelinePoint,\n        tp_release: VkTimelinePoint,\n        frame_state: &gop_structure::GopFrame,\n        rc_info: &mut (impl vk::ExtendsVideoBeginCodingInfoKHR + vk::ExtendsVideoCodingControlInfoKHR),\n        codec_pic_info: &mut impl vk::ExtendsVideoEncodeInfoKHR,\n        codec_setup_info: &mut impl vk::ExtendsVideoReferenceSlotInfoKHR,\n        codec_ref_info: &mut [impl vk::ExtendsVideoReferenceSlotInfoKHR],\n        insert: Option<Bytes>,\n    ) -> anyhow::Result<()> {\n        
use ash::vk::Handle;\n        if self.session_params.is_null() {\n            bail!(\"session parameters not yet created\");\n        }\n\n        let (video_loader, encode_loader) = self.vk.video_apis.as_ref().unwrap();\n        let encode_queue = self.vk.encode_queue.as_ref().unwrap();\n\n        // \"Acquire\" a buffer to copy to. This provides backpressure if the\n        // encoder can't keep up.\n        let res = trace_span!(\"wait_prev_frame\").in_scope(|| self.done_frames.recv());\n        let mut frame = match res {\n            Ok(frame) => frame,\n            Err(_) => {\n                bail!(\"copy thread died\");\n            }\n        };\n\n        #[cfg(feature = \"tracy\")]\n        {\n            frame.tracy_context.frame = Some(\n                tracy_client::Client::running()\n                    .expect(\"no tracy client\")\n                    .non_continuous_frame(frame.tracy_context.frame_name),\n            );\n\n            if let Some(ref ctx) = encode_queue.tracy_context {\n                frame.tracy_context.encode_span =\n                    Some(ctx.span(tracy_client::span_location!(\"encode\"))?);\n            }\n        }\n\n        begin_command_buffer(&self.vk.device, frame.encode_cb)?;\n\n        // Record the start timestamp.\n        #[cfg(feature = \"tracy\")]\n        if let Some(encode_ts_pool) = &mut frame.tracy_context.encode_ts_pool {\n            encode_ts_pool.cmd_reset(&self.vk.device, frame.encode_cb);\n            self.vk.device.cmd_write_timestamp(\n                frame.encode_cb,\n                vk::PipelineStageFlags::TOP_OF_PIPE,\n                encode_ts_pool.pool,\n                0,\n            );\n        }\n\n        // Acquire the image from the graphics queue.\n        insert_image_barrier(\n            &self.vk.device,\n            frame.encode_cb,\n            input.image,\n            Some((self.vk.graphics_queue.family, encode_queue.family)),\n            vk::ImageLayout::GENERAL,\n            
vk::ImageLayout::VIDEO_ENCODE_SRC_KHR,\n            vk::PipelineStageFlags2::NONE,\n            vk::AccessFlags2::NONE,\n            vk::PipelineStageFlags2::VIDEO_ENCODE_KHR,\n            vk::AccessFlags2::VIDEO_ENCODE_READ_KHR,\n        );\n\n        // Bind the setup picture and any reference pictures.\n        let setup_pic = self.dpb.setup_pic();\n        let ref_pics = frame_state\n            .ref_ids\n            .iter()\n            .map(|id| {\n                self.dpb\n                    .get_pic(*id)\n                    .ok_or(anyhow!(\"ref pic {id} missing from dpb\"))\n            })\n            .collect::<anyhow::Result<Vec<_>>>()?;\n\n        let mut bound_pics = vec![vk::VideoReferenceSlotInfoKHR::default()\n            .slot_index(if setup_pic.currently_active {\n                setup_pic.index as i32\n            } else {\n                -1\n            })\n            .picture_resource(&setup_pic.picture_resource_info)];\n\n        for ref_pic in &ref_pics {\n            assert!(ref_pic.currently_active);\n            bound_pics.push(\n                vk::VideoReferenceSlotInfoKHR::default()\n                    .slot_index(ref_pic.index as i32)\n                    .picture_resource(&ref_pic.picture_resource_info),\n            );\n        }\n\n        trace!(\n            ref_ids = ?frame_state.ref_ids,\n            ref_slots = ?ref_pics.iter().map(|p| p.index).collect::<Vec<_>>(),\n            setup_id = frame_state.id,\n            setup_slot = setup_pic.index,\n            gop_position = frame_state.gop_position,\n            is_keyframe = frame_state.is_keyframe,\n            forward_ref_count = frame_state.forward_ref_count,\n            input_image = ?input.image,\n            \"encoding frame\"\n        );\n\n        // Bind the session.\n        {\n            let mut begin_info = vk::VideoBeginCodingInfoKHR::default()\n                .flags(vk::VideoBeginCodingFlagsKHR::empty())\n                .video_session(self.session)\n     
           .video_session_parameters(self.session_params)\n                .reference_slots(&bound_pics);\n\n            // Vulkan wants us to inform it of the current rate control, which\n            // is unset on the first frame.\n            if frame_state.stream_position != 0 {\n                begin_info = begin_info.push_next(rc_info)\n            }\n\n            unsafe {\n                video_loader.cmd_begin_video_coding(frame.encode_cb, &begin_info);\n            };\n        }\n\n        // Reset on keyframes.\n        if frame_state.is_keyframe {\n            let ctrl_info = vk::VideoCodingControlInfoKHR::default()\n                .flags(\n                    vk::VideoCodingControlFlagsKHR::RESET\n                        | vk::VideoCodingControlFlagsKHR::ENCODE_RATE_CONTROL,\n                )\n                .push_next(rc_info);\n\n            unsafe {\n                video_loader.cmd_control_video_coding(frame.encode_cb, &ctrl_info);\n            };\n        }\n\n        // Encode.\n        self.vk.device.cmd_begin_query(\n            frame.encode_cb,\n            frame.query_pool,\n            0,\n            vk::QueryControlFlags::empty(),\n        );\n\n        {\n            // The input picture.\n            let src_pic_resource = vk::VideoPictureResourceInfoKHR::default()\n                .coded_extent(vk::Extent2D {\n                    width: self.width,\n                    height: self.height,\n                })\n                .image_view_binding(input.view);\n\n            // The slot we're writing to.\n            let setup_reference_slot = vk::VideoReferenceSlotInfoKHR::default()\n                .slot_index(setup_pic.index as i32)\n                .picture_resource(&setup_pic.picture_resource_info)\n                .push_next(codec_setup_info);\n\n            // The slots we're referencing.\n            let reference_slots = ref_pics\n                .iter()\n                .zip(codec_ref_info.iter_mut())\n                
.map(|(ref_pic, codec_ref_info)| {\n                    vk::VideoReferenceSlotInfoKHR::default()\n                        .slot_index(ref_pic.index as i32)\n                        .picture_resource(&ref_pic.picture_resource_info)\n                        .push_next(codec_ref_info)\n                })\n                .collect::<Vec<_>>();\n\n            let encode_info = vk::VideoEncodeInfoKHR::default()\n                .flags(vk::VideoEncodeFlagsKHR::empty())\n                .dst_buffer(frame.copy_buffer.buffer)\n                .dst_buffer_range(frame.copy_buffer.len as u64)\n                .src_picture_resource(src_pic_resource)\n                .setup_reference_slot(&setup_reference_slot)\n                .reference_slots(&reference_slots)\n                .push_next(codec_pic_info);\n\n            // Transition the DPB images/layers we need.\n            let mut dpb_barriers = Vec::new();\n            for pic in &ref_pics {\n                dpb_barriers.push(\n                    vk::ImageMemoryBarrier2::default()\n                        .src_stage_mask(vk::PipelineStageFlags2::NONE)\n                        .src_access_mask(vk::AccessFlags2::NONE)\n                        .dst_stage_mask(vk::PipelineStageFlags2::VIDEO_ENCODE_KHR)\n                        .dst_access_mask(vk::AccessFlags2::VIDEO_ENCODE_READ_KHR)\n                        .old_layout(vk::ImageLayout::VIDEO_ENCODE_DPB_KHR)\n                        .new_layout(vk::ImageLayout::VIDEO_ENCODE_DPB_KHR)\n                        .image(pic.image)\n                        .subresource_range(vk::ImageSubresourceRange {\n                            aspect_mask: vk::ImageAspectFlags::COLOR,\n                            base_mip_level: 0,\n                            level_count: vk::REMAINING_MIP_LEVELS,\n                            // For multiple-layers-in-one-image DPB, just the layer referenced.\n                            base_array_layer: pic.picture_resource_info.base_array_layer,\n             
               layer_count: 1,\n                        }),\n                );\n            }\n\n            dpb_barriers.push(\n                vk::ImageMemoryBarrier2::default()\n                    .src_stage_mask(vk::PipelineStageFlags2::NONE)\n                    .src_access_mask(vk::AccessFlags2::NONE)\n                    .dst_stage_mask(vk::PipelineStageFlags2::VIDEO_ENCODE_KHR)\n                    .dst_access_mask(\n                        vk::AccessFlags2::VIDEO_ENCODE_WRITE_KHR\n                            | vk::AccessFlags2::VIDEO_ENCODE_READ_KHR,\n                    )\n                    .old_layout(vk::ImageLayout::VIDEO_ENCODE_DPB_KHR)\n                    .new_layout(vk::ImageLayout::VIDEO_ENCODE_DPB_KHR)\n                    .image(setup_pic.image)\n                    .subresource_range(vk::ImageSubresourceRange {\n                        aspect_mask: vk::ImageAspectFlags::COLOR,\n                        base_mip_level: 0,\n                        level_count: vk::REMAINING_MIP_LEVELS,\n                        base_array_layer: setup_pic.picture_resource_info.base_array_layer,\n                        layer_count: 1,\n                    }),\n            );\n\n            self.vk.device.cmd_pipeline_barrier2(\n                frame.encode_cb,\n                &vk::DependencyInfo::default().image_memory_barriers(&dpb_barriers),\n            );\n\n            // Update state as if the operation succeeded.\n            if frame_state.forward_ref_count > 0 {\n                // Keyframes clear all dpb slots.\n                if frame_state.is_keyframe {\n                    self.dpb.clear();\n                }\n\n                self.dpb.mark_active(setup_pic.index, frame_state.id);\n            } else {\n                self.dpb.mark_inactive(setup_pic.index);\n            }\n\n            unsafe {\n                encode_loader.cmd_encode_video(frame.encode_cb, &encode_info);\n            };\n        }\n\n        self.vk\n            .device\n   
         .cmd_end_query(frame.encode_cb, frame.query_pool, 0);\n\n        // Unbind the session.\n        {\n            let end_info =\n                vk::VideoEndCodingInfoKHR::default().flags(vk::VideoEndCodingFlagsKHR::empty());\n\n            unsafe {\n                video_loader.cmd_end_video_coding(frame.encode_cb, &end_info);\n            };\n        }\n\n        // Release the input picture back to the graphics queue.\n        insert_image_barrier(\n            &self.vk.device,\n            frame.encode_cb,\n            input.image,\n            Some((encode_queue.family, self.vk.graphics_queue.family)),\n            vk::ImageLayout::VIDEO_ENCODE_SRC_KHR,\n            vk::ImageLayout::GENERAL,\n            vk::PipelineStageFlags2::VIDEO_ENCODE_KHR,\n            vk::AccessFlags2::VIDEO_ENCODE_WRITE_KHR,\n            vk::PipelineStageFlags2::empty(),\n            vk::AccessFlags2::empty(),\n        );\n\n        // Record the end timestamp.\n        #[cfg(feature = \"tracy\")]\n        if let Some(encode_ts_pool) = &mut frame.tracy_context.encode_ts_pool {\n            self.vk.device.cmd_write_timestamp(\n                frame.encode_cb,\n                vk::PipelineStageFlags::ALL_COMMANDS,\n                encode_ts_pool.pool,\n                1,\n            );\n        }\n\n        #[cfg(feature = \"tracy\")]\n        if let Some(span) = &mut frame.tracy_context.encode_span {\n            span.end_zone();\n        }\n\n        // Wait for the output buffer to be clear of the previous copy\n        // operation, then establish new timeline points.\n        frame.tp_copied.wait()?;\n        frame.tp_encoded += 10;\n        frame.tp_copied = &frame.tp_encoded + 1;\n\n        // Submit!\n        {\n            self.vk.device.end_command_buffer(frame.encode_cb)?;\n\n            let cb_infos = [vk::CommandBufferSubmitInfo::default().command_buffer(frame.encode_cb)];\n\n            let wait_infos = [vk::SemaphoreSubmitInfo::default()\n                
.semaphore(tp_acquire.timeline().as_semaphore())\n                .value(tp_acquire.into())\n                .stage_mask(vk::PipelineStageFlags2::ALL_COMMANDS)];\n\n            let signal_infos = [\n                vk::SemaphoreSubmitInfo::default()\n                    .semaphore(frame.timeline.as_semaphore())\n                    .value(frame.tp_encoded.value())\n                    .stage_mask(vk::PipelineStageFlags2::ALL_COMMANDS),\n                vk::SemaphoreSubmitInfo::default()\n                    .semaphore(tp_release.timeline().as_semaphore())\n                    .value(tp_release.value())\n                    .stage_mask(vk::PipelineStageFlags2::ALL_COMMANDS),\n            ];\n\n            let submit_info = vk::SubmitInfo2::default()\n                .wait_semaphore_infos(&wait_infos)\n                .signal_semaphore_infos(&signal_infos)\n                .command_buffer_infos(&cb_infos);\n\n            let encode_queue = self.vk.encode_queue.as_ref().unwrap();\n            self.vk\n                .device\n                .queue_submit2(encode_queue.queue, &[submit_info], vk::Fence::null())\n                .context(\"vkQueueSubmit\")?;\n        }\n\n        frame.hierarchical_layer = frame_state.id;\n        frame.is_keyframe = frame_state.is_keyframe;\n        if let Some(submitted_frames) = &self.submitted_frames {\n            // Tell the other thread to copy out the finished packet when it's\n            // finished. 
Optionally insert headers.\n            frame.headers = insert;\n\n            submitted_frames\n                .send(frame)\n                .map_err(|_| anyhow::anyhow!(\"writer thread died\"))?;\n        }\n\n        Ok(())\n    }\n}\n\nimpl Drop for EncoderInner {\n    fn drop(&mut self) {\n        drop(self.submitted_frames.take());\n        for done in self.done_frames.iter() {\n            drop(done)\n        }\n\n        if let Some(handle) = self.writer_thread_handle.take() {\n            match handle.join() {\n                Ok(Ok(())) => (),\n                Ok(Err(e)) => error!(\"copy thread exited with error: {:#}\", e),\n                Err(_) => error!(\"copy thread panicked\"),\n            }\n        }\n\n        debug!(\"stream stats: \\n{:#?}\", self.stats);\n\n        let (video_loader, _) = self.vk.video_apis.as_ref().unwrap();\n\n        unsafe {\n            self.vk\n                .device\n                .queue_wait_idle(self.vk.encode_queue.as_ref().unwrap().queue)\n                .unwrap();\n\n            video_loader.destroy_video_session(self.session, None);\n            video_loader.destroy_video_session_parameters(self.session_params, None);\n\n            for memory in self.session_memory.drain(..) {\n                self.vk.device.free_memory(memory, None);\n            }\n        }\n    }\n}\n\n/// A synchronized buffer for writing encoded frames to. 
Passed back and forth\n/// between the submission thread and the copy thread.\nstruct EncoderOutputFrame {\n    encode_cb: vk::CommandBuffer,\n    copy_buffer: VkHostBuffer,\n    query_pool: vk::QueryPool,\n\n    hierarchical_layer: u32,\n    is_keyframe: bool,\n    headers: Option<bytes::Bytes>,\n\n    timeline: VkTimelineSemaphore,\n    tp_encoded: VkTimelinePoint,\n    tp_copied: VkTimelinePoint,\n\n    #[cfg(feature = \"tracy\")]\n    tracy_context: TracingContext,\n\n    vk: Arc<VkContext>,\n}\n\n#[cfg(feature = \"tracy\")]\nstruct TracingContext {\n    frame_name: tracy_client::FrameName,\n    frame: Option<tracy_client::Frame>,\n    encode_span: Option<tracy_client::GpuSpan>,\n    encode_ts_pool: Option<VkTimestampQueryPool>,\n}\n\nimpl EncoderOutputFrame {\n    pub fn new(\n        vk: Arc<VkContext>,\n        width: u32,\n        height: u32,\n        buffer_size_alignment: usize,\n        profile: &mut vk::VideoProfileInfoKHR,\n        #[cfg(feature = \"tracy\")] frame_name: tracy_client::FrameName,\n    ) -> anyhow::Result<Self> {\n        let buffer_size = (width * height * 3).next_multiple_of(buffer_size_alignment as u32);\n\n        let mut profile_list_info = single_profile_list_info(profile);\n\n        let copy_buffer = {\n            let buf = {\n                let create_info = vk::BufferCreateInfo::default()\n                    .size(buffer_size as u64)\n                    .sharing_mode(vk::SharingMode::EXCLUSIVE)\n                    .usage(vk::BufferUsageFlags::VIDEO_ENCODE_DST_KHR)\n                    .push_next(&mut profile_list_info);\n\n                unsafe { vk.device.create_buffer(&create_info, None)? 
}\n            };\n\n            let requirements = unsafe { vk.device.get_buffer_memory_requirements(buf) };\n\n            let alloc_info = vk::MemoryAllocateInfo::default()\n                .allocation_size(requirements.size)\n                .memory_type_index(vk.device_info.host_visible_mem_type_index);\n\n            let memory = unsafe { vk.device.allocate_memory(&alloc_info, None)? };\n\n            unsafe {\n                vk.device\n                    .bind_buffer_memory(buf, memory, 0)\n                    .context(\"vkBindBufferMemory\")?\n            };\n\n            VkHostBuffer::wrap(vk.clone(), buf, memory, buffer_size as usize)\n        };\n\n        let encode_queue = vk.encode_queue.as_ref().unwrap();\n        let encode_cb = allocate_command_buffer(&vk.device, encode_queue.command_pool)?;\n\n        let query_pool = {\n            let mut video_pool_info = vk::QueryPoolVideoEncodeFeedbackCreateInfoKHR::default()\n                .encode_feedback_flags(\n                    vk::VideoEncodeFeedbackFlagsKHR::BITSTREAM_BUFFER_OFFSET\n                        | vk::VideoEncodeFeedbackFlagsKHR::BITSTREAM_BYTES_WRITTEN,\n                );\n\n            let create_info = vk::QueryPoolCreateInfo::default()\n                .query_type(vk::QueryType::VIDEO_ENCODE_FEEDBACK_KHR)\n                .query_count(1)\n                .push_next(profile)\n                .push_next(&mut video_pool_info);\n\n            unsafe {\n                let query_pool = vk\n                    .device\n                    .create_query_pool(&create_info, None)\n                    .context(\"vkCreateQueryPool\")?;\n                vk.device.reset_query_pool(query_pool, 0, 1);\n\n                query_pool\n            }\n        };\n\n        let timeline = VkTimelineSemaphore::new(vk.clone(), 0)?;\n\n        #[cfg(feature = \"tracy\")]\n        let encode_ts_pool = if matches!(\n            vk.device_info.driver_version,\n            DriverVersion::MesaRadv { .. 
}\n        ) {\n            // RADV offers support for timestamp queries, but then has an\n            // assertion at timestamp write time.\n            None\n        } else {\n            create_timestamp_query_pool(&vk.device, 2).ok()\n        };\n\n        Ok(EncoderOutputFrame {\n            encode_cb,\n            copy_buffer,\n            query_pool,\n\n            hierarchical_layer: 0,\n            is_keyframe: false,\n            headers: None,\n\n            tp_encoded: timeline.new_point(0),\n            tp_copied: timeline.new_point(0),\n            timeline,\n\n            #[cfg(feature = \"tracy\")]\n            tracy_context: TracingContext {\n                frame_name,\n                frame: None,\n                encode_span: None,\n                encode_ts_pool,\n            },\n\n            vk,\n        })\n    }\n}\n\nimpl Drop for EncoderOutputFrame {\n    fn drop(&mut self) {\n        unsafe {\n            let device = &self.vk.device;\n            let encode_queue = self.vk.encode_queue.as_ref().unwrap();\n\n            device.queue_wait_idle(encode_queue.queue).unwrap();\n            device.free_command_buffers(encode_queue.command_pool, &[self.encode_cb]);\n            device.destroy_query_pool(self.query_pool, None);\n\n            #[cfg(feature = \"tracy\")]\n            if let Some(pool) = self.tracy_context.encode_ts_pool.take() {\n                device.destroy_query_pool(pool.pool, None);\n            }\n        }\n    }\n}\n\n// SAFETY: the contained pointers are nothing fancy.\nunsafe impl Send for EncoderOutputFrame {}\n\n/// Allows the caller to decide where to sink the frames.\npub trait Sink: Send + 'static {\n    fn write_frame(\n        &mut self,\n        ts: time::Instant,\n        frame: Bytes,\n        hierarchical_layer: u32,\n        is_keyframe: bool,\n    );\n}\n\n#[repr(C)]\n#[derive(Debug, Clone, Copy, Default)]\nstruct QueryResults {\n    offset: i32,\n    size: i32,\n    result: i32,\n}\n\n/// Responsible for 
copying encoded frames from the output buffer and\n/// dispatching them to the client. Passes instances of `EncodedOutputFrame`\n/// back and forth with the main thread.\nfn writer_thread(\n    vk: Arc<VkContext>,\n    input: crossbeam::Receiver<EncoderOutputFrame>,\n    done: crossbeam::Sender<EncoderOutputFrame>,\n    mut sink: impl Sink,\n    stats: stats::EncodeStats,\n) -> anyhow::Result<()> {\n    let device = &vk.device;\n\n    let mut capture_ts = time::Instant::now();\n\n    for mut frame in input {\n        let dur = capture_ts.elapsed();\n        capture_ts = time::Instant::now();\n\n        // Wait for the frame to finish encoding.\n        unsafe {\n            frame.tp_encoded.wait()?;\n        }\n\n        #[cfg(feature = \"tracy\")]\n        {\n            frame.tracy_context.frame.take();\n            if let Some(span) = frame.tracy_context.encode_span.take() {\n                if let Some(pool) = &mut frame.tracy_context.encode_ts_pool {\n                    let timestamps = pool.fetch_results(device)?;\n                    span.upload_timestamp(timestamps[0], timestamps[1])\n                }\n            }\n        }\n\n        // Get the buffer offsets for the encoded data.\n        let mut results = [QueryResults::default()];\n        unsafe {\n            device\n                .get_query_pool_results(\n                    frame.query_pool,\n                    0,\n                    &mut results,\n                    vk::QueryResultFlags::WITH_STATUS_KHR,\n                )\n                .context(\"vkGetQueryPoolResults\")?;\n            device.reset_query_pool(frame.query_pool, 0, 1)\n        }\n\n        let res = vk::QueryResultStatusKHR::from_raw(results[0].result);\n        if res != vk::QueryResultStatusKHR::COMPLETE {\n            bail!(\"encode failed: {:?}\", res);\n        }\n\n        trace!(len = results[0].size, ?dur, \"encoded packet\");\n        stats.record_frame_size(\n            frame.is_keyframe,\n            
frame.hierarchical_layer,\n            results[0].size as usize,\n        );\n\n        let data = unsafe {\n            let ptr = frame.copy_buffer.access as *const u8;\n            std::slice::from_raw_parts(\n                ptr.add(results[0].offset as usize),\n                results[0].size as usize,\n            )\n        };\n\n        // Prepend any headers.\n        let data = if let Some(headers) = frame.headers.take() {\n            let mut buf = bytes::BytesMut::from(headers);\n            buf.extend_from_slice(data);\n            buf.freeze()\n        } else {\n            Bytes::copy_from_slice(data)\n        };\n\n        unsafe {\n            frame.tp_copied.signal()?;\n        }\n\n        sink.write_frame(\n            capture_ts,\n            data,\n            frame.hierarchical_layer,\n            frame.is_keyframe,\n        );\n        done.send(frame).ok();\n    }\n\n    Ok(())\n}\n\nfn list_format_props<'a>(\n    video_loader: &'a VideoQueueExt,\n    pdevice: vk::PhysicalDevice,\n    profile: &mut vk::VideoProfileInfoKHR,\n    usage: vk::ImageUsageFlags,\n) -> anyhow::Result<Vec<vk::VideoFormatPropertiesKHR<'a>>> {\n    let mut profile_list_info = single_profile_list_info(profile);\n    let format_info = vk::PhysicalDeviceVideoFormatInfoKHR::default()\n        .image_usage(usage)\n        .push_next(&mut profile_list_info);\n\n    let props = unsafe {\n        video_loader\n            .get_physical_device_video_format_properties(pdevice, &format_info)\n            .context(\"vkGetVideoFormatPropertiesKHR\")?\n    };\n\n    Ok(props)\n}\n\nfn bind_session_memory(\n    video_loader: &VideoQueueExt,\n    device: &ash::Device,\n    device_info: &VkDeviceInfo,\n    session: vk::VideoSessionKHR,\n) -> anyhow::Result<Vec<vk::DeviceMemory>> {\n    let mut session_memory = Vec::new();\n    let reqs = unsafe { video_loader.get_video_session_memory_requirements(session)? 
};\n\n    let mut binds = Vec::new();\n    for req in reqs.into_iter() {\n        let memory = {\n            let mut alloc_info =\n                vk::MemoryAllocateInfo::default().allocation_size(req.memory_requirements.size);\n\n            let mem_type_idx = select_memory_type(\n                &device_info.memory_props,\n                vk::MemoryPropertyFlags::DEVICE_LOCAL,\n                Some(req.memory_requirements.memory_type_bits),\n            )\n            .or_else(|| {\n                select_memory_type(\n                    &device_info.memory_props,\n                    vk::MemoryPropertyFlags::empty(),\n                    Some(req.memory_requirements.memory_type_bits),\n                )\n            });\n\n            if mem_type_idx.is_none() {\n                bail!(\"no suitable memory type for video session\");\n            }\n\n            alloc_info = alloc_info.memory_type_index(mem_type_idx.unwrap());\n\n            unsafe {\n                device\n                    .allocate_memory(&alloc_info, None)\n                    .context(\"vkAllocateMemory\")?\n            }\n        };\n\n        session_memory.push(memory);\n        binds.push(\n            vk::BindVideoSessionMemoryInfoKHR::default()\n                .memory_bind_index(req.memory_bind_index)\n                .memory(memory)\n                .memory_size(req.memory_requirements.size),\n        );\n    }\n\n    unsafe {\n        video_loader\n            .bind_video_session_memory(device, session, &binds)\n            .context(\"vkBindVideoSessionMemory\")?\n    }\n\n    Ok(session_memory)\n}\n\nfn default_profile(op: vk::VideoCodecOperationFlagsKHR) -> vk::VideoProfileInfoKHR<'static> {\n    vk::VideoProfileInfoKHR::default()\n        .video_codec_operation(op)\n        .chroma_subsampling(vk::VideoChromaSubsamplingFlagsKHR::TYPE_420)\n        .chroma_bit_depth(vk::VideoComponentBitDepthFlagsKHR::TYPE_8)\n        
.luma_bit_depth(vk::VideoComponentBitDepthFlagsKHR::TYPE_8)\n}\n\nfn default_hdr10_profile(op: vk::VideoCodecOperationFlagsKHR) -> vk::VideoProfileInfoKHR<'static> {\n    vk::VideoProfileInfoKHR::default()\n        .video_codec_operation(op)\n        .chroma_subsampling(vk::VideoChromaSubsamplingFlagsKHR::TYPE_420)\n        .chroma_bit_depth(vk::VideoComponentBitDepthFlagsKHR::TYPE_10)\n        .luma_bit_depth(vk::VideoComponentBitDepthFlagsKHR::TYPE_10)\n}\n\nfn default_encode_usage(driver_version: DriverVersion) -> vk::VideoEncodeUsageInfoKHR<'static> {\n    // Nvidia chokes on \"ULTRA LOW\" for some reason.\n    let tuning_mode = if matches!(driver_version, DriverVersion::NvidiaProprietary { .. }) {\n        vk::VideoEncodeTuningModeKHR::LOW_LATENCY\n    } else {\n        vk::VideoEncodeTuningModeKHR::ULTRA_LOW_LATENCY\n    };\n\n    vk::VideoEncodeUsageInfoKHR::default()\n        .video_usage_hints(vk::VideoEncodeUsageFlagsKHR::STREAMING)\n        .video_content_hints(vk::VideoEncodeContentFlagsKHR::RENDERED)\n        .tuning_mode(tuning_mode)\n}\n\nfn single_profile_list_info<'a>(\n    profile: &'a mut vk::VideoProfileInfoKHR,\n) -> vk::VideoProfileListInfoKHR<'a> {\n    vk::VideoProfileListInfoKHR {\n        p_profiles: <*const _>::cast(profile),\n        profile_count: 1,\n        ..Default::default()\n    }\n}\n\nfn default_structure(\n    codec: VideoCodec,\n    max_codec_layers: u32,\n    max_dpb_slots: u32,\n) -> anyhow::Result<HierarchicalP> {\n    const MAX_LAYERS: u32 = 4;\n    const DEFAULT_GOP_SIZE: u32 = 256;\n\n    // Disable hierarchical coding on H264, because it's broken.\n    let mut layers = if codec == VideoCodec::H264 {\n        1\n    } else {\n        std::cmp::min(MAX_LAYERS, max_codec_layers)\n    };\n\n    let mut structure = HierarchicalP::new(layers, DEFAULT_GOP_SIZE);\n    while structure.required_dpb_size() as u32 > max_dpb_slots {\n        layers -= 1;\n        if layers == 0 {\n            bail!(\"max_dpb_slots too low\");\n      
  }\n\n        structure = HierarchicalP::new(layers, DEFAULT_GOP_SIZE);\n    }\n\n    Ok(structure)\n}\n"
  },
  {
    "path": "mm-server/src/main.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nmod codec;\nmod color;\nmod config;\nmod container;\nmod encoder;\nmod pixel_scale;\nmod server;\nmod session;\nmod state;\nmod vulkan;\nmod waking_sender;\n\nuse std::{\n    os::unix::fs::DirBuilderExt,\n    path::{Path, PathBuf},\n    process::Command,\n    sync::Arc,\n};\n\nuse anyhow::{bail, Context, Result};\nuse clap::Parser;\nuse parking_lot::Mutex;\nuse tracing::{debug, info, warn};\nuse tracing_subscriber::{util::SubscriberInitExt, EnvFilter, Layer};\n\n#[derive(Debug, Parser)]\n#[command(name = \"mmserver\")]\n#[command(about = \"The Magic Mirror server\", long_about = None)]\nstruct Cli {\n    /// Print the version.\n    #[arg(short, long)]\n    version: bool,\n    /// The address to bind. Defaults to [::0]:9599.\n    #[arg(long, value_name = \"HOST[:PORT]\")]\n    bind: Option<String>,\n    /// Bind using systemd's socket passing protocol (LISTEN_FDS).\n    #[arg(long)]\n    bind_systemd: bool,\n    /// The path to a config file. By default,\n    /// /etc/magic-mirror/mmserver.{toml,json} is used (if present).\n    #[arg(short = 'C', long, value_name = \"FILE\")]\n    config: Option<PathBuf>,\n    /// Include extra app definitions. May be specified multiple times, with\n    /// either individual files or directories to be searched.\n    #[arg(short = 'i', long, value_name = \"PATH\")]\n    include_apps: Vec<PathBuf>,\n    /// Generate a bug report in a temporary directory. 
WARNING: this will save\n    /// video recordings, which may be large!\n    #[arg(long)]\n    bug_report: bool,\n}\n\nfn main() -> Result<()> {\n    let args = Cli::parse();\n\n    let version = format!(\n        \"mmserver {}\",\n        git_version::git_version!(\n            args = [\"--always\", \"--tags\", \"--match\", \"mmserver-v\"],\n            prefix = \"git:\",\n            cargo_prefix = \"\",\n        )\n    );\n\n    if args.version {\n        println!(\"{}\", version);\n        return Ok(());\n    }\n\n    let bug_report_dir = if args.bug_report {\n        let dirname = std::env::temp_dir().join(format!(\"magic-mirror-{}\", uuid::Uuid::new_v4()));\n        std::fs::DirBuilder::new().mode(0o0755).create(&dirname)?;\n\n        Some(dirname)\n    } else {\n        None\n    };\n\n    init_logging(bug_report_dir.as_ref())?;\n\n    debug!(version, \"starting up\");\n    if let Some(ref dirname) = bug_report_dir {\n        warn!(\"generating bug report files in: {:?}\", &dirname);\n    }\n\n    #[cfg(feature = \"tracy\")]\n    warn!(\"tracing enabled!\");\n\n    // Load config.\n    let mut cfg = config::Config::new(args.config.as_ref(), &args.include_apps)\n        .context(\"failed to read config\")?;\n\n    let vk = Arc::new(vulkan::VkContext::new(cfg!(debug_assertions))?);\n\n    preflight_checks(&cfg, &vk)?;\n\n    // Override with command line flags.\n    cfg.bug_report_dir = bug_report_dir.clone();\n    if let Some(bind) = args.bind {\n        cfg.server.bind = bind;\n    } else if args.bind_systemd {\n        cfg.server.bind_systemd = true;\n    }\n\n    let sock = if cfg.server.bind_systemd {\n        let mut listenfd = listenfd::ListenFd::from_env();\n        if let Some(sock) = listenfd.take_udp_socket(0)? 
{\n            debug!(\"using systemd socket: {:?}\", sock.local_addr()?);\n            sock\n        } else {\n            bail!(\"systemd UDP socket not found\")\n        }\n    } else {\n        std::net::UdpSocket::bind(&cfg.server.bind).context(\"binding server socket\")?\n    };\n\n    let state = Arc::new(Mutex::new(state::ServerState::new(vk, cfg.clone())));\n    let mut srv = server::Server::new(sock, cfg.server.clone(), state)?;\n\n    let closer = srv.closer();\n    ctrlc::set_handler(move || {\n        debug!(\"received SIGINT\");\n        closer.send(()).ok();\n    })?;\n\n    info!(\"listening on {:?}\", srv.local_addr()?);\n    srv.run().context(\"server exited\")?;\n\n    if let Some(dir) = &bug_report_dir {\n        save_vulkaninfo(dir);\n        info!(\"bug report files saved to: {:?}\", dir);\n    }\n\n    Ok(())\n}\n\nfn init_logging(bug_report_dir: Option<impl AsRef<Path>>) -> Result<()> {\n    use tracing_subscriber::layer::SubscriberExt;\n\n    let trace_log = if let Some(dir) = bug_report_dir {\n        // Additionally write a trace log with everything to the bug report dir.\n        let file = std::fs::File::create(dir.as_ref().join(\"mmserver.log\"))?;\n        let trace_filter =\n            tracing_subscriber::EnvFilter::new(\"mmserver=trace,fuser=trace,southpaw=trace\");\n\n        let trace_log = tracing_subscriber::fmt::layer()\n            .with_ansi(false)\n            .with_writer(std::sync::Mutex::new(file))\n            .with_filter(trace_filter);\n\n        Some(trace_log)\n    } else {\n        None\n    };\n\n    let tracy = if cfg!(feature = \"tracy\") {\n        Some(tracing_tracy::TracyLayer::default().with_filter(EnvFilter::new(\"mmserver=trace\")))\n    } else {\n        None\n    };\n\n    let printed_log = tracing_subscriber::fmt::layer().with_filter(\n        EnvFilter::builder()\n            .with_default_directive(\"mmserver=info\".parse()?)\n            .from_env_lossy(),\n    );\n\n    
tracing_subscriber::registry()\n        .with(tracy)\n        .with(trace_log)\n        .with(printed_log)\n        .init();\n\n    Ok(())\n}\n\nfn preflight_checks(cfg: &config::Config, vk: &vulkan::VkContext) -> anyhow::Result<()> {\n    match linux_version() {\n        Some((major, minor)) if major < 6 => {\n            bail!(\"kernel version {major}.{minor} is too low; 6.x required\");\n        }\n        None => warn!(\"unable to determine linux kernel version!\"),\n        _ => (),\n    }\n\n    match vk.device_info.driver_version {\n        vulkan::DriverVersion::MesaRadv {\n            major,\n            minor,\n            patch,\n        } => {\n            if major < 24 || (major == 24 && minor < 3) {\n                bail!(\"mesa >= 24.3 required, have {major}.{minor}.{patch}\");\n            }\n        }\n        vulkan::DriverVersion::NvidiaProprietary { major, minor } => {\n            if major < 565 {\n                bail!(\"driver version >= 565.x required, have {major}.{minor}\");\n            }\n        }\n        vulkan::DriverVersion::Other(ref driver) => {\n            warn!(driver, \"using potentially unsupported vulkan driver\")\n        }\n    }\n\n    std::fs::create_dir_all(&cfg.data_home).context(format!(\n        \"failed to initialize data_home ({})\",\n        cfg.data_home.display(),\n    ))?;\n\n    // Check for Ubuntu's restrictions on rootless containers.\n    if sysctl(\"apparmor_restrict_unprivileged_unconfined\")\n        || sysctl(\"apparmor_restrict_unprivileged_userns\")\n    {\n        warn!(\n            \"Unprivileged user namespaces restricted by AppArmor! Launching applications will \\\n             fail unless an exception is installed. 
Read more here: \\\n             https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces\"\n        )\n    }\n\n    Ok(())\n}\n\nfn linux_version() -> Option<(u32, u32)> {\n    let uname = rustix::system::uname();\n    let version = uname.release().to_str().ok()?;\n\n    let version = version.split_whitespace().next()?;\n    let mut parts = version.splitn(3, \".\");\n    let major = parts.next()?;\n    let minor = parts.next()?;\n\n    Some((major.parse().ok()?, minor.parse().ok()?))\n}\n\nfn sysctl(name: &str) -> bool {\n    const CTL_PATH: &str = \"/proc/sys/kernel\";\n\n    std::fs::read_to_string(Path::new(CTL_PATH).join(name))\n        .map(|s| s.trim() == \"1\")\n        .ok()\n        .unwrap_or_default()\n}\n\nfn save_vulkaninfo(bug_report_dir: impl AsRef<Path>) {\n    match Command::new(\"vulkaninfo\").env_clear().output() {\n        Ok(output) => {\n            let _ = std::fs::write(\n                bug_report_dir.as_ref().join(\"vulkaninfo.log\"),\n                output.stdout,\n            );\n        }\n        Err(e) => debug!(\"failed to run vulkaninfo: {:#}\", e),\n    }\n}\n\n#[test]\nfn test_linux_version() {\n    let Some((major, _minor)) = linux_version() else {\n        panic!(\"failed to determine linux version\");\n    };\n\n    assert!(major >= 6);\n}\n"
  },
  {
    "path": "mm-server/src/pixel_scale.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::fmt;\n\nuse anyhow::anyhow;\nuse mm_protocol as protocol;\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub struct PixelScale(pub u32, pub u32);\n\nimpl PixelScale {\n    pub const ONE: Self = Self(1, 1);\n\n    pub fn is_fractional(&self) -> bool {\n        (self.0 % self.1) != 0\n    }\n\n    pub fn ceil(self) -> Self {\n        Self(self.0.next_multiple_of(self.1), self.1)\n    }\n}\n\nimpl std::fmt::Display for PixelScale {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"{:.1}\", self.0 as f64 / self.1 as f64)\n    }\n}\n\nimpl Default for PixelScale {\n    fn default() -> Self {\n        PixelScale::ONE\n    }\n}\n\n#[derive(Debug, Clone)]\npub struct FractionalScaleError;\n\nimpl fmt::Display for FractionalScaleError {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        write!(f, \"scale is fractional\")\n    }\n}\n\nimpl TryFrom<protocol::PixelScale> for PixelScale {\n    type Error = anyhow::Error;\n\n    fn try_from(scale: protocol::PixelScale) -> anyhow::Result<Self> {\n        if scale.denominator == 0 && scale.numerator != 0 {\n            Ok(Self(1, 1))\n        } else if scale.denominator == 0 || scale.numerator == 0 {\n            Err(anyhow!(\n                \"invalid pixel scale: {}/{}\",\n                scale.numerator,\n                scale.denominator\n            ))\n        } else {\n            Ok(Self(scale.numerator, scale.denominator))\n        }\n    }\n}\n\nimpl From<PixelScale> for f64 {\n    fn from(value: PixelScale) -> Self {\n        value.0 as f64 / value.1 as f64\n    }\n}\n\nimpl TryFrom<PixelScale> for u32 {\n    type Error = FractionalScaleError;\n\n    fn try_from(value: PixelScale) -> Result<Self, Self::Error> {\n        if value.is_fractional() {\n            return Err(FractionalScaleError);\n        }\n\n        Ok(value.0 / 
value.1)\n    }\n}\n\nimpl From<PixelScale> for protocol::PixelScale {\n    fn from(scale: PixelScale) -> Self {\n        Self {\n            numerator: scale.0,\n            denominator: scale.1,\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/server/handlers/attachment/stats.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::time;\n\nuse simple_moving_average::SMA as _;\nuse tracing::info;\n\npub struct AttachmentStats {\n    app_id: String,\n    start: time::Instant,\n    total_transfer: u64,\n\n    sma: simple_moving_average::SingleSumSMA<f64, f64, 300>,\n\n    last_log: time::Instant,\n}\n\nimpl AttachmentStats {\n    pub fn new(app_id: String) -> Self {\n        let now = time::Instant::now();\n\n        Self {\n            app_id,\n            start: now,\n            total_transfer: 0,\n\n            sma: simple_moving_average::SingleSumSMA::new(),\n\n            last_log: now,\n        }\n    }\n\n    pub fn record_frame(&mut self, _seq: u64, len: usize, duration: time::Duration) {\n        self.total_transfer += len as u64;\n        self.sma\n            .add_sample((len as f64 * 8.0 / (1024.0 * 1024.0)) / duration.as_secs_f64());\n\n        let avg = self.sma.get_average();\n\n        if self.last_log.elapsed().as_secs() > 5 {\n            self.last_log = time::Instant::now();\n\n            let total_transfer_gb = self.total_transfer as f32 / (1024.0 * 1024.0 * 1024.0);\n            info!(\n                duration = ?self.start.elapsed(),\n                current_bitrate_mbps = avg,\n                total_transfer_gb,\n                \"{}\", self.app_id\n            );\n        }\n\n        #[cfg(feature = \"tracy\")]\n        if _seq % 10 == 0 {\n            tracy_client::plot!(\"video bitrate (mbps)\", avg);\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/server/handlers/attachment.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{collections::BTreeMap, fs, path::PathBuf, time};\n\nuse mm_protocol::{self as protocol, error::ErrorCode};\nuse tracing::{debug, debug_span, error, trace};\n\nmod stats;\n\nuse super::{validate_attachment, validate_gamepad, ServerError, ValidationError};\nuse crate::{\n    server::stream::StreamWriter,\n    session::{\n        compositor,\n        control::{ControlMessage, DisplayParams, SessionEvent},\n        Attachment,\n    },\n};\n\nimpl From<DisplayParams> for protocol::VirtualDisplayParameters {\n    fn from(params: DisplayParams) -> Self {\n        protocol::VirtualDisplayParameters {\n            resolution: Some(protocol::Size {\n                width: params.width,\n                height: params.height,\n            }),\n            framerate_hz: params.framerate,\n            ui_scale: Some(params.ui_scale.into()),\n        }\n    }\n}\n\nstruct AttachmentHandler<'a> {\n    ctx: &'a super::Context,\n    handle: Attachment,\n\n    session_display_params: DisplayParams,\n    attached: protocol::Attached,\n    superscale: f64,\n\n    // Keep track of the pointer lock, and debounce session events for it.\n    pointer_lock: Option<(f64, f64)>,\n\n    last_video_frame_recvd: time::Instant,\n    last_audio_frame_recvd: time::Instant,\n    current_video_stream_seq: u64,\n\n    // For saving the bitstream to disk in bug reports.\n    bug_report: Option<(PathBuf, BTreeMap<u64, fs::File>)>,\n\n    stats: stats::AttachmentStats,\n}\n\n#[derive(Debug, Clone)]\nenum AttachmentError {\n    Finished,\n    ServerError(ErrorCode, Option<String>),\n}\n\npub fn attach(ctx: &super::Context, msg: protocol::Attach) -> Result<(), ServerError> {\n    let session_id = msg.session_id;\n    let handler = AttachmentHandler::new(ctx, msg)?;\n\n    // Make sure we detach, even if we panic.\n    let mut handler = scopeguard::guard(handler, |h| {\n        
debug!(\"detaching from session\");\n        if let Some(s) = ctx.state.lock().sessions.get_mut(&session_id) {\n            s.detach(h.handle).ok();\n        };\n    });\n\n    handler.run()\n}\n\nimpl<'a> AttachmentHandler<'a> {\n    fn new(ctx: &'a super::Context, msg: protocol::Attach) -> Result<Self, ServerError> {\n        if msg.attachment_type() != protocol::AttachmentType::Operator {\n            return Err(ServerError(\n                ErrorCode::ErrorAttachmentParamsNotSupported,\n                Some(\"unsupported attachment type\".to_string()),\n            ));\n        }\n\n        let session_id = msg.session_id;\n        let (video_params, audio_params) = validate_attachment(msg).map_err(|err| match err {\n            ValidationError::Unsupported(text) => {\n                ServerError(ErrorCode::ErrorAttachmentParamsNotSupported, Some(text))\n            }\n            ValidationError::Invalid(text) => ServerError(ErrorCode::ErrorProtocol, Some(text)),\n        })?;\n\n        let mut guard = ctx.state.lock();\n        let server_config = guard.cfg.server.clone();\n\n        let attachment_id = guard.id_generator.next_int();\n        let Some(session) = guard.sessions.get_mut(&session_id) else {\n            return Err(ServerError(ErrorCode::ErrorSessionNotFound, None));\n        };\n\n        if !session.supports_stream(video_params) {\n            return Err(ServerError(\n                ErrorCode::ErrorAttachmentParamsNotSupported,\n                Some(\"unsupported streaming resolution or codec\".to_string()),\n            ));\n        }\n\n        let stream_writer = StreamWriter::new(\n            session_id,\n            attachment_id,\n            &server_config,\n            ctx.outgoing_dgrams.clone(),\n            ctx.max_dgram_len,\n        );\n\n        let handle = match session.attach(\n            attachment_id,\n            true,\n            video_params,\n            audio_params,\n            stream_writer,\n        ) {\n        
    Ok(v) => v,\n            Err(err) => {\n                error!(?err, \"failed to attach to session\");\n                return Err(ServerError(\n                    ErrorCode::ErrorServer,\n                    Some(\"failed to attach to session\".to_string()),\n                ));\n            }\n        };\n\n        let app_id = session.application_id.clone();\n        let display_params = session.display_params;\n        let bug_report_dir = session.bug_report_dir.clone();\n        drop(guard);\n\n        let superscale = display_params.height as f64 / video_params.height as f64;\n        assert_eq!(display_params.height % video_params.height, 0);\n        assert_eq!(\n            display_params.width as f64 / video_params.width as f64,\n            superscale\n        );\n\n        debug!(\n            ?video_params,\n            ?audio_params,\n            ?superscale,\n            \"attaching with params\"\n        );\n\n        let video_codec: protocol::VideoCodec = video_params.codec.into();\n        let video_profile: protocol::VideoProfile = video_params.profile.into();\n        let audio_codec: protocol::AudioCodec = audio_params.codec.into();\n        let attached = protocol::Attached {\n            session_id,\n            attachment_id: handle.attachment_id,\n\n            video_codec: video_codec.into(),\n            streaming_resolution: Some(protocol::Size {\n                width: video_params.width,\n                height: video_params.height,\n            }),\n            video_profile: video_profile.into(),\n\n            quality_preset: video_params.preset,\n\n            audio_codec: audio_codec.into(),\n            sample_rate_hz: audio_params.sample_rate,\n            channels: Some(protocol::AudioChannels {\n                channels: vec![\n                    protocol::audio_channels::Channel::Mono.into();\n                    audio_params.channels as usize\n                ],\n            }),\n        };\n\n        let pointer_lock 
= None;\n\n        let now = time::Instant::now();\n\n        Ok(Self {\n            ctx,\n            handle,\n\n            session_display_params: display_params,\n            attached,\n            superscale,\n\n            pointer_lock,\n\n            last_video_frame_recvd: now,\n            last_audio_frame_recvd: now,\n            current_video_stream_seq: 0,\n\n            bug_report: bug_report_dir.map(|dir| (dir, BTreeMap::default())),\n\n            stats: stats::AttachmentStats::new(app_id),\n        })\n    }\n\n    fn run(&mut self) -> Result<(), ServerError> {\n        let span = debug_span!(\n            \"attachment\",\n            self.handle.session_id,\n            self.handle.attachment_id,\n        );\n        let _guard = span.enter();\n\n        if self\n            .ctx\n            .outgoing\n            .send(self.attached.clone().into())\n            .is_err()\n        {\n            // Client already hung up.\n            return Ok(());\n        }\n\n        loop {\n            crossbeam_channel::select! 
{\n                recv(self.ctx.incoming) -> msg => {\n                    match msg {\n                        Ok(m) => {\n                            match self.handle_attachment_message(m) {\n                                Ok(_) => (),\n                                Err(AttachmentError::Finished) => return Ok(()),\n                                Err(AttachmentError::ServerError(code, text)) => {\n                                    return Err(ServerError(code, text));\n                                }\n                            }\n                        }\n                        Err(_) => return Ok(()), // Client fin.\n                    }\n                },\n                recv(&self.handle.events) -> event => {\n                    match event {\n                        Ok(ev) => match self.handle_session_event(ev) {\n                            Ok(_) => (),\n                            Err(AttachmentError::Finished) => return Ok(()),\n                            Err(AttachmentError::ServerError(code, text)) => {\n                                return Err(ServerError(code, text));\n                            }\n                        }\n                        Err(e) => {\n                            // Mark the session defunct. 
It'll get GC'd.\n                            error!(\"error in attach handler: {:#}\", e);\n\n                            if let Some(s) = self.ctx.state.lock().sessions.get_mut(&self.handle.session_id) {\n                                s.defunct = true;\n                            };\n\n                            return Err(ServerError(\n                                ErrorCode::ErrorServer,\n                                Some(\"internal server error\".to_string()),\n                            ));\n                        }\n                    }\n                },\n            }\n        }\n    }\n\n    fn handle_attachment_message(\n        &mut self,\n        msg: protocol::MessageType,\n    ) -> Result<(), AttachmentError> {\n        match msg {\n            protocol::MessageType::KeepAlive(_) => {}\n            protocol::MessageType::Detach(_) => return Err(AttachmentError::Finished),\n            protocol::MessageType::RequestVideoRefresh(ev) => {\n                if ev.stream_seq == self.current_video_stream_seq {\n                    let _ = self.handle.control.send(ControlMessage::RefreshVideo);\n                } else {\n                    debug!(\n                        current = self.current_video_stream_seq,\n                        requested = ev.stream_seq,\n                        \"ignoring RequestVideoRefresh\"\n                    );\n                }\n            }\n            protocol::MessageType::KeyboardInput(ev) => {\n                use protocol::keyboard_input::KeyState;\n\n                trace!(ev.key, ev.state, \"received keyboard event: {:?}\", ev);\n\n                let state = match ev.state.try_into() {\n                    Ok(KeyState::Unknown) | Err(_) => {\n                        return Err(AttachmentError::ServerError(\n                            ErrorCode::ErrorProtocol,\n                            Some(\"invalid key state\".to_string()),\n                        ));\n                    }\n                    
Ok(KeyState::Pressed) => compositor::KeyState::Pressed,\n                    Ok(KeyState::Released) => compositor::KeyState::Released,\n                    Ok(KeyState::Repeat) => compositor::KeyState::Repeat,\n                };\n\n                let key_code =\n                    match protocol::keyboard_input::Key::try_from(ev.key).map(key_to_evdev) {\n                        Ok(Some(scancode)) => scancode,\n                        _ => {\n                            return Err(AttachmentError::ServerError(\n                                ErrorCode::ErrorProtocol,\n                                Some(\"invalid key\".to_string()),\n                            ));\n                        }\n                    };\n\n                let ch = match ev.char {\n                    0 => None,\n                    c => match char::from_u32(c) {\n                        Some(c) => Some(c),\n                        None => {\n                            return Err(AttachmentError::ServerError(\n                                ErrorCode::ErrorProtocol,\n                                Some(\"invalid keychar\".to_string()),\n                            ));\n                        }\n                    },\n                };\n\n                trace!(key_code, ?state, ?ch, \"translated keyboard event\");\n\n                self.handle\n                    .control\n                    .send(ControlMessage::KeyboardInput {\n                        key_code,\n                        state,\n                        char: ch,\n                    })\n                    .ok();\n            }\n            protocol::MessageType::PointerMotion(ev) => {\n                let x = ev.x * self.superscale;\n                let y = ev.y * self.superscale;\n                self.handle\n                    .control\n                    .send(ControlMessage::PointerMotion(x, y))\n                    .ok();\n            }\n            protocol::MessageType::RelativePointerMotion(ev) => 
{\n                let x = ev.x * self.superscale;\n                let y = ev.y * self.superscale;\n                self.handle\n                    .control\n                    .send(ControlMessage::RelativePointerMotion(x, y))\n                    .ok();\n            }\n            protocol::MessageType::PointerEntered(_) => {\n                self.handle\n                    .control\n                    .send(ControlMessage::PointerEntered)\n                    .ok();\n            }\n            protocol::MessageType::PointerLeft(_) => {\n                self.handle.control.send(ControlMessage::PointerLeft).ok();\n            }\n            protocol::MessageType::PointerInput(ev) => {\n                use protocol::pointer_input::*;\n\n                let state = match ev.state.try_into() {\n                    Ok(ButtonState::Unknown) | Err(_) => {\n                        return Err(AttachmentError::ServerError(\n                            ErrorCode::ErrorProtocol,\n                            Some(\"invalid button state\".to_string()),\n                        ));\n                    }\n                    Ok(ButtonState::Pressed) => compositor::ButtonState::Pressed,\n                    Ok(ButtonState::Released) => compositor::ButtonState::Released,\n                };\n\n                // https://gitlab.freedesktop.org/libinput/libinput/-/blob/main/include/linux/linux/input-event-codes.h#L354\n                let button_code = match ev.button.try_into() {\n                    Ok(Button::Left) => 0x110,\n                    Ok(Button::Right) => 0x111,\n                    Ok(Button::Middle) => 0x112,\n                    Ok(Button::Forward) => 0x115,\n                    Ok(Button::Back) => 0x116,\n                    _ => {\n                        return Err(AttachmentError::ServerError(\n                            ErrorCode::ErrorProtocol,\n                            Some(\"invalid button\".to_string()),\n                        ));\n              
      }\n                };\n\n                trace!(\n                    button = ev.button,\n                    pressed = (state == compositor::ButtonState::Pressed),\n                    \"sending cursor input event\",\n                );\n\n                self.handle\n                    .control\n                    .send(ControlMessage::PointerInput {\n                        x: ev.x,\n                        y: ev.y,\n                        button_code,\n                        state,\n                    })\n                    .ok();\n            }\n            protocol::MessageType::PointerScroll(ev) => match ev.scroll_type.try_into() {\n                Ok(protocol::pointer_scroll::ScrollType::Continuous) => {\n                    let x = ev.x * self.superscale;\n                    let y = ev.y * self.superscale;\n                    self.handle\n                        .control\n                        .send(ControlMessage::PointerAxis(x, y))\n                        .ok();\n                }\n                Ok(protocol::pointer_scroll::ScrollType::Discrete) => {\n                    self.handle\n                        .control\n                        .send(ControlMessage::PointerAxisDiscrete(ev.x, ev.y))\n                        .ok();\n                }\n                _ => {\n                    return Err(AttachmentError::ServerError(\n                        ErrorCode::ErrorProtocol,\n                        Some(\"invalid scroll type\".to_string()),\n                    ));\n                }\n            },\n            protocol::MessageType::GamepadAvailable(ev) => {\n                let (id, _layout) = match validate_gamepad(ev.gamepad) {\n                    Ok(v) => v,\n                    Err(ValidationError::Invalid(text)) => {\n                        return Err(AttachmentError::ServerError(\n                            ErrorCode::ErrorProtocol,\n                            Some(text),\n                        ))\n                 
   }\n                    Err(_) => unreachable!(),\n                };\n\n                self.handle\n                    .control\n                    .send(ControlMessage::GamepadAvailable(id))\n                    .ok();\n            }\n            protocol::MessageType::GamepadUnavailable(ev) => {\n                self.handle\n                    .control\n                    .send(ControlMessage::GamepadUnavailable(ev.id))\n                    .ok();\n            }\n            protocol::MessageType::GamepadMotion(ev) => {\n                let (scancode, is_trigger) =\n                    match protocol::gamepad_motion::GamepadAxis::try_from(ev.axis)\n                        .ok()\n                        .and_then(axis_to_evdev)\n                    {\n                        Some(v) => v,\n                        _ => {\n                            return Err(AttachmentError::ServerError(\n                                ErrorCode::ErrorProtocol,\n                                Some(\"invalid gamepad axis\".to_string()),\n                            ));\n                        }\n                    };\n\n                let cm = if is_trigger {\n                    ControlMessage::GamepadTrigger {\n                        id: ev.gamepad_id,\n                        trigger_code: scancode,\n                        value: ev.value,\n                    }\n                } else {\n                    ControlMessage::GamepadAxis {\n                        id: ev.gamepad_id,\n                        axis_code: scancode,\n                        value: ev.value,\n                    }\n                };\n\n                self.handle.control.send(cm).ok();\n            }\n            protocol::MessageType::GamepadInput(ev) => {\n                use protocol::gamepad_input::{GamepadButton, GamepadButtonState};\n                let state = match ev.state.try_into() {\n                    Ok(GamepadButtonState::Unknown) | Err(_) => {\n                        
return Err(AttachmentError::ServerError(\n                            ErrorCode::ErrorProtocol,\n                            Some(\"invalid gamepad button state\".to_string()),\n                        ));\n                    }\n                    Ok(GamepadButtonState::Pressed) => compositor::ButtonState::Pressed,\n                    Ok(GamepadButtonState::Released) => compositor::ButtonState::Released,\n                };\n\n                let scancode = match GamepadButton::try_from(ev.button)\n                    .ok()\n                    .and_then(gamepad_button_to_evdev)\n                {\n                    Some(v) => v,\n                    _ => {\n                        return Err(AttachmentError::ServerError(\n                            ErrorCode::ErrorProtocol,\n                            Some(\"invalid gamepad button\".to_string()),\n                        ));\n                    }\n                };\n\n                self.handle\n                    .control\n                    .send(ControlMessage::GamepadInput {\n                        id: ev.gamepad_id,\n                        button_code: scancode,\n                        state,\n                    })\n                    .ok();\n            }\n            protocol::MessageType::Error(ev) => {\n                error!(\n                    \"received error from client: {}: {}\",\n                    ev.err_code().as_str_name(),\n                    ev.error_text\n                );\n            }\n            msg => {\n                debug!(\"received {} from client on attachment stream\", msg);\n                return Err(AttachmentError::ServerError(\n                    ErrorCode::ErrorProtocolUnexpectedMessage,\n                    None,\n                ));\n            }\n        }\n\n        Ok(())\n    }\n\n    fn handle_session_event(&mut self, event: SessionEvent) -> Result<(), AttachmentError> {\n        match event {\n            SessionEvent::Shutdown => {\n          
      // The session ended, probably because the app exited.\n                self.ctx\n                    .state\n                    .lock()\n                    .sessions\n                    .remove(&self.handle.session_id);\n\n                self.send(protocol::SessionEnded {});\n                return Err(AttachmentError::Finished);\n            }\n            SessionEvent::DisplayParamsChanged { params, reattach } => {\n                self.session_display_params = params;\n                let msg = protocol::SessionParametersChanged {\n                    display_params: Some(params.into()),\n                    supported_streaming_resolutions: super::generate_streaming_res(&params),\n                    reattach_required: reattach,\n                };\n\n                self.send(msg);\n                if reattach {\n                    return Err(AttachmentError::Finished);\n                }\n            }\n            SessionEvent::VideoFrame {\n                stream_seq,\n                seq,\n                frame,\n                ..\n            } => {\n                self.current_video_stream_seq = self.current_video_stream_seq.max(stream_seq);\n\n                let duration = self.last_video_frame_recvd.elapsed();\n                if duration\n                    > time::Duration::from_secs_f32(\n                        1.5 / self.session_display_params.framerate as f32,\n                    )\n                {\n                    debug!(dur = ?duration, \"slow video frame\");\n                }\n\n                self.last_video_frame_recvd = time::Instant::now();\n                self.stats.record_frame(seq, frame.len(), duration);\n\n                if let Some((root, files)) = &mut self.bug_report {\n                    let file = files.entry(stream_seq).or_insert_with(|| {\n                        let ext = format!(\"{:?}\", self.attached.video_codec()).to_lowercase();\n                        let path = root.join(format!(\n            
                \"attachment-{:02}-{}.{}\",\n                            stream_seq, self.handle.attachment_id, ext\n                        ));\n                        std::fs::File::create(path).unwrap()\n                    });\n\n                    std::io::Write::write_all(file, &frame).unwrap();\n                    std::io::Write::flush(file).unwrap();\n                }\n            }\n            SessionEvent::AudioFrame { seq, frame, .. } => {\n                let duration = self.last_audio_frame_recvd.elapsed();\n                if duration\n                    > time::Duration::from_secs_f32(\n                        1.5 / self.session_display_params.framerate as f32,\n                    )\n                {\n                    debug!(dur = ?duration, \"slow audio frame\");\n                }\n\n                self.last_audio_frame_recvd = time::Instant::now();\n                self.stats.record_frame(seq, frame.len(), duration);\n            }\n            SessionEvent::CursorUpdate {\n                image,\n                icon,\n                hotspot_x,\n                hotspot_y,\n            } => {\n                use protocol::update_cursor::CursorIcon;\n                let icon: CursorIcon = icon.map(cursor_icon_to_proto).unwrap_or(CursorIcon::None);\n\n                let msg = protocol::UpdateCursor {\n                    image: image.unwrap_or_default(),\n                    icon: icon.into(),\n                    hotspot_x,\n                    hotspot_y,\n                };\n\n                self.send(msg);\n            }\n            SessionEvent::PointerLocked(x, y) => {\n                let x = x / self.superscale;\n                let y = y / self.superscale;\n\n                if self.pointer_lock.replace((x, y)).is_none() {\n                    self.send(protocol::LockPointer { x, y });\n                }\n            }\n            SessionEvent::PointerReleased => {\n                if self.pointer_lock.take().is_some() {\n  
                  self.send(protocol::ReleasePointer {});\n                }\n            }\n        }\n\n        Ok(())\n    }\n\n    fn send(&self, msg: impl Into<protocol::MessageType>) {\n        let _ = self.ctx.outgoing.send(msg.into());\n    }\n}\n\nfn key_to_evdev(key: protocol::keyboard_input::Key) -> Option<u32> {\n    use protocol::keyboard_input::Key;\n\n    match key {\n        Key::Escape => Some(1),\n        Key::Digit1 => Some(2),\n        Key::Digit2 => Some(3),\n        Key::Digit3 => Some(4),\n        Key::Digit4 => Some(5),\n        Key::Digit5 => Some(6),\n        Key::Digit6 => Some(7),\n        Key::Digit7 => Some(8),\n        Key::Digit8 => Some(9),\n        Key::Digit9 => Some(10),\n        Key::Digit0 => Some(11),\n        Key::Minus => Some(12),\n        Key::Equal => Some(13),\n        Key::Backspace => Some(14),\n        Key::Tab => Some(15),\n        Key::Q => Some(16),\n        Key::W => Some(17),\n        Key::E => Some(18),\n        Key::R => Some(19),\n        Key::T => Some(20),\n        Key::Y => Some(21),\n        Key::U => Some(22),\n        Key::I => Some(23),\n        Key::O => Some(24),\n        Key::P => Some(25),\n        Key::BracketLeft => Some(26),\n        Key::BracketRight => Some(27),\n        Key::Enter => Some(28),\n        Key::ControlLeft => Some(29),\n        Key::A => Some(30),\n        Key::S => Some(31),\n        Key::D => Some(32),\n        Key::F => Some(33),\n        Key::G => Some(34),\n        Key::H => Some(35),\n        Key::J => Some(36),\n        Key::K => Some(37),\n        Key::L => Some(38),\n        Key::Semicolon => Some(39),\n        Key::Quote => Some(40),\n        Key::Backquote => Some(41),\n        Key::ShiftLeft => Some(42),\n        Key::Backslash => Some(43),\n        Key::Z => Some(44),\n        Key::X => Some(45),\n        Key::C => Some(46),\n        Key::V => Some(47),\n        Key::B => Some(48),\n        Key::N => Some(49),\n        Key::M => Some(50),\n        Key::Comma => 
Some(51),\n        Key::Period => Some(52),\n        Key::Slash => Some(53),\n        Key::ShiftRight => Some(54),\n        Key::NumpadMultiply => Some(55),\n        Key::AltLeft => Some(56),\n        Key::Space => Some(57),\n        Key::CapsLock => Some(58),\n        Key::F1 => Some(59),\n        Key::F2 => Some(60),\n        Key::F3 => Some(61),\n        Key::F4 => Some(62),\n        Key::F5 => Some(63),\n        Key::F6 => Some(64),\n        Key::F7 => Some(65),\n        Key::F8 => Some(66),\n        Key::F9 => Some(67),\n        Key::F10 => Some(68),\n        Key::NumLock => Some(69),\n        Key::ScrollLock => Some(70),\n        Key::Numpad7 => Some(71),\n        Key::Numpad8 => Some(72),\n        Key::Numpad9 => Some(73),\n        Key::NumpadSubtract => Some(74),\n        Key::Numpad4 => Some(75),\n        Key::Numpad5 => Some(76),\n        Key::Numpad6 => Some(77),\n        Key::NumpadAdd => Some(78),\n        Key::Numpad1 => Some(79),\n        Key::Numpad2 => Some(80),\n        Key::Numpad3 => Some(81),\n        Key::Numpad0 => Some(82),\n        Key::NumpadDecimal => Some(83),\n        Key::Lang5 => Some(85),\n        Key::IntlBackslash => Some(86),\n        Key::F11 => Some(87),\n        Key::F12 => Some(88),\n        Key::IntlRo => Some(89),\n        Key::Katakana => Some(90),\n        Key::Hiragana => Some(91),\n        Key::Convert => Some(92),\n        Key::KanaMode => Some(93),\n        Key::NonConvert => Some(94),\n        Key::NumpadEnter => Some(96),\n        Key::ControlRight => Some(97),\n        Key::NumpadDivide => Some(98),\n        Key::PrintScreen => Some(99),\n        Key::AltRight => Some(100),\n        Key::Home => Some(102),\n        Key::ArrowUp => Some(103),\n        Key::PageUp => Some(104),\n        Key::ArrowLeft => Some(105),\n        Key::ArrowRight => Some(106),\n        Key::End => Some(107),\n        Key::ArrowDown => Some(108),\n        Key::PageDown => Some(109),\n        Key::Insert => Some(110),\n        Key::Delete => 
Some(111),\n        Key::NumpadEqual => Some(117),\n        Key::Pause => Some(119),\n        Key::NumpadComma => Some(121),\n        Key::IntlYen => Some(124),\n        Key::MetaLeft => Some(125),\n        Key::MetaRight => Some(126),\n        Key::ContextMenu => Some(127),\n        Key::Help => Some(138),\n        Key::NumpadParenLeft => Some(179),\n        Key::NumpadParenRight => Some(180),\n        // Linux doesn't have this, so we'll map it to the regular backspace.\n        Key::NumpadBackspace => Some(14),\n        // TODO: Can't find these at all.\n        Key::Fn | Key::FnLock => None,\n        Key::Lang1 | Key::Lang2 | Key::Lang3 | Key::Lang4 => None,\n        Key::NumpadClear\n        | Key::NumpadClearEntry\n        | Key::NumpadHash\n        | Key::NumpadMemoryAdd\n        | Key::NumpadMemoryClear\n        | Key::NumpadMemoryRecall\n        | Key::NumpadMemoryStore\n        | Key::NumpadMemorySubtract => None,\n        Key::Unknown => None,\n    }\n}\n\nfn axis_to_evdev(axis: protocol::gamepad_motion::GamepadAxis) -> Option<(u32, bool)> {\n    use protocol::gamepad_motion::GamepadAxis;\n    match axis {\n        GamepadAxis::LeftX => Some((0x00, false)),       // ABS_X\n        GamepadAxis::LeftY => Some((0x01, false)),       // ABS_Y\n        GamepadAxis::RightX => Some((0x03, false)),      // ABS_RX\n        GamepadAxis::RightY => Some((0x04, false)),      // ABS_RY,\n        GamepadAxis::LeftTrigger => Some((0x02, true)),  // ABS_Z\n        GamepadAxis::RightTrigger => Some((0x05, true)), // ABS_RZ\n        GamepadAxis::Unknown => None,\n    }\n}\n\nfn gamepad_button_to_evdev(button: protocol::gamepad_input::GamepadButton) -> Option<u32> {\n    use protocol::gamepad_input::GamepadButton;\n\n    match button {\n        GamepadButton::DpadLeft => Some(0x222),      // BTN_DPAD_LEFT\n        GamepadButton::DpadRight => Some(0x223),     // BTN_DPAD_RIGHT\n        GamepadButton::DpadUp => Some(0x220),        // BTN_DPAD_UP\n        
GamepadButton::DpadDown => Some(0x221),      // BTN_DPAD_DOWN\n        GamepadButton::South => Some(0x130),         // BTN_SOUTH\n        GamepadButton::East => Some(0x131),          // BTN_EAST\n        GamepadButton::North => Some(0x133),         // BTN_NORTH\n        GamepadButton::West => Some(0x134),          // BTN_WEST\n        GamepadButton::C => Some(0x132),             // BTN_C\n        GamepadButton::Z => Some(0x135),             // BTN_Z\n        GamepadButton::ShoulderLeft => Some(0x136),  // BTN_TL\n        GamepadButton::ShoulderRight => Some(0x137), // BTN_TR\n        GamepadButton::JoystickLeft => Some(0x13d),  // BTN_THUMBL\n        GamepadButton::JoystickRight => Some(0x13e), // BTN_THUMBR\n        GamepadButton::Start => Some(0x13b),         // BTN_START\n        GamepadButton::Select => Some(0x13a),        // BTN_SELECT\n        GamepadButton::Logo => Some(0x13c),          // BTN_MODE\n        GamepadButton::Share => None,                // TODO I'm not sure what code to use.\n        GamepadButton::TriggerLeft => Some(0x138),   // BTN_TL2\n        GamepadButton::TriggerRight => Some(0x139),  // BTN_TR2\n        GamepadButton::Unknown => None,\n    }\n}\n\nfn cursor_icon_to_proto(icon: cursor_icon::CursorIcon) -> protocol::update_cursor::CursorIcon {\n    use protocol::update_cursor::CursorIcon;\n\n    match icon {\n        cursor_icon::CursorIcon::ContextMenu => CursorIcon::ContextMenu,\n        cursor_icon::CursorIcon::Help => CursorIcon::Help,\n        cursor_icon::CursorIcon::Pointer => CursorIcon::Pointer,\n        cursor_icon::CursorIcon::Progress => CursorIcon::Progress,\n        cursor_icon::CursorIcon::Wait => CursorIcon::Wait,\n        cursor_icon::CursorIcon::Cell => CursorIcon::Cell,\n        cursor_icon::CursorIcon::Crosshair => CursorIcon::Crosshair,\n        cursor_icon::CursorIcon::Text => CursorIcon::Text,\n        cursor_icon::CursorIcon::VerticalText => CursorIcon::VerticalText,\n        cursor_icon::CursorIcon::Alias => 
CursorIcon::Alias,\n        cursor_icon::CursorIcon::Copy => CursorIcon::Copy,\n        cursor_icon::CursorIcon::Move => CursorIcon::Move,\n        cursor_icon::CursorIcon::NoDrop => CursorIcon::NoDrop,\n        cursor_icon::CursorIcon::NotAllowed => CursorIcon::NotAllowed,\n        cursor_icon::CursorIcon::Grab => CursorIcon::Grab,\n        cursor_icon::CursorIcon::Grabbing => CursorIcon::Grabbing,\n        cursor_icon::CursorIcon::EResize => CursorIcon::EResize,\n        cursor_icon::CursorIcon::NResize => CursorIcon::NResize,\n        cursor_icon::CursorIcon::NeResize => CursorIcon::NeResize,\n        cursor_icon::CursorIcon::NwResize => CursorIcon::NwResize,\n        cursor_icon::CursorIcon::SResize => CursorIcon::SResize,\n        cursor_icon::CursorIcon::SeResize => CursorIcon::SeResize,\n        cursor_icon::CursorIcon::SwResize => CursorIcon::SwResize,\n        cursor_icon::CursorIcon::WResize => CursorIcon::WResize,\n        cursor_icon::CursorIcon::EwResize => CursorIcon::EwResize,\n        cursor_icon::CursorIcon::NsResize => CursorIcon::NsResize,\n        cursor_icon::CursorIcon::NeswResize => CursorIcon::NeswResize,\n        cursor_icon::CursorIcon::NwseResize => CursorIcon::NwseResize,\n        cursor_icon::CursorIcon::ColResize => CursorIcon::ColResize,\n        cursor_icon::CursorIcon::RowResize => CursorIcon::RowResize,\n        cursor_icon::CursorIcon::AllScroll => CursorIcon::AllScroll,\n        cursor_icon::CursorIcon::ZoomIn => CursorIcon::ZoomIn,\n        cursor_icon::CursorIcon::ZoomOut => CursorIcon::ZoomOut,\n        _ => CursorIcon::Default,\n    }\n}\n"
  },
  {
    "path": "mm-server/src/server/handlers/validation.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse mm_protocol as protocol;\nuse tracing::debug;\n\nuse crate::{\n    codec::{AudioCodec, VideoCodec},\n    color::VideoProfile,\n    pixel_scale::PixelScale,\n    session::{\n        control::{AudioStreamParams, DisplayParams, VideoStreamParams},\n        GamepadLayout,\n    },\n};\n\npub enum ValidationError {\n    Invalid(String),\n    Unsupported(String),\n}\n\ntype Result<T> = std::result::Result<T, ValidationError>;\n\npub fn validate_display_params(\n    params: Option<protocol::VirtualDisplayParameters>,\n) -> Result<DisplayParams> {\n    if let Some(params) = params {\n        let (width, height) = validate_resolution(params.resolution)?;\n        let framerate = validate_framerate(params.framerate_hz)?;\n        let ui_scale = validate_ui_scale(params.ui_scale)?;\n\n        Ok(DisplayParams {\n            width,\n            height,\n            framerate,\n            ui_scale,\n        })\n    } else {\n        Err(ValidationError::Invalid(\n            \"display parameters missing\".into(),\n        ))\n    }\n}\n\npub fn validate_attachment(\n    params: protocol::Attach,\n) -> Result<(VideoStreamParams, AudioStreamParams)> {\n    let (width, height) = validate_resolution(params.streaming_resolution)?;\n    let video_codec = validate_video_codec(params.video_codec)?;\n    let preset = validate_preset(params.quality_preset)?;\n    let video_profile = validate_profile(params.video_profile)?;\n\n    let sample_rate = validate_sample_rate(params.sample_rate_hz)?;\n    let channels = validate_channels(params.channels)?;\n    let audio_codec = validate_audio_codec(params.audio_codec)?;\n\n    Ok((\n        VideoStreamParams {\n            width,\n            height,\n            codec: video_codec,\n            preset,\n            profile: video_profile,\n        },\n        AudioStreamParams {\n            sample_rate,\n            channels,\n     
       codec: audio_codec,\n        },\n    ))\n}\n\npub fn validate_resolution(resolution: Option<protocol::Size>) -> Result<(u32, u32)> {\n    match resolution {\n        Some(ref size) => {\n            let (width, height) = (size.width, size.height);\n            if width != 0 && height != 0 && width % 2 == 0 && height % 2 == 0 {\n                Ok((width, height))\n            } else {\n                debug!(\"rejecting invalid resolution: {}x{}\", width, height);\n                Err(ValidationError::Invalid(\n                    \"resolution must be non-zero and even\".into(),\n                ))\n            }\n        }\n        None => Err(ValidationError::Invalid(\"resolution missing\".into())),\n    }\n}\n\npub fn validate_ui_scale(ui_scale: Option<protocol::PixelScale>) -> Result<PixelScale> {\n    match ui_scale {\n        Some(scale) => match PixelScale::try_from(scale) {\n            Ok(s) if !s.is_fractional() => Ok(s),\n            Ok(_) => Err(ValidationError::Unsupported(\n                \"fractional UI scales are not supported\".into(),\n            )),\n            Err(_) => Err(ValidationError::Invalid(\"invalid UI scale\".into())),\n        },\n        None => Ok(PixelScale::ONE),\n    }\n}\n\nfn validate_profile(profile: i32) -> Result<VideoProfile> {\n    let p: protocol::VideoProfile = match profile.try_into() {\n        Err(_) => return Err(ValidationError::Invalid(\"invalid video profile\".into())),\n        Ok(protocol::VideoProfile::Unknown) => return Ok(VideoProfile::Hd),\n        Ok(p) => p,\n    };\n\n    match p.try_into() {\n        Ok(p) => Ok(p),\n        _ => Err(ValidationError::Unsupported(\n            \"unsupported video profile\".into(),\n        )),\n    }\n}\n\npub fn validate_video_codec(codec: i32) -> Result<VideoCodec> {\n    let codec: protocol::VideoCodec = match codec.try_into() {\n        Err(_) => return Err(ValidationError::Invalid(\"invalid video codec\".into())),\n        Ok(protocol::VideoCodec::Unknown) 
=> return Ok(VideoCodec::H265),\n        Ok(v) => v,\n    };\n\n    match codec.try_into() {\n        Ok(c) => Ok(c),\n        Err(_) => Err(ValidationError::Invalid(\"invalid video codec\".into())),\n    }\n}\n\npub fn validate_preset(preset: u32) -> Result<u32> {\n    match preset {\n        0 => Ok(6), // Default to 6\n        v if v <= 10 => Ok(v - 1),\n        _ => Err(ValidationError::Invalid(\"invalid preset\".into())),\n    }\n}\npub fn validate_framerate(framerate: u32) -> Result<u32> {\n    match framerate {\n        60 | 30 => Ok(framerate),\n        _ => Err(ValidationError::Unsupported(\"unsupported framerate\".into())),\n    }\n}\n\npub fn validate_audio_codec(codec: i32) -> Result<AudioCodec> {\n    let codec: protocol::AudioCodec = match codec.try_into() {\n        Err(_) => return Err(ValidationError::Invalid(\"invalid audio codec\".into())),\n        Ok(protocol::AudioCodec::Unknown) => return Ok(AudioCodec::Opus),\n        Ok(v) => v,\n    };\n\n    match codec.try_into() {\n        Ok(c) => Ok(c),\n        Err(_) => Err(ValidationError::Invalid(\"invalid audio codec\".into())),\n    }\n}\n\npub fn validate_sample_rate(sample_rate: u32) -> Result<u32> {\n    if sample_rate == 0 {\n        Ok(48000)\n    } else if !(16000..=48000).contains(&sample_rate) {\n        Err(ValidationError::Invalid(\"invalid sample rate\".into()))\n    } else {\n        Ok(sample_rate)\n    }\n}\n\npub fn validate_channels(channels: Option<protocol::AudioChannels>) -> Result<u32> {\n    match channels {\n        Some(map) => {\n            let channels = map.channels.len() as u32;\n            for ch in map.channels {\n                if let Err(e) = protocol::audio_channels::Channel::try_from(ch) {\n                    return Err(ValidationError::Invalid(format!(\"invalid channel: {}\", e)));\n                }\n            }\n\n            if channels == 2 {\n                Ok(channels)\n            } else {\n                Err(ValidationError::Unsupported(\n        
            \"unsupported number of channels\".into(),\n                ))\n            }\n        }\n        None => Ok(2), // Default to stereo.\n    }\n}\n\npub fn validate_gamepad(gamepad: Option<protocol::Gamepad>) -> Result<(u64, GamepadLayout)> {\n    let Some(gamepad) = gamepad else {\n        return Err(ValidationError::Invalid(\"gamepad is required\".into()));\n    };\n\n    let id = validate_gamepad_id(gamepad.id)?;\n    let layout = validate_gamepad_layout(gamepad.layout)?;\n    Ok((id, layout))\n}\n\npub fn validate_gamepad_id(id: u64) -> Result<u64> {\n    if id == 0 {\n        Err(ValidationError::Invalid(\"id must be non-zero\".into()))\n    } else {\n        Ok(id)\n    }\n}\n\npub fn validate_gamepad_layout(layout: i32) -> Result<GamepadLayout> {\n    match layout.try_into() {\n        Err(_) | Ok(protocol::gamepad::GamepadLayout::Unknown) => {\n            Err(ValidationError::Invalid(\"invalid gamepad layout\".into()))\n        }\n        Ok(_) => Ok(GamepadLayout::GenericDualStick), // TODO\n    }\n}\n"
  },
  {
    "path": "mm-server/src/server/handlers.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{fs::File, path::Path};\n\nuse anyhow::bail;\nuse bytes::Bytes;\nuse crossbeam_channel::Receiver;\nuse mm_protocol as protocol;\nuse protocol::error::ErrorCode;\nuse tracing::{debug, debug_span, error, trace};\n\nuse crate::{\n    session::{control::DisplayParams, Session},\n    state::SharedState,\n    waking_sender::{WakingOneshot, WakingSender},\n};\n\nmod attachment;\nmod validation;\n\nuse validation::*;\n\n#[derive(Debug, Clone)]\nstruct ServerError(protocol::error::ErrorCode, Option<String>);\n\nstruct Context {\n    state: SharedState,\n    incoming: Receiver<protocol::MessageType>,\n    outgoing: WakingSender<protocol::MessageType>,\n    outgoing_dgrams: WakingSender<Vec<u8>>,\n    max_dgram_len: usize,\n}\n\nimpl Context {\n    fn send_err(&self, err: ServerError) {\n        let ServerError(code, text) = err;\n\n        if let Some(text) = text.as_ref() {\n            debug!(\"handler ended with error: {:?}: {}\", code, text);\n        } else {\n            debug!(\"handler ended with error: {:?}\", code);\n        }\n\n        let err = protocol::Error {\n            err_code: code.into(),\n            error_text: text.unwrap_or_default(),\n        };\n\n        self.outgoing.send(err.into()).ok();\n    }\n}\n\ntype Result<M> = std::result::Result<M, ServerError>;\n\npub fn dispatch(\n    state: SharedState,\n    incoming: Receiver<protocol::MessageType>,\n    outgoing: WakingSender<protocol::MessageType>,\n    outgoing_dgrams: WakingSender<Vec<u8>>,\n    max_dgram_len: usize,\n    done: WakingOneshot<()>,\n) {\n    let instant = std::time::Instant::now();\n\n    let initial = match incoming.recv() {\n        Ok(msg) => msg,\n        Err(_) => {\n            error!(\"empty worker pipe\");\n            return;\n        }\n    };\n\n    let span = debug_span!(\"dispatch\", initial = %initial);\n    let _guard = span.enter();\n\n    let ctx 
= Context {\n        state,\n        incoming,\n        outgoing,\n        outgoing_dgrams,\n        max_dgram_len,\n    };\n\n    match initial {\n        protocol::MessageType::ListApplications(msg) => roundtrip(list_applications, &ctx, msg),\n        protocol::MessageType::FetchApplicationImage(msg) => roundtrip(fetch_img, &ctx, msg),\n        protocol::MessageType::LaunchSession(msg) => roundtrip(launch_session, &ctx, msg),\n        protocol::MessageType::ListSessions(msg) => roundtrip(list_sessions, &ctx, msg),\n        protocol::MessageType::UpdateSession(msg) => roundtrip(update_session, &ctx, msg),\n        protocol::MessageType::EndSession(msg) => roundtrip(end_session, &ctx, msg),\n        protocol::MessageType::Attach(msg) => {\n            if let Err(err) = attachment::attach(&ctx, msg) {\n                ctx.send_err(err);\n            } else {\n                // Clean exit, no final message.\n            }\n        }\n        _ => {\n            error!(\"unexpected message type: {}\", initial);\n            ctx.send_err(ServerError(ErrorCode::ErrorProtocolUnexpectedMessage, None));\n        }\n    };\n\n    // Explicitly hang up.\n    drop(ctx);\n    let _ = done.send(());\n\n    debug!(dur = ?instant.elapsed(),\"worker finished\");\n}\n\nfn roundtrip<F, Req, Resp>(f: F, ctx: &Context, req: Req)\nwhere\n    Resp: Into<protocol::MessageType>,\n    F: Fn(&Context, Req) -> Result<Resp>,\n{\n    match f(ctx, req) {\n        Ok(resp) => {\n            if ctx.outgoing.send(resp.into()).is_err() {\n                debug!(\"client hung up before response could be sent\");\n            }\n        }\n        Err(err) => {\n            error!(?err, \"handler returned error\");\n            ctx.send_err(err);\n        }\n    }\n}\n\nfn list_applications(\n    ctx: &Context,\n    _msg: protocol::ListApplications,\n) -> Result<protocol::ApplicationList> {\n    let apps = ctx\n        .state\n        .lock()\n        .cfg\n        .apps\n        .iter()\n        
.map(|(id, app)| protocol::application_list::Application {\n            id: id.clone(),\n            description: app.description.clone().unwrap_or_default(),\n            folder: app.path.clone(),\n            images_available: if app.header_image.is_some() {\n                vec![protocol::ApplicationImageFormat::Header.into()]\n            } else {\n                vec![]\n            },\n        })\n        .collect();\n\n    Ok(protocol::ApplicationList { list: apps })\n}\n\nfn fetch_img(\n    ctx: &Context,\n    msg: protocol::FetchApplicationImage,\n) -> Result<protocol::ApplicationImage> {\n    match msg.format.try_into() {\n        Ok(protocol::ApplicationImageFormat::Header) => (),\n        _ => {\n            return Err(ServerError(\n                ErrorCode::ErrorProtocol,\n                Some(\"unknown application image type\".to_string()),\n            ));\n        }\n    }\n\n    let Some(config) = ctx.state.lock().cfg.apps.get(&msg.application_id).cloned() else {\n        return Err(ServerError(\n            ErrorCode::ErrorApplicationNotFound,\n            Some(\"application not found\".to_string()),\n        ));\n    };\n\n    let Some(path) = &config.header_image else {\n        return Err(ServerError(\n            ErrorCode::ErrorApplicationNotFound,\n            Some(\"image not found\".to_string()),\n        ));\n    };\n\n    match read_file(path, crate::config::MAX_IMAGE_SIZE) {\n        Ok(image_data) => Ok(protocol::ApplicationImage { image_data }),\n        Err(err) => {\n            error!(path = ?path, ?err, \"failed to load image data\");\n\n            Err(ServerError(\n                ErrorCode::ErrorServer,\n                Some(\"failed to load image\".into()),\n            ))\n        }\n    }\n}\n\nfn launch_session(\n    ctx: &Context,\n    msg: protocol::LaunchSession,\n) -> Result<protocol::SessionLaunched> {\n    let display_params = validate_display_params(msg.display_params).map_err(|err| match err {\n        
ValidationError::Unsupported(text) => {\n            ServerError(ErrorCode::ErrorSessionParamsNotSupported, Some(text))\n        }\n        ValidationError::Invalid(text) => ServerError(ErrorCode::ErrorProtocol, Some(text)),\n    })?;\n\n    // Tracy gets confused if we have multiple sessions going.\n    let mut guard = ctx.state.lock();\n    if cfg!(feature = \"tracy\") && !guard.sessions.is_empty() {\n        return Err(ServerError(\n            ErrorCode::ErrorServer,\n            Some(\"only one session allowed if actively debugging\".into()),\n        ));\n    }\n\n    // Don't keep the state cloned while we launch the session.\n    let vk_clone = guard.vk.clone();\n    let Some(application_config) = guard.cfg.apps.get(&msg.application_id).cloned() else {\n        return Err(ServerError(\n            ErrorCode::ErrorSessionLaunchFailed,\n            Some(\"application not found\".to_string()),\n        ));\n    };\n\n    for gamepad in msg.permanent_gamepads.clone() {\n        validate_gamepad(Some(gamepad)).map_err(|err| match err {\n            ValidationError::Unsupported(text) => {\n                ServerError(ErrorCode::ErrorSessionParamsNotSupported, Some(text))\n            }\n            ValidationError::Invalid(text) => ServerError(ErrorCode::ErrorProtocol, Some(text)),\n        })?;\n    }\n\n    let bug_report_dir = guard.cfg.bug_report_dir.clone();\n    let (session_seq, session_id) = guard.generate_session_id();\n    drop(guard);\n\n    // Create a folder in the bug report directory just for this session.\n    let mut bug_report_dir = bug_report_dir;\n    if let Some(ref mut dir) = bug_report_dir {\n        dir.push(format!(\"session-{:02}-{}\", session_seq, session_id));\n        std::fs::create_dir_all(dir).unwrap();\n    }\n\n    let session = match Session::launch(\n        vk_clone,\n        session_id,\n        &msg.application_id,\n        &application_config,\n        display_params,\n        msg.permanent_gamepads,\n        
bug_report_dir,\n    ) {\n        Ok(session) => session,\n        Err(err) => {\n            error!(?err, \"failed to launch session\");\n            return Err(ServerError(ErrorCode::ErrorSessionLaunchFailed, None));\n        }\n    };\n\n    let id = session.id;\n    ctx.state.lock().sessions.insert(id, session);\n\n    // XXX: The protocol allows us to support superresolution here, but we don't\n    // know how to downscale before encoding (yet).\n    Ok(protocol::SessionLaunched {\n        id,\n        supported_streaming_resolutions: generate_streaming_res(&display_params),\n    })\n}\n\nfn list_sessions(ctx: &Context, _msg: protocol::ListSessions) -> Result<protocol::SessionList> {\n    let sessions = ctx\n        .state\n        .lock()\n        .sessions\n        .values()\n        .map(|s| protocol::session_list::Session {\n            application_id: s.application_id.clone(),\n            session_id: s.id,\n            session_start: Some(s.started.into()),\n            display_params: Some(s.display_params.into()),\n            supported_streaming_resolutions: generate_streaming_res(&s.display_params),\n            permanent_gamepads: s.permanent_gamepads.clone(),\n        })\n        .collect();\n\n    Ok(protocol::SessionList { list: sessions })\n}\n\nfn update_session(ctx: &Context, msg: protocol::UpdateSession) -> Result<protocol::SessionUpdated> {\n    let display_params = validate_display_params(msg.display_params).map_err(|err| match err {\n        ValidationError::Unsupported(text) => {\n            ServerError(ErrorCode::ErrorSessionParamsNotSupported, Some(text))\n        }\n        ValidationError::Invalid(text) => ServerError(ErrorCode::ErrorProtocol, Some(text)),\n    })?;\n\n    let mut state = ctx.state.lock();\n    let Some(session) = state.sessions.get_mut(&msg.session_id) else {\n        return Err(ServerError(ErrorCode::ErrorSessionNotFound, None));\n    };\n\n    trace!(?session.display_params, ?display_params, \"update_session\");\n 
   if session.display_params != display_params {\n        if let Err(err) = session.update_display_params(display_params) {\n            error!(?err, \"failed to update display params\");\n            return Err(ServerError(\n                ErrorCode::ErrorServer,\n                Some(\"failed to update display params\".to_string()),\n            ));\n        }\n    } else {\n        debug!(\"display params unchanged; ignoring update\");\n    }\n\n    Ok(protocol::SessionUpdated {})\n}\n\nfn end_session(ctx: &Context, msg: protocol::EndSession) -> Result<protocol::SessionEnded> {\n    let Some(session) = ctx.state.lock().sessions.remove(&msg.session_id) else {\n        return Err(ServerError(ErrorCode::ErrorSessionNotFound, None));\n    };\n\n    if let Err(e) = session.stop() {\n        error!(\"failed to gracefully stop session: {}\", e)\n    };\n\n    Ok(protocol::SessionEnded {})\n}\n\nfn generate_streaming_res(display_params: &DisplayParams) -> Vec<protocol::Size> {\n    // XXX: The protocol allows us to support superresolution here, but we don't\n    // know how to downscale before encoding (yet).\n    vec![protocol::Size {\n        width: display_params.width,\n        height: display_params.height,\n    }]\n}\n\nfn read_file(p: impl AsRef<Path>, max_size: u64) -> anyhow::Result<Bytes> {\n    use std::io::Read as _;\n\n    use bytes::buf::BufMut;\n\n    let mut r = File::open(p.as_ref())?.take(max_size + 1);\n    let mut w = bytes::BytesMut::new().writer();\n\n    match std::io::copy(&mut r, &mut w) {\n        Ok(len) if len > max_size => bail!(\"file is bigger than maximum size\"),\n        Ok(0) => bail!(\"file is empty\"),\n        Ok(len) => {\n            let mut buf = w.into_inner();\n            Ok(buf.split_to(len as usize).freeze())\n        }\n        Err(e) => Err(e.into()),\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_read_file() -> anyhow::Result<()> {\n        let zero_file = 
mktemp::Temp::new_file()?;\n        assert_eq!(\"\".to_string(), std::fs::read_to_string(&zero_file)?);\n        assert!(read_file(&zero_file, 1024).is_err());\n        drop(zero_file);\n\n        let s = \"foobar\".repeat(64);\n        let len = s.len() as u64;\n        let big_file = mktemp::Temp::new_file()?;\n        std::fs::write(&big_file, &s)?;\n        assert_eq!(s, std::fs::read_to_string(&big_file)?);\n        assert_eq!(s.as_bytes().to_vec(), read_file(&big_file, len)?);\n        assert_eq!(s.as_bytes().to_vec(), read_file(&big_file, len + 1)?);\n        assert!(read_file(&big_file, len - 1).is_err());\n        drop(big_file);\n\n        Ok(())\n    }\n}\n"
  },
  {
    "path": "mm-server/src/server/mdns.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::net::SocketAddr;\n\nuse anyhow::bail;\nuse tracing::debug;\n\npub struct MdnsService {\n    daemon: mdns_sd::ServiceDaemon,\n    service_name: String,\n}\n\nimpl MdnsService {\n    pub fn new(\n        addr: SocketAddr,\n        hostname: Option<&str>,\n        instance_name: Option<&str>,\n    ) -> anyhow::Result<Self> {\n        let daemon = mdns_sd::ServiceDaemon::new()?;\n\n        let txt = [(\n            \"mmp\",\n            std::str::from_utf8(mm_protocol::ALPN_PROTOCOL_VERSION).unwrap(),\n        )];\n\n        let hostname = match hostname {\n            Some(h) => h.to_owned(),\n            None => mdns_hostname()?,\n        };\n\n        let instance_name = match instance_name {\n            Some(s) => s.to_owned(),\n            None => mdns_instance_name(&hostname)?,\n        };\n\n        let ip = addr.ip();\n        let (ip, ip_auto) = if ip.is_unspecified() {\n            (vec![], true)\n        } else {\n            (vec![ip], false)\n        };\n\n        let mut service_info = mdns_sd::ServiceInfo::new(\n            \"_magic-mirror._udp.local.\",\n            &instance_name,\n            &hostname,\n            &ip[..],\n            addr.port(),\n            &txt[..],\n        )?;\n\n        if ip_auto {\n            service_info = service_info.enable_addr_auto();\n        }\n\n        let service_name = service_info.get_fullname().to_owned();\n        daemon.register(service_info)?;\n\n        debug!(hostname, instance_name, ip = ?ip.first(), ip_auto, \"advertizing service\");\n\n        Ok(Self {\n            daemon,\n            service_name,\n        })\n    }\n}\n\nimpl Drop for MdnsService {\n    fn drop(&mut self) {\n        loop {\n            match self.daemon.unregister(&self.service_name) {\n                Ok(_) => break,\n                Err(mdns_sd::Error::Again) => continue,\n                Err(err) => {\n         
           debug!(?err, \"error shutting down mdns daemon\");\n                    return;\n                }\n            }\n        }\n\n        loop {\n            match self.daemon.shutdown() {\n                Ok(_) => return,\n                Err(mdns_sd::Error::Again) => continue,\n                Err(err) => {\n                    debug!(?err, \"error shutting down mdns daemon\");\n                    return;\n                }\n            }\n        }\n    }\n}\n\nfn mdns_hostname() -> anyhow::Result<String> {\n    let uname = rustix::system::uname();\n\n    let hostname = uname.nodename().to_str()?;\n    if hostname.is_empty() {\n        bail!(\"empty hostname\");\n    }\n\n    if hostname.ends_with(\".local\") {\n        return Ok(format!(\"{hostname}.\"));\n    } else if hostname.contains('.') {\n        bail!(\"hostname appears to be a qualified domain\");\n    }\n\n    Ok(format!(\"{hostname}.local.\"))\n}\n\nfn mdns_instance_name(hostname: &str) -> anyhow::Result<String> {\n    if hostname.is_empty() {\n        bail!(\"empty hostname\");\n    }\n\n    let hostname = match hostname.split_once('.') {\n        Some((host, _)) => host,\n        None => hostname,\n    };\n\n    Ok(hostname.to_uppercase())\n}\n"
  },
  {
    "path": "mm-server/src/server/sendmmsg.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::os::fd::{AsFd, AsRawFd};\nuse std::time;\nuse std::{io::IoSlice, net::SocketAddr};\n\nuse nix::sys::socket::{\n    cmsg_space, setsockopt, sockopt::TxTime, ControlMessage, MsgFlags, MultiHeaders,\n    SockaddrStorage,\n};\nuse tracing::instrument;\n#[derive(Default)]\npub struct SendMmsg<'a> {\n    iovs: Vec<[IoSlice<'a>; 1]>,\n    addrs: Vec<Option<SockaddrStorage>>,\n    txtimes: Vec<u64>,\n}\n\nimpl<'a> SendMmsg<'a> {\n    #[instrument(skip_all)]\n    pub fn sendmsg(mut self, buf: &'a [u8], addr: SocketAddr, txtime: time::Instant) -> Self {\n        self.iovs.push([IoSlice::new(buf)]);\n        self.addrs.push(Some(addr.into()));\n\n        let txtime = std_time_to_u64(&txtime);\n        self.txtimes.push(txtime);\n\n        self\n    }\n\n    #[instrument(skip_all)]\n    pub fn finish(&mut self, fd: &impl AsRawFd) -> Result<(), nix::Error> {\n        let mut data: MultiHeaders<SockaddrStorage> = MultiHeaders::preallocate(\n            self.iovs.len(),\n            Some(Vec::with_capacity(cmsg_space::<u64>() * self.iovs.len())),\n        );\n        let cmsgs = self\n            .txtimes\n            .iter()\n            .map(ControlMessage::TxTime)\n            .collect::<Vec<_>>();\n\n        loop {\n            match nix::sys::socket::sendmmsg(\n                fd.as_raw_fd(),\n                &mut data,\n                &self.iovs,\n                &self.addrs,\n                &cmsgs,\n                MsgFlags::empty(),\n            ) {\n                Ok(_) => break,\n                Err(nix::errno::Errno::EAGAIN) => continue,\n                Err(e) => return Err(e),\n            };\n        }\n\n        Ok(())\n    }\n}\n\npub fn new<'a>() -> SendMmsg<'a> {\n    SendMmsg::default()\n}\n\n#[cfg(target_os = \"linux\")]\npub fn set_so_txtime(sock: &impl AsFd) -> anyhow::Result<()> {\n    let config = nix::libc::sock_txtime {\n        
clockid: nix::libc::CLOCK_MONOTONIC,\n        flags: 0,\n    };\n\n    setsockopt(&sock, TxTime, &config)?;\n\n    Ok(())\n}\n\n#[cfg(target_os = \"linux\")]\nfn std_time_to_u64(time: &std::time::Instant) -> u64 {\n    const NANOS_PER_SEC: u64 = 1_000_000_000;\n\n    const INSTANT_ZERO: std::time::Instant = unsafe { std::mem::transmute(std::time::UNIX_EPOCH) };\n\n    let raw_time = time.duration_since(INSTANT_ZERO);\n\n    let sec = raw_time.as_secs();\n    let nsec = raw_time.subsec_nanos();\n\n    sec * NANOS_PER_SEC + nsec as u64\n}\n"
  },
  {
    "path": "mm-server/src/server/stream.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse bytes::Bytes;\nuse either::Either;\nuse mm_protocol as protocol;\nuse tracing::{debug, error, instrument, trace_span};\n\nuse crate::{config, waking_sender::WakingSender};\n\n/// A helper to write audio/video frames out as chunks to the client. Runs on\n/// the encoder thread, not on the server thread.\npub struct StreamWriter {\n    session_id: u64,\n    attachment_id: u64,\n    outgoing: WakingSender<Vec<u8>>,\n\n    chunk_size: usize,\n    max_dgram_len: usize,\n    fec_ratios: Vec<f32>,\n\n    audio_stream_seq: u64,\n    audio_seq: u64,\n\n    video_stream_seq: u64,\n    video_seq: u64,\n}\n\nimpl StreamWriter {\n    pub fn new(\n        session_id: u64,\n        attachment_id: u64,\n        config: &config::ServerConfig,\n        outgoing: WakingSender<Vec<u8>>,\n        max_dgram_len: usize,\n    ) -> Self {\n        // max_dgram_len is our overall MTU. The MM protocol header is 2-10 bytes,\n        // and then we include seven varints (maximum 5 bytes each) and a bool of\n        // metadata, plus an optional 12-ish bytes of FEC information. 64 bytes of\n        // headroom should cover the worst case. 
However, a little extra will\n        // increase the chance that the packet is coalesced into an existing QUIC\n        // packet.\n        let chunk_size = max_dgram_len - 128;\n\n        Self {\n            session_id,\n            attachment_id,\n            outgoing,\n\n            chunk_size,\n            max_dgram_len,\n            fec_ratios: config.video_fec_ratios.clone(),\n\n            // The first stream_seq is 1, but we increment immediately below.\n            audio_stream_seq: 0,\n            video_stream_seq: 0,\n            audio_seq: 0,\n            video_seq: 0,\n        }\n    }\n\n    #[instrument(skip_all)]\n    pub fn write_video_frame(\n        &mut self,\n        pts: u64,\n        frame: Bytes,\n        hierarchical_layer: u32,\n        stream_restart: bool,\n    ) -> (u64, u64) {\n        if stream_restart {\n            self.video_stream_seq += 1;\n            self.video_seq = 0;\n\n            debug!(\n                stream_seq = self.video_stream_seq,\n                \"starting or restarting video stream\"\n            );\n        }\n\n        let seq = self.video_seq;\n        let fec_ratio = self\n            .fec_ratios\n            .get(hierarchical_layer as usize)\n            .copied()\n            .unwrap_or_default();\n\n        for chunk in iter_chunks(frame, self.chunk_size, fec_ratio) {\n            let msg = protocol::VideoChunk {\n                session_id: self.session_id,\n                attachment_id: self.attachment_id,\n\n                stream_seq: self.video_stream_seq,\n                seq,\n                data: chunk.data,\n                chunk: chunk.index,\n                num_chunks: chunk.num_chunks,\n                hierarchical_layer,\n                timestamp: pts,\n\n                fec_metadata: chunk.fec_metadata,\n            };\n\n            let res: Result<_, protocol::ProtocolError> =\n                trace_span!(\"encode_message\").in_scope(|| {\n                    let mut buf = vec![0; 
self.max_dgram_len];\n                    let len = protocol::encode_message(&msg.into(), &mut buf)?;\n\n                    buf.truncate(len);\n                    Ok(buf)\n                });\n\n            match res {\n                Ok(buf) => {\n                    let _ = self.outgoing.send(buf);\n                }\n                Err(err) => {\n                    error!(?err, \"failed to encode video chunk\");\n                }\n            };\n        }\n\n        self.video_seq += 1;\n        (self.video_stream_seq, seq)\n    }\n\n    #[instrument(skip_all)]\n    pub fn write_audio_frame(\n        &mut self,\n        pts: u64,\n        frame: Bytes,\n        stream_restart: bool,\n    ) -> (u64, u64) {\n        if stream_restart {\n            self.audio_stream_seq += 1;\n            self.audio_seq = 0;\n            debug!(\n                stream_seq = self.audio_stream_seq,\n                \"starting or restarting audio stream\"\n            );\n        }\n\n        let seq = self.audio_seq;\n        for chunk in iter_chunks(frame, self.chunk_size, 0.0) {\n            let msg = protocol::AudioChunk {\n                session_id: self.session_id,\n                attachment_id: self.attachment_id,\n\n                stream_seq: self.audio_stream_seq,\n                seq,\n                data: chunk.data,\n                chunk: chunk.index,\n                num_chunks: chunk.num_chunks,\n                timestamp: pts,\n\n                fec_metadata: chunk.fec_metadata,\n            };\n\n            let res: Result<_, protocol::ProtocolError> =\n                trace_span!(\"encode_message\").in_scope(|| {\n                    let mut buf = vec![0; self.max_dgram_len];\n                    let len = protocol::encode_message(&msg.into(), &mut buf)?;\n\n                    buf.truncate(len);\n                    Ok(buf)\n                });\n\n            match res {\n                Ok(buf) => {\n                    let _ = 
self.outgoing.send(buf);\n                }\n                Err(err) => {\n                    error!(?err, \"failed to encode audio chunk\");\n                }\n            };\n        }\n\n        self.audio_seq += 1;\n        (self.audio_stream_seq, seq)\n    }\n}\n\npub struct Chunk {\n    pub index: u32,\n    pub num_chunks: u32,\n    pub data: Bytes,\n    pub fec_metadata: Option<protocol::FecMetadata>,\n}\n\npub fn iter_chunks(\n    mut buf: bytes::Bytes,\n    mtu: usize,\n    fec_ratio: f32,\n) -> impl Iterator<Item = Chunk> {\n    if fec_ratio > 0.0 {\n        return Either::Left(iter_chunks_fec(buf, mtu, fec_ratio));\n    }\n\n    let num_chunks = buf.len().div_ceil(mtu) as u32;\n    let mut next_chunk: u32 = 0;\n\n    let span = trace_span!(\"iter_chunks\");\n    let _guard = span.enter();\n\n    Either::Right(std::iter::from_fn(move || {\n        if buf.is_empty() {\n            return None;\n        }\n\n        let data = if buf.len() < mtu {\n            buf.split_to(buf.len())\n        } else {\n            buf.split_to(mtu)\n        };\n\n        let chunk = next_chunk;\n        next_chunk += 1;\n        Some(Chunk {\n            index: chunk,\n            num_chunks,\n            data,\n            fec_metadata: None,\n        })\n    }))\n}\n\n#[instrument(skip_all)]\nfn iter_chunks_fec(buf: Bytes, mtu: usize, ratio: f32) -> impl Iterator<Item = Chunk> {\n    let encoder = raptorq::Encoder::with_defaults(&buf, mtu as u16);\n    let oti = Bytes::copy_from_slice(&encoder.get_config().serialize());\n\n    let base_chunks = buf.len().div_ceil(mtu) as u32;\n    let repair_chunks = (base_chunks as f32 * ratio).ceil() as u32;\n    let chunks = encoder.get_encoded_packets(repair_chunks);\n    let num_chunks = chunks.len() as u32;\n\n    chunks.into_iter().enumerate().map(move |(chunk, p)| Chunk {\n        index: chunk as u32,\n        num_chunks,\n        data: Bytes::copy_from_slice(p.data()),\n        fec_metadata: Some(mm_protocol::FecMetadata {\n   
         fec_scheme: protocol::fec_metadata::FecScheme::Raptorq.into(),\n            fec_payload_id: Bytes::copy_from_slice(&p.payload_id().serialize()),\n            fec_oti: oti.clone(),\n        }),\n    })\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_iter_chunks() {\n        let frame = bytes::Bytes::from(vec![9; 3536]);\n        let mut chunks = iter_chunks(frame, 1200, 0.0);\n        let chunk = chunks.next().unwrap();\n        assert_eq!(chunk.index, 0);\n        assert_eq!(chunk.num_chunks, 3);\n        assert_eq!(chunk.data.len(), 1200);\n        assert_eq!(chunk.fec_metadata, None);\n\n        let chunk = chunks.next().unwrap();\n        assert_eq!(chunk.index, 1);\n        assert_eq!(chunk.num_chunks, 3);\n        assert_eq!(chunk.data.len(), 1200);\n        assert_eq!(chunk.fec_metadata, None);\n\n        let chunk = chunks.next().unwrap();\n        assert_eq!(chunk.index, 2);\n        assert_eq!(chunk.num_chunks, 3);\n        assert_eq!(chunk.data.len(), 1136);\n        assert_eq!(chunk.fec_metadata, None);\n\n        assert!(chunks.next().is_none());\n    }\n\n    #[test]\n    fn test_iter_chunks_fec() {\n        let frame = bytes::Bytes::from(vec![9; 3536]);\n        let mut chunks = iter_chunks(frame, 1200, 0.15);\n        let chunk = chunks.next().unwrap();\n        assert_eq!(chunk.index, 0);\n        assert_eq!(chunk.num_chunks, 4);\n        assert_eq!(chunk.data.len(), 1200);\n        assert_eq!(\n            chunk.fec_metadata.as_ref().unwrap().fec_scheme(),\n            protocol::fec_metadata::FecScheme::Raptorq\n        );\n        assert_eq!(chunk.fec_metadata.as_ref().unwrap().fec_oti.len(), 12);\n\n        let chunk = chunks.next().unwrap();\n        assert_eq!(chunk.index, 1);\n        assert_eq!(chunk.num_chunks, 4);\n        assert_eq!(chunk.data.len(), 1200);\n        assert_eq!(\n            chunk.fec_metadata.as_ref().unwrap().fec_scheme(),\n            protocol::fec_metadata::FecScheme::Raptorq\n    
    );\n        assert_eq!(chunk.fec_metadata.as_ref().unwrap().fec_oti.len(), 12);\n\n        let chunk = chunks.next().unwrap();\n        assert_eq!(chunk.index, 2);\n        assert_eq!(chunk.num_chunks, 4);\n        assert_eq!(chunk.data.len(), 1200);\n        assert_eq!(\n            chunk.fec_metadata.as_ref().unwrap().fec_scheme(),\n            protocol::fec_metadata::FecScheme::Raptorq\n        );\n        assert_eq!(chunk.fec_metadata.as_ref().unwrap().fec_oti.len(), 12);\n\n        let chunk = chunks.next().unwrap();\n        assert_eq!(chunk.index, 3);\n        assert_eq!(chunk.num_chunks, 4);\n        assert_eq!(chunk.data.len(), 1200);\n        assert_eq!(\n            chunk.fec_metadata.as_ref().unwrap().fec_scheme(),\n            protocol::fec_metadata::FecScheme::Raptorq\n        );\n        assert_eq!(chunk.fec_metadata.as_ref().unwrap().fec_oti.len(), 12);\n\n        assert!(chunks.next().is_none());\n    }\n}\n"
  },
  {
    "path": "mm-server/src/server.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nmod handlers;\nmod mdns;\nmod sendmmsg;\npub mod stream;\n\nuse std::collections::{BTreeMap, VecDeque};\nuse std::net::SocketAddr;\nuse std::sync::Arc;\nuse std::time;\n\nuse anyhow::anyhow;\nuse anyhow::bail;\nuse anyhow::Context;\nuse bytes::{Buf, Bytes, BytesMut};\nuse crossbeam_channel::{Receiver, Sender, TryRecvError};\nuse hashbrown::HashMap;\nuse mm_protocol as protocol;\nuse protocol::error::ErrorCode;\nuse ring::rand::{self, SecureRandom};\nuse tracing::trace;\nuse tracing::trace_span;\nuse tracing::warn;\nuse tracing::{debug, error};\nuse tracing::{debug_span, instrument};\n\nuse crate::state::SharedState;\nuse crate::waking_sender::WakingOneshot;\nuse crate::waking_sender::WakingSender;\n\nconst MAX_QUIC_PACKET_SIZE: usize = 1350;\n\nconst SOCKET: mio::Token = mio::Token(0);\nconst WAKER: mio::Token = mio::Token(1);\n\npub struct Server {\n    server_config: crate::config::ServerConfig,\n    quiche_config: quiche::Config,\n    addr: SocketAddr,\n    socket: mio::net::UdpSocket,\n    scratch: BytesMut,\n    outgoing_packets: VecDeque<Outgoing>,\n\n    poll: mio::Poll,\n    waker: Arc<mio::Waker>,\n    next_timer_token: usize,\n    thread_pool: threadpool::ThreadPool,\n\n    clients: HashMap<quiche::ConnectionId<'static>, ClientConnection>,\n    state: SharedState,\n    close_recv: Receiver<()>,\n    close_send: WakingSender<()>,\n\n    _mdns: Option<mdns::MdnsService>,\n    shutting_down: bool,\n}\n\nstruct Outgoing {\n    buf: Bytes,\n    to: SocketAddr,\n}\n\npub struct StreamWorker {\n    incoming_messages: Option<Sender<protocol::MessageType>>,\n    outgoing_messages: Receiver<protocol::MessageType>,\n    done: oneshot::Receiver<()>,\n}\n\npub struct ClientConnection {\n    remote_addr: SocketAddr,\n    conn_id: quiche::ConnectionId<'static>,\n    conn: quiche::Connection,\n    timer: mio_timerfd::TimerFd,\n    timeout_token: mio::Token,\n\n    
partial_reads: BTreeMap<u64, BytesMut>,\n    partial_writes: BTreeMap<u64, Bytes>,\n    in_flight: BTreeMap<u64, StreamWorker>,\n\n    dgram_recv: Receiver<Vec<u8>>,\n    dgram_send: WakingSender<Vec<u8>>,\n\n    last_keepalive: time::Instant,\n}\n\nimpl Server {\n    pub fn new(\n        socket: std::net::UdpSocket,\n        server_config: crate::config::ServerConfig,\n        state: SharedState,\n    ) -> anyhow::Result<Self> {\n        let poll = mio::Poll::new().unwrap();\n        let waker = Arc::new(mio::Waker::new(poll.registry(), WAKER)?);\n\n        let clients = HashMap::new();\n\n        let mut config = match (&server_config.tls_cert, &server_config.tls_key) {\n            (Some(cert), Some(key)) => {\n                let mut config = quiche::Config::new(quiche::PROTOCOL_VERSION)?;\n\n                config\n                    .load_cert_chain_from_pem_file(cert.to_str().unwrap())\n                    .context(\"loading certificate file\")?;\n                config\n                    .load_priv_key_from_pem_file(key.to_str().unwrap())\n                    .context(\"loading private key file\")?;\n                config\n            }\n            _ => {\n                let addr = socket.local_addr()?;\n                let ip = addr.ip();\n                if ip_rfc::global(&ip) || ip.is_unspecified() {\n                    bail!(\"TLS is required for non-private addresses\");\n                }\n\n                let tls_ctx = self_signed_tls_ctx(addr)?;\n                quiche::Config::with_boring_ssl_ctx_builder(quiche::PROTOCOL_VERSION, tls_ctx)?\n            }\n        };\n\n        config.set_application_protos(&[protocol::ALPN_PROTOCOL_VERSION])?;\n        config.set_initial_max_data(65536);\n        config.set_initial_max_stream_data_bidi_remote(65536);\n        config.set_initial_max_stream_data_bidi_local(65536);\n        config.set_initial_max_stream_data_uni(65536);\n        config.set_initial_max_streams_bidi(64);\n        
config.set_initial_max_streams_uni(64);\n        config.enable_dgram(true, 0, 1024 * 1024);\n        config.enable_early_data();\n\n        // Set the idle timeout to 10s. If any streams are active, we send\n        // ack-eliciting frames so that we don't accidentally kill a client\n        // that's in the middle of something slow (like launching a session).\n        config.set_max_idle_timeout(10_000);\n\n        // Storage for packets that would have blocked on sending.\n        let outgoing_packets = VecDeque::new();\n\n        socket.set_nonblocking(true)?;\n        sendmmsg::set_so_txtime(&socket)?;\n        let mut socket = mio::net::UdpSocket::from_std(socket);\n        poll.registry()\n            .register(&mut socket, SOCKET, mio::Interest::READABLE)?;\n\n        let (close_send, close_recv) = crossbeam_channel::bounded(1);\n        let close_send = WakingSender::new(waker.clone(), close_send);\n\n        let thread_pool = threadpool::ThreadPool::new(server_config.worker_threads.get() as usize);\n\n        let addr = socket.local_addr()?;\n        let mdns = if server_config.mdns {\n            match mdns::MdnsService::new(\n                addr,\n                server_config.mdns_hostname.as_deref(),\n                server_config.mdns_instance_name.as_deref(),\n            ) {\n                Ok(sd) => Some(sd),\n                Err(e) => {\n                    error!(\"failed to enable mDNS service discovery: {e:#}\");\n                    None\n                }\n            }\n        } else {\n            None\n        };\n\n        Ok(Self {\n            server_config,\n            quiche_config: config,\n            addr: socket.local_addr()?,\n            socket,\n            scratch: BytesMut::with_capacity(65536),\n            outgoing_packets,\n\n            poll,\n            waker,\n            next_timer_token: 1024,\n            thread_pool,\n\n            clients,\n            state,\n            close_send,\n            
close_recv,\n\n            _mdns: mdns,\n            shutting_down: false,\n        })\n    }\n\n    pub fn local_addr(&self) -> anyhow::Result<SocketAddr> {\n        Ok(self.socket.local_addr()?)\n    }\n\n    pub fn closer(&self) -> WakingSender<()> {\n        self.close_send.clone()\n    }\n\n    /// Starts the server loop, returning only on error.\n    pub fn run(&mut self) -> anyhow::Result<()> {\n        let mut events = mio::Events::with_capacity(1024);\n\n        'poll: loop {\n            // TODO: It might be worthwhile to switch to a busy loop if\n            // there are any active sessions. That would mean handling quiche\n            // timeouts in userspace.\n            let poll_res = trace_span!(\"poll\").in_scope(|| {\n                self.poll\n                    .poll(&mut events, Some(time::Duration::from_secs(1)))\n            });\n\n            match poll_res {\n                Ok(_) => (),\n                Err(e) if e.kind() == std::io::ErrorKind::Interrupted => continue,\n                Err(e) => return Err(e.into()),\n            }\n\n            #[cfg(feature = \"tracy\")]\n            {\n                tracy_client::plot!(\n                    \"active streams\",\n                    self.clients\n                        .iter()\n                        .map(|(_, c)| c.in_flight.len())\n                        .sum::<usize>() as f64\n                );\n\n                tracy_client::plot!(\n                    \"dgram send queue\",\n                    self.clients\n                        .iter()\n                        .map(|(_, c)| c.conn.dgram_send_queue_len())\n                        .sum::<usize>() as f64\n                );\n\n                tracy_client::plot!(\"outgoing packet queue\", self.outgoing_packets.len() as f64);\n            }\n\n            // Check if we're supposed to shut down.\n            if let Ok(()) = self.close_recv.try_recv() {\n                debug!(\"shutting down server\");\n                
self.shutting_down = true;\n                for client in self.clients.values_mut() {\n                    match client.conn.close(true, 0, &[]) {\n                        Ok(_) | Err(quiche::Error::Done) => (),\n                        Err(e) => {\n                            bail!(\"failed to close connection: {:?}\", e);\n                        }\n                    }\n                }\n            }\n\n            for event in events.iter() {\n                // Check if the token is a timeout token.\n                let client = self\n                    .clients\n                    .values_mut()\n                    .find(|c| c.timeout_token == event.token());\n                if let Some(client) = client {\n                    client.timer.read()?;\n                    client.conn.on_timeout();\n                    client.update_timeout()?;\n                }\n            }\n\n            // Garbage-collect dead sessions.\n            self.state.lock().tick()?;\n\n            // Garbage-collect closed clients.\n            self.clients.retain(|_, c| {\n                if c.conn.is_closed() {\n                    debug!(conn_id = ?c.conn_id, remote_addr = ?c.remote_addr, \"client disconnected\");\n                    false\n                } else if c.conn.is_draining() {\n                    // Drop the workers, which drops the send/recv channels,\n                    // signaling that the workers can exit already.\n                    c.in_flight.clear();\n                    true\n                } else {\n                    true\n                }\n            });\n\n            if self.shutting_down && self.clients.is_empty() {\n                return Ok(());\n            } else if self.shutting_down {\n                debug!(\"waiting for {} clients to disconnect\", self.clients.len());\n            }\n\n            // Read incoming UDP packets and handle them.\n            'read: loop {\n                self.scratch.resize(MAX_QUIC_PACKET_SIZE, 
0);\n                let (len, from) = match self.socket.recv_from(&mut self.scratch) {\n                    Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n                        break 'read;\n                    }\n                    v => v.context(\"recv_from error\")?,\n                };\n\n                let pkt = self.scratch.split_to(len);\n                match self.recv(pkt, from) {\n                    Ok(_) => {}\n                    Err(e) => {\n                        error!(\"recv failed: {:?}\", e);\n                    }\n                }\n            }\n\n            // Write out any queued packets.\n            while !self.outgoing_packets.is_empty() {\n                let pkt = self.outgoing_packets.pop_front().unwrap();\n                match self.socket.send_to(&pkt.buf, pkt.to) {\n                    Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n                        self.outgoing_packets.push_front(pkt);\n                        continue 'poll;\n                    }\n                    v => v?,\n                };\n            }\n\n            // Let workers know if any peers hung up, and let peers know if any\n            // workers finished.\n            for client in self.clients.values_mut() {\n                let mut to_close = Vec::new();\n                for (sid, worker) in client.in_flight.iter_mut() {\n                    if client.conn.stream_finished(*sid) {\n                        trace!(\"peer hung up on stream {:?}:{}\", client.conn_id, sid);\n                        worker.incoming_messages.take();\n                    }\n\n                    if matches!(\n                        worker.done.try_recv(),\n                        Ok(()) | Err(oneshot::TryRecvError::Disconnected)\n                    ) && worker.outgoing_messages.is_empty()\n                        && !client.partial_writes.contains_key(sid)\n                    {\n                        to_close.push(*sid);\n                   
 }\n                }\n\n                for sid in to_close {\n                    trace!(sid, \"closing stream because worker finished\");\n\n                    let _ = client.conn.stream_send(sid, &[], true);\n                    let _ = client.conn.stream_shutdown(sid, quiche::Shutdown::Read, 0);\n                    client.in_flight.remove(&sid);\n                }\n            }\n\n            #[cfg(feature = \"tracy\")]\n            let mut max_txtime: f64 = 0.0;\n\n            // Demux packets from in-flight requests and datagrams from attachments.\n            for client in self.clients.values_mut() {\n                let conn_span = trace_span!(\"conn_write\", conn_id = ?client.conn_id);\n                let _guard = conn_span.enter();\n\n                if client.conn.is_draining() {\n                    continue;\n                }\n\n                loop {\n                    if client.conn.is_dgram_send_queue_full() {\n                        warn!(\"datagram send queue full!\");\n                        break;\n                    }\n\n                    let msg = match client.dgram_recv.try_recv() {\n                        Ok(msg) => msg,\n                        Err(TryRecvError::Disconnected) => unreachable!(),\n                        Err(TryRecvError::Empty) => break,\n                    };\n\n                    match client.send_dgram(msg) {\n                        Ok(_) => {}\n                        Err(e) => {\n                            match e.downcast_ref::<quiche::Error>() {\n                                Some(quiche::Error::Done) => (),\n                                _ => error!(\"failed to send datagram: {}\", e),\n                            }\n\n                            client\n                                .conn\n                                .close(true, ErrorCode::ErrorProtocol as u64, &[])\n                                .ok();\n                            break;\n                        }\n                    
}\n                }\n\n                for sid in client.conn.writable() {\n                    if !client.in_flight.contains_key(&sid) {\n                        continue;\n                    }\n\n                    if !client.flush_partial_write(sid)? {\n                        continue;\n                    }\n\n                    loop {\n                        let span = trace_span!(\"stream_write\", sid);\n                        let _guard = span.enter();\n\n                        match client\n                            .in_flight\n                            .get(&sid)\n                            .unwrap()\n                            .outgoing_messages\n                            .try_recv()\n                        {\n                            Ok(msg) => {\n                                if !client.write_message(sid, msg, false, &mut self.scratch)? {\n                                    // No more write capacity at the moment.\n                                    break;\n                                }\n                            }\n                            Err(_) => break,\n                        }\n                    }\n                }\n            }\n\n            // Generate outgoing QUIC packets.\n            let mut packets = Vec::new();\n\n            let mut off = 0;\n            for client in self.clients.values_mut() {\n                let span = trace_span!(\"gather_send\", conn_id = ?client.conn_id);\n                let _guard = span.enter();\n\n                // Generate ack-eliciting keepalives for any clients with open\n                // streams. 
Clients with no open streams are allowed to time\n                // out.\n                client.send_periodic_keepalive()?;\n\n                loop {\n                    let start = off;\n                    self.scratch.resize(off + MAX_QUIC_PACKET_SIZE, 0);\n                    let (len, send_info) = match client.conn.send(&mut self.scratch[off..]) {\n                        Ok(v) => v,\n                        Err(quiche::Error::Done) => break,\n                        Err(e) => {\n                            error!(\"QUIC error: {:?}\", e);\n                            continue;\n                        }\n                    };\n\n                    off += len;\n                    packets.push((start..(start + len), send_info.to, send_info.at));\n                }\n\n                // Update the timeout.\n                client.update_timeout()?;\n            }\n\n            // Send out the packets.\n            if !packets.is_empty() {\n                let mut sendmmsg = sendmmsg::new();\n                for (range, to, txtime) in packets {\n                    sendmmsg = sendmmsg.sendmsg(&self.scratch[range], to, txtime);\n\n                    // Plot the max txtime difference.\n                    #[cfg(feature = \"tracy\")]\n                    {\n                        max_txtime = max_txtime.max(\n                            txtime\n                                .duration_since(std::time::Instant::now())\n                                .as_secs_f64()\n                                / 1000.0,\n                        );\n                    }\n                }\n\n                sendmmsg.finish(&self.socket)?;\n            }\n\n            #[cfg(feature = \"tracy\")]\n            tracy_client::plot!(\"max txtime (ms)\", max_txtime);\n        }\n    }\n\n    /// Handles an incoming datagram.\n    fn recv(&mut self, mut pkt: BytesMut, from: SocketAddr) -> anyhow::Result<()> {\n        let hdr = match quiche::Header::from_slice(&mut pkt, 
quiche::MAX_CONN_ID_LEN) {\n            Ok(v) => v,\n            Err(e) => {\n                bail!(\"invalid packet: {:?}\", e);\n            }\n        };\n\n        let num_clients = self.clients.len();\n        let client = match self.clients.get_mut(&hdr.dcid) {\n            Some(c) => c,\n            None if self.shutting_down => return Ok(()),\n            None => {\n                if hdr.ty != quiche::Type::Initial {\n                    debug!(\"invalid packet: dcid not found and not Initial\");\n                    return Ok(());\n                }\n\n                if let Some(max) = self.server_config.max_connections {\n                    if num_clients as u32 >= max.get() {\n                        warn!(\"rejecting connection: max_connections ({}) reached\", max);\n                        return Ok(());\n                    }\n                }\n\n                if !quiche::version_is_supported(hdr.version) {\n                    debug!(\n                        \"version {:x} is not supported; doing version negotiation\",\n                        hdr.version\n                    );\n\n                    let out = {\n                        self.scratch.resize(MAX_QUIC_PACKET_SIZE, 0);\n                        let len =\n                            quiche::negotiate_version(&hdr.scid, &hdr.dcid, &mut self.scratch)?;\n                        self.scratch.split_to(len).freeze()\n                    };\n\n                    self.outgoing_packets\n                        .push_back(Outgoing { buf: out, to: from });\n                    return Ok(());\n                }\n\n                let conn_id = gen_random_cid();\n                let conn =\n                    quiche::accept(&conn_id, None, self.addr, from, &mut self.quiche_config)?;\n\n                let mut timer = mio_timerfd::TimerFd::new(mio_timerfd::ClockId::Monotonic)?;\n                let timeout_token = mio::Token(self.next_timer_token);\n                self.next_timer_token += 
1;\n                self.poll.registry().register(\n                    &mut timer,\n                    timeout_token,\n                    mio::Interest::READABLE,\n                )?;\n\n                let streams = BTreeMap::new();\n\n                let (dgram_send, dgram_recv) = crossbeam_channel::unbounded();\n                let dgram_send = WakingSender::new(self.waker.clone(), dgram_send);\n\n                let c = ClientConnection {\n                    remote_addr: from,\n                    conn_id: conn_id.clone(),\n                    conn,\n                    timer,\n                    timeout_token,\n                    in_flight: streams,\n                    partial_reads: BTreeMap::new(),\n                    partial_writes: BTreeMap::new(),\n                    dgram_recv,\n                    dgram_send,\n\n                    last_keepalive: time::Instant::now(),\n                };\n\n                debug!(\"new client connection: {}\", from);\n                self.clients.entry(conn_id).or_insert(c)\n            }\n        };\n\n        // Run QUIC machinery.\n        client.conn.recv(\n            &mut pkt,\n            quiche::RecvInfo {\n                from,\n                to: self.addr,\n            },\n        )?;\n\n        for sid in client.conn.readable() {\n            let (messages, fin) = match client.read_messages(sid, &mut self.scratch) {\n                Ok(v) => v,\n                Err(e) => {\n                    if e.downcast_ref::<protocol::ProtocolError>().is_some() {\n                        client.err_stream(\n                            sid,\n                            ErrorCode::ErrorProtocol,\n                            Some(e.to_string()),\n                            &mut self.scratch,\n                        );\n                    } else {\n                        error!(\"unexpected error: {}\", e);\n                        client.err_stream(\n                            sid,\n                         
   ErrorCode::ErrorServer,\n                            Some(\"Internal server error\".to_string()),\n                            &mut self.scratch,\n                        );\n                    }\n\n                    continue;\n                }\n            };\n\n            let worker = match client.in_flight.get_mut(&sid) {\n                Some(w) => w,\n                None if messages.is_empty() => continue,\n                None => {\n                    let (incoming_send, incoming_recv) = crossbeam_channel::unbounded();\n                    let (outgoing_send, outgoing_recv) = crossbeam_channel::unbounded();\n                    let outgoing_send = WakingSender::new(self.waker.clone(), outgoing_send);\n                    let outgoing_dgrams = client.dgram_send.clone();\n\n                    let (done_send, done_recv) = oneshot::channel();\n                    let done_send = WakingOneshot::new(self.waker.clone(), done_send);\n\n                    let state_clone = self.state.clone();\n                    let max_dgram_len = match client.conn.dgram_max_writable_len() {\n                        Some(v) => v,\n                        None => bail!(\"client doesn't support datagrams\"),\n                    };\n\n                    let client_addr = client.remote_addr;\n                    self.thread_pool.execute(move || {\n                        let span = debug_span!(\"stream\", sid, remote_addr = ?client_addr);\n                        let _guard = span.enter();\n\n                        handlers::dispatch(\n                            state_clone,\n                            incoming_recv,\n                            outgoing_send,\n                            outgoing_dgrams,\n                            max_dgram_len,\n                            done_send,\n                        );\n                    });\n\n                    let worker = StreamWorker {\n                        incoming_messages: Some(incoming_send),\n                
        outgoing_messages: outgoing_recv,\n                        done: done_recv,\n                    };\n\n                    client.in_flight.entry(sid).or_insert(worker)\n                }\n            };\n\n            let incoming = worker.incoming_messages.as_ref().unwrap();\n            for msg in messages {\n                if incoming.send(msg).is_err() {\n                    // The worker finished execution, so ignore any further\n                    // messages.\n                    break;\n                }\n            }\n\n            if fin {\n                // Signal to the worker that the peer has stopped sending\n                // messages.\n                worker.incoming_messages.take();\n            }\n        }\n\n        // Update the timeout timer.\n        client.update_timeout()?;\n\n        // Clean up partial data for closed streams.\n        client\n            .partial_reads\n            .retain(|sid, _| !client.conn.stream_finished(*sid));\n        client\n            .partial_writes\n            .retain(|sid, _| !client.conn.stream_finished(*sid));\n\n        Ok(())\n    }\n}\n\nfn self_signed_tls_ctx(addr: SocketAddr) -> anyhow::Result<boring::ssl::SslContextBuilder> {\n    use boring::pkey::PKey;\n    use boring::x509::X509;\n\n    let ip = addr.ip();\n    assert!(!ip_rfc::global(&ip) && !ip.is_unspecified());\n\n    let certs = rcgen::generate_simple_self_signed(vec![ip.to_string()])\n        .context(\"generating self-signed certificates\")?;\n\n    let cert = X509::from_pem(certs.serialize_pem()?.as_bytes())?;\n    let key = PKey::private_key_from_pem(certs.serialize_private_key_pem().as_bytes())?;\n\n    let mut tls_ctx = boring::ssl::SslContextBuilder::new(boring::ssl::SslMethod::tls())?;\n    tls_ctx.set_private_key(&key)?;\n    tls_ctx.set_certificate(&cert)?;\n\n    Ok(tls_ctx)\n}\n\nimpl ClientConnection {\n    fn update_timeout(&mut self) -> anyhow::Result<()> {\n        if let Some(new_timeout) = 
self.conn.timeout() {\n            self.timer.set_timeout(&new_timeout)?;\n        } else {\n            self.timer.disarm()?;\n        }\n\n        Ok(())\n    }\n\n    fn read_messages(\n        &mut self,\n        sid: u64,\n        scratch: &mut BytesMut,\n    ) -> anyhow::Result<(Vec<protocol::MessageType>, bool)> {\n        // Start with partial data from the previous call to read_messages.\n        scratch.truncate(0);\n        if let Some(partial) = self.partial_reads.remove(&sid) {\n            scratch.unsplit(partial);\n        }\n\n        let mut off = scratch.len();\n        let mut stream_fin = false;\n        loop {\n            scratch.resize(off + protocol::MAX_MESSAGE_SIZE, 0);\n            match self.conn.stream_recv(sid, &mut scratch[off..]) {\n                Ok((len, fin)) => {\n                    off += len;\n\n                    if fin {\n                        stream_fin = true;\n                        break;\n                    }\n                }\n                Err(quiche::Error::Done) => {\n                    break;\n                }\n                Err(e) => return Err(e.into()),\n            }\n        }\n\n        // Read messages (there may be multiple).\n        scratch.truncate(off);\n        let mut buf = scratch.split();\n        let mut messages = Vec::new();\n        while !buf.is_empty() {\n            match protocol::decode_message(&buf) {\n                Ok((msg, len)) => {\n                    trace!(\n                        conn_id = ?self.conn_id,\n                        stream_id = sid,\n                        len,\n                        \"received {}\", msg\n                    );\n\n                    messages.push(msg);\n                    buf.advance(len);\n                }\n                Err(protocol::ProtocolError::InvalidMessageType(t, len)) => {\n                    warn!(msgtype = t, len, \"ignoring unknown message type\");\n                    buf.advance(len);\n                }\n         
       Err(protocol::ProtocolError::ShortBuffer(n)) => {\n                    trace!(\n                        \"partial message on stream {:?}:{}, need {} bytes\",\n                        self.conn_id,\n                        sid,\n                        n\n                    );\n\n                    self.partial_reads.insert(sid, buf);\n                    break;\n                }\n                Err(e) => return Err(e.into()),\n            };\n        }\n\n        Ok((messages, stream_fin))\n    }\n\n    /// Send a message on a stream. Returns Ok(false) if the stream is full.\n    fn write_message(\n        &mut self,\n        sid: u64,\n        msg: protocol::MessageType,\n        fin: bool,\n        scratch: &mut BytesMut,\n    ) -> anyhow::Result<bool> {\n        scratch.resize(protocol::MAX_MESSAGE_SIZE, 0);\n        let len =\n            protocol::encode_message(&msg, scratch).context(format!(\"failed to encode {}\", msg))?;\n\n        trace!(len, \"sending {}\", msg);\n\n        match self.conn.stream_send(sid, &scratch[..len], fin) {\n            Ok(n) if n != len => {\n                // Partial write.\n                assert!(n < len);\n                trace!(n, \"partial write\");\n\n                let partial = scratch.split_to(len).split_off(n).freeze();\n                let old = self.partial_writes.insert(sid, partial);\n                assert_eq!(None, old);\n\n                Ok(false)\n            }\n            Err(quiche::Error::Done) => {\n                trace!(\"stream blocked\");\n\n                let data = scratch.split_to(len).freeze();\n                let old = self.partial_writes.insert(sid, data);\n                assert_eq!(None, old);\n\n                Ok(false)\n            }\n            v => {\n                assert_eq!(len, v?);\n                Ok(true)\n            }\n        }\n    }\n\n    /// Flushes previous partial writes.\n    fn flush_partial_write(&mut self, sid: u64) -> anyhow::Result<bool> {\n        
use std::collections::btree_map::Entry;\n\n        match self.partial_writes.entry(sid) {\n            Entry::Vacant(_) => Ok(true),\n            Entry::Occupied(mut entry) => {\n                let partial = entry.get().clone();\n                trace!(len = partial.len(), \"flushing previous partial\");\n\n                match self.conn.stream_send(sid, &partial, false) {\n                    Ok(n) if n != entry.get().len() => {\n                        // Partial write.\n                        entry.get_mut().advance(n);\n                        trace!(len = entry.get().len(), \"remaining partial\");\n                        Ok(false)\n                    }\n                    Ok(_) => {\n                        entry.remove();\n                        Ok(true)\n                    }\n                    Err(quiche::Error::Done) => Ok(false),\n                    Err(e) => Err(anyhow!(e)),\n                }\n            }\n        }\n    }\n\n    /// Send a message as a datagram.\n    #[instrument(skip_all)]\n    fn send_dgram(&mut self, msg: Vec<u8>) -> anyhow::Result<()> {\n        trace!(\n            conn_id = ?self.conn_id,\n            len = msg.len(),\n            \"sending datagram\",\n        );\n\n        match self.conn.dgram_send_vec(msg) {\n            Ok(_) => Ok(()),\n            Err(quiche::Error::InvalidState) => Err(anyhow!(\"client doesn't support datagrams\")),\n            Err(e) => Err(e.into()),\n        }\n    }\n\n    /// Send an Error message on a stream, then shut it down.\n    fn err_stream(\n        &mut self,\n        sid: u64,\n        code: ErrorCode,\n        error: Option<String>,\n        scratch: &mut BytesMut,\n    ) {\n        // TODO actually send an error message\n        let msg = protocol::Error {\n            error_text: error.unwrap_or_default(),\n            err_code: code.into(),\n        };\n\n        let _ = self.write_message(sid, msg.into(), true, scratch);\n        let _ = self\n            .conn\n           
 .stream_shutdown(sid, quiche::Shutdown::Read, code as u64);\n\n        self.in_flight.remove(&sid);\n    }\n\n    fn send_periodic_keepalive(&mut self) -> quiche::Result<()> {\n        const KEEPALIVE_PERIOD: time::Duration = time::Duration::from_secs(1);\n\n        let now = time::Instant::now();\n        if self.in_flight.is_empty() || now.duration_since(self.last_keepalive) < KEEPALIVE_PERIOD {\n            return Ok(());\n        }\n\n        // Includes a PING in the next packet, but only if none of the frames\n        // in that packet are ack-eliciting.\n        self.last_keepalive = now;\n        self.conn.send_ack_eliciting()\n    }\n}\n\nfn gen_random_cid() -> quiche::ConnectionId<'static> {\n    let mut cid = vec![0; quiche::MAX_CONN_ID_LEN];\n    let rng = rand::SystemRandom::new();\n    rng.fill(&mut cid).unwrap();\n    quiche::ConnectionId::from_vec(cid)\n}\n"
  },
  {
    "path": "mm-server/src/session/audio/buffer.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{collections::VecDeque, io};\n\nuse byteorder::{BigEndian as BE, LittleEndian as LE, ReadBytesExt as _};\nuse dasp::{interpolate::sinc::Sinc, ring_buffer, signal::interpolate::Converter};\nuse pulseaudio::protocol as pulse;\n\n/// Raw bytes go in, (optionally) resampled frames come out.\npub enum PlaybackBuffer<F>\nwhere\n    F: dasp::Frame<Sample = f32>,\n{\n    Passthrough(Buffer<F>),\n    Resampling {\n        converter: Converter<Buffer<F>, Sinc<[F; 32]>>,\n        output_rate: u32,\n    },\n}\n\nimpl<F> PlaybackBuffer<F>\nwhere\n    F: dasp::Frame<Sample = f32>,\n{\n    pub fn new(sample_spec: pulse::SampleSpec, output_spec: pulse::SampleSpec) -> Self {\n        assert_eq!(output_spec.channels as usize, F::CHANNELS);\n        assert!(\n            sample_spec.channels as usize >= F::CHANNELS,\n            \"upmixing is not supported\"\n        );\n\n        let buffer = Buffer::new(sample_spec);\n        if sample_spec.sample_rate == output_spec.sample_rate {\n            Self::Passthrough(buffer)\n        } else {\n            let ringbuf = ring_buffer::Fixed::from([F::EQUILIBRIUM; 32]);\n            let interpolator = Sinc::new(ringbuf);\n            Self::Resampling {\n                converter: dasp::Signal::from_hz_to_hz(\n                    buffer,\n                    interpolator,\n                    sample_spec.sample_rate as f64,\n                    output_spec.sample_rate as f64,\n                ),\n                output_rate: output_spec.sample_rate,\n            }\n        }\n    }\n\n    fn buffer(&self) -> &Buffer<F> {\n        match self {\n            PlaybackBuffer::Passthrough(ref buffer) => buffer,\n            PlaybackBuffer::Resampling { converter, .. 
} => converter.source(),\n        }\n    }\n\n    fn buffer_mut(&mut self) -> &mut Buffer<F> {\n        match self {\n            PlaybackBuffer::Passthrough(ref mut buffer) => buffer,\n            PlaybackBuffer::Resampling { converter, .. } => converter.source_mut(),\n        }\n    }\n\n    pub fn len_bytes(&self) -> usize {\n        self.buffer().len_bytes()\n    }\n\n    pub fn len_frames(&self) -> usize {\n        self.buffer().len_frames()\n    }\n\n    pub fn is_empty(&self) -> bool {\n        self.len_frames() == 0\n    }\n\n    pub fn write(&mut self, payload: &[u8]) {\n        let _ = io::Write::write_all(&mut self.buffer_mut().inner, payload);\n    }\n\n    /// Reads data from the buffer at the output sample rate, returning\n    /// `num_frames` at that rate, or None if there's insufficient data.\n    ///\n    /// Dropping the returned signal removes the remaining unread data.\n    pub fn drain(&mut self, num_frames: usize) -> Option<impl dasp::Signal<Frame = F> + '_> {\n        match self {\n            PlaybackBuffer::Passthrough(buffer) => buffer.drain(num_frames).map(EitherSignal::Left),\n            PlaybackBuffer::Resampling {\n                ref mut converter,\n                output_rate,\n            } => {\n                let buffer = converter.source();\n                let needed_frames = (buffer.sample_spec.sample_rate as usize * num_frames)\n                    .div_ceil(*output_rate as usize);\n\n                if buffer.len_frames() < needed_frames {\n                    return None;\n                }\n\n                Some(EitherSignal::Right(Drain {\n                    signal: converter,\n                    remaining: num_frames,\n                }))\n            }\n        }\n    }\n\n    pub fn clear(&mut self) {\n        self.buffer_mut().inner.clear()\n    }\n}\n\nenum EitherSignal<L, R> {\n    Left(L),\n    Right(R),\n}\n\nimpl<L, R> dasp::Signal for EitherSignal<L, R>\nwhere\n    L: dasp::Signal,\n    R: dasp::Signal<Frame 
= L::Frame>,\n{\n    type Frame = L::Frame;\n\n    fn next(&mut self) -> Self::Frame {\n        match self {\n            EitherSignal::Left(s) => s.next(),\n            EitherSignal::Right(s) => s.next(),\n        }\n    }\n\n    fn is_exhausted(&self) -> bool {\n        match self {\n            EitherSignal::Left(s) => s.is_exhausted(),\n            EitherSignal::Right(s) => s.is_exhausted(),\n        }\n    }\n}\n\npub struct Buffer<F>\nwhere\n    F: dasp::Frame<Sample = f32>,\n{\n    inner: VecDeque<u8>,\n    sample_spec: pulse::SampleSpec,\n    bpp: usize,\n    _phantom: std::marker::PhantomData<F>,\n}\n\nimpl<F> Buffer<F>\nwhere\n    F: dasp::Frame<Sample = f32>,\n{\n    pub fn new(sample_spec: pulse::SampleSpec) -> Self {\n        Self {\n            inner: VecDeque::new(),\n            sample_spec,\n            bpp: sample_spec.format.bytes_per_sample(),\n            _phantom: std::marker::PhantomData,\n        }\n    }\n\n    fn len_bytes(&self) -> usize {\n        self.inner.len()\n    }\n\n    fn len_frames(&self) -> usize {\n        let input_channels = self.sample_spec.channels as usize;\n        self.inner.len() / (input_channels * self.bpp)\n    }\n\n    fn read_frame(&mut self) -> Option<F> {\n        if self.len_frames() == 0 {\n            return None;\n        }\n\n        let frame = F::from_fn(|_| self.read_sample().unwrap());\n\n        // Throw away additional channels.\n        // TODO: be more intelligent about up/downmixing.\n        let input_channels = self.sample_spec.channels as usize;\n        for _ in 0..input_channels.saturating_sub(F::CHANNELS) {\n            let _ = self.read_sample();\n        }\n\n        Some(frame)\n    }\n\n    fn read_sample(&mut self) -> Option<F::Sample> {\n        use dasp::Sample;\n\n        match self.sample_spec.format {\n            pulse::SampleFormat::Float32Le => self.inner.read_f32::<LE>().ok(),\n            pulse::SampleFormat::Float32Be => self.inner.read_f32::<BE>().ok(),\n            
pulse::SampleFormat::S16Le => self.inner.read_i16::<LE>().ok().map(Sample::from_sample),\n            pulse::SampleFormat::S16Be => self.inner.read_i16::<BE>().ok().map(Sample::from_sample),\n            pulse::SampleFormat::U8 => self.inner.read_u8().ok().map(Sample::from_sample),\n            pulse::SampleFormat::S32Le => self.inner.read_i32::<LE>().ok().map(Sample::from_sample),\n            pulse::SampleFormat::S32Be => self.inner.read_i32::<BE>().ok().map(Sample::from_sample),\n            pulse::SampleFormat::S24Le => self.inner.read_i24::<LE>().ok().map(Sample::from_sample),\n            _ => unimplemented!(),\n        }\n    }\n}\n\nimpl<F> Buffer<F>\nwhere\n    F: dasp::Frame<Sample = f32>,\n{\n    fn drain(&mut self, num_frames: usize) -> Option<Drain<Self>> {\n        if self.len_frames() < num_frames {\n            return None; // Not enough data.\n        }\n\n        Some(Drain {\n            signal: self,\n            remaining: num_frames,\n        })\n    }\n}\n\nimpl<F> dasp::Signal for Buffer<F>\nwhere\n    F: dasp::Frame<Sample = f32>,\n{\n    type Frame = F;\n\n    fn next(&mut self) -> Self::Frame {\n        self.read_frame()\n            .unwrap_or(<Self::Frame as dasp::Frame>::EQUILIBRIUM)\n    }\n}\n\nstruct Drain<'a, S: dasp::Signal> {\n    signal: &'a mut S,\n    remaining: usize,\n}\n\nimpl<S: dasp::Signal> dasp::Signal for Drain<'_, S> {\n    type Frame = S::Frame;\n\n    fn is_exhausted(&self) -> bool {\n        self.remaining == 0\n    }\n\n    fn next(&mut self) -> Self::Frame {\n        if self.remaining == 0 {\n            <Self::Frame as dasp::Frame>::EQUILIBRIUM\n        } else {\n            self.remaining -= 1;\n            dasp::Signal::next(&mut self.signal)\n        }\n    }\n}\n\nimpl<S: dasp::Signal> Drop for Drain<'_, S> {\n    fn drop(&mut self) {\n        for _ in 0..self.remaining {\n            if self.signal.is_exhausted() {\n                break;\n            }\n\n            let _ = dasp::Signal::next(&mut 
self.signal);\n        }\n    }\n}\n\n#[cfg(test)]\nmod test {\n    use byteorder::WriteBytesExt as _;\n    use dasp::Signal as _;\n\n    use super::*;\n\n    #[test]\n    fn passthrough() {\n        let mut buf = PlaybackBuffer::<[f32; 2]>::new(\n            pulse::SampleSpec {\n                format: pulse::SampleFormat::Float32Le,\n                channels: 2,\n                sample_rate: 24000,\n            },\n            pulse::SampleSpec {\n                format: pulse::SampleFormat::Float32Le,\n                channels: 2,\n                sample_rate: 24000,\n            },\n        );\n\n        let mut data = vec![];\n        data.write_f32::<LE>(1.0).unwrap();\n        data.write_f32::<LE>(2.0).unwrap();\n        data.write_f32::<LE>(1.0).unwrap();\n        data.write_f32::<LE>(2.0).unwrap();\n        buf.write(&data);\n\n        assert_eq!(buf.len_bytes(), 16);\n        assert_eq!(buf.len_frames(), 2);\n        assert!(buf.drain(3).is_none());\n\n        {\n            let mut frames = buf.drain(2).unwrap();\n            assert_eq!(frames.next(), [1.0, 2.0]);\n            assert_eq!(frames.next(), [1.0, 2.0]);\n            assert!(frames.is_exhausted());\n            assert_eq!(frames.next(), [0.0, 0.0]);\n        }\n\n        assert!(buf.drain(1).is_none());\n    }\n\n    #[test]\n    fn downmix() {\n        let mut buf = PlaybackBuffer::<[f32; 2]>::new(\n            pulse::SampleSpec {\n                format: pulse::SampleFormat::Float32Le,\n                channels: 5,\n                sample_rate: 24000,\n            },\n            pulse::SampleSpec {\n                format: pulse::SampleFormat::Float32Le,\n                channels: 2,\n                sample_rate: 24000,\n            },\n        );\n\n        let mut data = vec![];\n        data.write_f32::<LE>(1.0).unwrap();\n        data.write_f32::<LE>(1.0).unwrap();\n        data.write_f32::<LE>(2.0).unwrap();\n        data.write_f32::<LE>(2.0).unwrap();\n        
data.write_f32::<LE>(2.0).unwrap();\n        data.write_f32::<LE>(-1.0).unwrap();\n        data.write_f32::<LE>(-1.0).unwrap();\n        data.write_f32::<LE>(-2.0).unwrap();\n        data.write_f32::<LE>(-2.0).unwrap();\n        data.write_f32::<LE>(-2.0).unwrap();\n        buf.write(&data);\n\n        assert_eq!(buf.len_bytes(), 40);\n        assert_eq!(buf.len_frames(), 2);\n        assert!(buf.drain(3).is_none());\n\n        {\n            let mut frames = buf.drain(2).unwrap();\n            assert_eq!(frames.next(), [1.0, 1.0]);\n            assert_eq!(frames.next(), [-1.0, -1.0]);\n            assert!(frames.is_exhausted());\n        }\n\n        assert!(buf.drain(1).is_none());\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/audio/pulse.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    collections::BTreeMap,\n    ffi::{CStr, CString},\n    io::{prelude::*, Cursor},\n    path::Path,\n    sync::Arc,\n    time,\n};\n\nuse anyhow::{bail, Context};\nuse bytes::BytesMut;\nuse crossbeam_channel as crossbeam;\nuse cstr::cstr;\nuse mio::net::UnixListener;\nuse pulseaudio::protocol::{self as pulse, ClientInfoList};\nuse tracing::{debug, error, trace, warn};\n\nuse super::buffer::PlaybackBuffer;\nuse super::EncodeFrame;\nuse crate::{session::EPOCH, waking_sender::WakingSender};\n\nconst WAKER: mio::Token = mio::Token(0);\nconst LISTENER: mio::Token = mio::Token(1);\nconst CLOCK: mio::Token = mio::Token(2);\n\n// The server emits samples at this rate to the encoder.\npub const CAPTURE_SAMPLE_RATE: u32 = 48000;\npub const CAPTURE_CHANNEL_COUNT: u32 = 2;\npub const CAPTURE_SPEC: pulse::SampleSpec = pulse::SampleSpec {\n    format: pulse::SampleFormat::Float32Le,\n    channels: CAPTURE_CHANNEL_COUNT as u8,\n    sample_rate: CAPTURE_SAMPLE_RATE,\n};\n\n// Run the clock every 10ms, which is the smallest Opus frame size.\nconst CLOCK_RATE_HZ: u32 = 100;\n\nconst SINK_NAME: &CStr = cstr!(\"magic_mirror\");\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum StreamState {\n    Prebuffering(u64), // The number of bytes remaining before we can start 'playback'.\n    Corked,\n    Playing,\n    Draining(u32), // The seq of the drain request, so we can ack it.\n}\n\nstruct PlaybackStream {\n    state: StreamState,\n    buffer_attr: pulse::stream::BufferAttr,\n    buffer: PlaybackBuffer<[f32; 2]>,\n    requested_bytes: usize,\n    played_bytes: u64,\n    write_offset: u64,\n    read_offset: u64,\n}\n\nstruct Client {\n    id: u32,\n    socket: mio::net::UnixStream,\n    protocol_version: u16,\n    props: Option<pulse::Props>,\n    incoming: BytesMut,\n    playback_streams: BTreeMap<u32, PlaybackStream>,\n}\n\nstruct ServerState {\n    
server_info: pulse::ServerInfo,\n    cards: Vec<pulse::CardInfo>,\n    sinks: Vec<pulse::SinkInfo>,\n    default_format_info: pulse::FormatInfo,\n    next_playback_channel_index: u32,\n}\n\npub struct PulseServer {\n    listener: UnixListener,\n    poll: mio::Poll,\n    clock: mio_timerfd::TimerFd,\n\n    close_rx: crossbeam::Receiver<()>,\n    unencoded_tx: crossbeam::Sender<EncodeFrame>,\n    done_rx: crossbeam::Receiver<EncodeFrame>,\n\n    clients: BTreeMap<mio::Token, Client>,\n    server_state: ServerState,\n}\n\nimpl PulseServer {\n    pub fn new(\n        socket_name: impl AsRef<Path>,\n        unencoded_tx: crossbeam::Sender<super::EncodeFrame>,\n        done_rx: crossbeam::Receiver<super::EncodeFrame>,\n    ) -> anyhow::Result<(Self, WakingSender<()>)> {\n        let listener = UnixListener::bind(socket_name)?;\n        let poll = mio::Poll::new()?;\n        let waker = Arc::new(mio::Waker::new(poll.registry(), WAKER)?);\n\n        let mut clock = mio_timerfd::TimerFd::new(mio_timerfd::ClockId::Monotonic)?;\n        clock.set_timeout_interval(&time::Duration::from_nanos(\n            1_000_000_000 / CLOCK_RATE_HZ as u64,\n        ))?;\n\n        let mut server_info = pulse::ServerInfo {\n            server_name: Some(cstr!(\"Magic Mirror\").into()),\n            server_version: Some(cstr!(\"0.0.1\").into()),\n            host_name: Some(CString::new(\"mmserver\")?),\n            default_sink_name: Some(SINK_NAME.into()),\n            default_source_name: Some(SINK_NAME.into()),\n            ..Default::default()\n        };\n\n        // let dummy_card_index = 99;\n        // let mut dummy_card = pulse::CardInfo {\n        //     index: dummy_card_index,\n        //     name: cstr!(\"Magic Mirror virtual output\").into(),\n        //     props: pulse::Props::new(),\n        //     owner_module_index: None,\n        //     driver: Some(cstr!(\"magic-mirror\").into()),\n        //     ports: vec![pulse::CardPortInfo {\n        //         name: 
cstr!(\"virtual-output-0\").into(),\n        //         description: Some(cstr!(\"virtual output\").into()),\n        //         priority: 0,\n        //         available: pulse::port_info::PortAvailable::Yes,\n        //         dir: pulse::port_info::PortDirection::Input,\n        //         props: pulse::Props::new(),\n        //         port_type: pulse::port_info::PortType::Network,\n        //         availability_group: None, //Some(cstr!(\"output\").into()),\n        //         profiles: vec![cstr!(\"output:stereo\").into()],\n        //         latency_offset: 0,\n        //     }],\n        //     profiles: vec![pulse::CardProfileInfo {\n        //         name: cstr!(\"output:stereo\").into(),\n        //         description: Some(cstr!(\"Stereo\").into()),\n        //         priority: 1000,\n        //         available: 1,\n        //         num_sinks: 1,\n        //         num_sources: 0,\n        //     }],\n        //     active_profile: Some(cstr!(\"output:stereo\").into()),\n        // };\n\n        // dummy_card.props.set(\n        //     pulse::Prop::DeviceDescription,\n        //     cstr!(\"Magic Mirror virtual output\"),\n        // );\n\n        let mut dummy_sink = pulse::SinkInfo::new_dummy(1);\n        dummy_sink.name = SINK_NAME.into();\n        dummy_sink.description = Some(cstr!(\"Magic Mirror virtual output\").into());\n        dummy_sink.sample_spec = pulse::SampleSpec {\n            format: pulse::SampleFormat::Float32Le,\n            channels: 2,\n            sample_rate: CAPTURE_SAMPLE_RATE,\n        };\n\n        server_info.channel_map = dummy_sink.channel_map;\n        server_info.sample_spec = dummy_sink.sample_spec;\n\n        // dummy_sink.card_index = Some(dummy_card_index);\n        dummy_sink.ports[0].port_type = pulse::port_info::PortType::Network;\n        dummy_sink.ports[0].description = Some(cstr!(\"virtual output\").into());\n\n        let mut format_props = pulse::Props::new();\n        
format_props.set(pulse::Prop::FormatChannels, cstr!(\"2\"));\n        format_props.set(\n            pulse::Prop::FormatChannelMap,\n            cstr!(\"front-left,front-right\"),\n        );\n        format_props.set(pulse::Prop::FormatSampleFormat, cstr!(\"float32le\"));\n        format_props.set(\n            pulse::Prop::FormatRate,\n            CString::new(CAPTURE_SAMPLE_RATE.to_string()).unwrap(),\n        );\n\n        let default_format_info = pulse::FormatInfo {\n            encoding: pulse::FormatEncoding::Pcm,\n            props: format_props,\n        };\n\n        dummy_sink.formats[0] = default_format_info.clone();\n\n        let (close_tx, close_rx) = crossbeam::bounded(1);\n        let close_tx = WakingSender::new(waker.clone(), close_tx);\n\n        Ok((\n            Self {\n                listener,\n                poll,\n                clock,\n                unencoded_tx,\n                done_rx,\n                close_rx,\n                clients: BTreeMap::new(),\n                server_state: ServerState {\n                    server_info,\n                    cards: vec![], // vec![dummy_card],\n                    sinks: vec![dummy_sink],\n                    default_format_info,\n                    next_playback_channel_index: 0,\n                },\n            },\n            close_tx,\n        ))\n    }\n\n    pub fn run(&mut self) -> anyhow::Result<()> {\n        // Client tokens start from 1024.\n        let mut next_client_token = 1024;\n\n        self.poll\n            .registry()\n            .register(&mut self.clock, CLOCK, mio::Interest::READABLE)?;\n\n        self.poll\n            .registry()\n            .register(&mut self.listener, LISTENER, mio::Interest::READABLE)?;\n\n        let mut events = mio::Events::with_capacity(1024);\n\n        loop {\n            match self\n                .poll\n                .poll(&mut events, Some(time::Duration::from_secs(1)))\n            {\n                Ok(_) => (),\n           
     Err(e) if e.kind() == std::io::ErrorKind::Interrupted => continue,\n                Err(e) => return Err(e.into()),\n            }\n\n            match self.close_rx.try_recv() {\n                Ok(()) | Err(crossbeam::TryRecvError::Disconnected) => return Ok(()),\n                _ => (),\n            }\n\n            for event in events.iter() {\n                match event.token() {\n                    CLOCK => {\n                        self.clock.read()?;\n                        self.clock_tick()?;\n                    }\n                    LISTENER => {\n                        let (mut socket, _) = self.listener.accept()?;\n                        let id = next_client_token as u32;\n                        let token = mio::Token(next_client_token);\n                        next_client_token += 1;\n\n                        debug!(\"pulseaudio client connected\");\n\n                        self.poll.registry().register(\n                            &mut socket,\n                            token,\n                            mio::Interest::READABLE,\n                        )?;\n\n                        self.clients.insert(\n                            token,\n                            Client {\n                                id,\n                                socket,\n                                protocol_version: pulse::MAX_VERSION,\n                                props: None,\n                                incoming: BytesMut::new(),\n                                playback_streams: BTreeMap::new(),\n                            },\n                        );\n                    }\n                    client_token if event.is_read_closed() => {\n                        if let Some(mut client) = self.clients.remove(&client_token) {\n                            debug!(\"pulseaudio client disconnected\");\n                            self.poll.registry().deregister(&mut client.socket)?;\n                        }\n                    }\n 
                   client_token\n                        if event.is_readable() && self.clients.contains_key(&client_token) =>\n                    {\n                        if let Err(e) = self.recv(client_token) {\n                            error!(\"pulseaudio client error: {}\", e);\n                            let mut client = self.clients.remove(&client_token).unwrap();\n                            self.poll.registry().deregister(&mut client.socket)?;\n                        }\n                    }\n                    _ => (),\n                }\n            }\n        }\n    }\n\n    fn recv(&mut self, client_token: mio::Token) -> anyhow::Result<()> {\n        let client = self.clients.get_mut(&client_token).unwrap();\n\n        let mut read_size = 8192;\n\n        'read: loop {\n            let off = client.incoming.len();\n            client.incoming.resize(off + read_size, 0);\n            let n = match client.socket.read(&mut client.incoming[off..]) {\n                Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n                    client.incoming.truncate(off);\n                    return Ok(());\n                }\n                v => v.context(\"recv error\")?,\n            };\n\n            client.incoming.truncate(off + n);\n\n            loop {\n                if client.incoming.len() < pulse::DESCRIPTOR_SIZE {\n                    read_size = 8192;\n                    continue 'read;\n                }\n\n                let desc = pulse::read_descriptor(&mut Cursor::new(\n                    &client.incoming[..pulse::DESCRIPTOR_SIZE],\n                ))?;\n                if client.incoming.len() < (desc.length as usize + pulse::DESCRIPTOR_SIZE) {\n                    read_size =\n                        desc.length as usize + pulse::DESCRIPTOR_SIZE - client.incoming.len();\n                    continue 'read;\n                }\n\n                let _desc_bytes = client.incoming.split_to(pulse::DESCRIPTOR_SIZE);\n         
       let payload = client.incoming.split_to(desc.length as usize).freeze();\n\n                if desc.channel == u32::MAX {\n                    let (seq, cmd) = match pulse::Command::read_tag_prefixed(\n                        &mut Cursor::new(payload),\n                        client.protocol_version,\n                    ) {\n                        Err(pulse::ProtocolError::Unimplemented(seq, cmd)) => {\n                            error!(\"received unimplemented command {:?}\", cmd);\n\n                            pulse::write_error(\n                                &mut client.socket,\n                                seq,\n                                pulse::PulseError::NotImplemented,\n                            )?;\n\n                            continue;\n                        }\n                        v => v.context(\"decoding command\")?,\n                    };\n\n                    match handle_command(client, &mut self.server_state, seq, cmd) {\n                        Ok(()) => (),\n                        Err(e) => {\n                            let _ = pulse::write_error(\n                                &mut client.socket,\n                                seq,\n                                pulse::PulseError::Internal,\n                            );\n\n                            return Err(e);\n                        }\n                    }\n                } else {\n                    handle_stream_write(client, desc, &payload)?;\n                }\n            }\n        }\n    }\n\n    fn clock_tick(&mut self) -> anyhow::Result<()> {\n        let mut done_draining = Vec::new();\n\n        let capture_ts = EPOCH.elapsed().as_millis() as u64;\n        let num_frames = CAPTURE_SAMPLE_RATE / CLOCK_RATE_HZ;\n        let encode_len = num_frames * CAPTURE_CHANNEL_COUNT;\n\n        let mut frame = match self.done_rx.try_recv() {\n            Ok(mut frame) => {\n                frame.buf.resize(encode_len as usize, 0.0);\n              
  frame.buf.fill(0.0);\n                Some(frame)\n            }\n            Err(crossbeam::TryRecvError::Empty) => {\n                // No one's listening, but we still need to capture audio from\n                // clients.\n                None\n            }\n            Err(crossbeam::TryRecvError::Disconnected) => return Ok(()),\n        };\n\n        for client in self.clients.values_mut() {\n            done_draining.clear();\n            for (id, stream) in client.playback_streams.iter_mut() {\n                if matches!(\n                    stream.state,\n                    StreamState::Playing | StreamState::Draining(_)\n                ) {\n                    // Track how much we read.\n                    let buffer_len = stream.buffer.len_bytes();\n\n                    // Check for underrun.\n                    let Some(frames) = stream.buffer.drain(num_frames as usize) else {\n                        error!(id, \"buffer underrun for stream\");\n                        pulse::write_command_message(\n                            &mut client.socket,\n                            u32::MAX,\n                            pulse::Command::Underflow(pulse::Underflow {\n                                channel: *id,\n                                offset: 0, // TODO\n                            }),\n                            client.protocol_version,\n                        )?;\n\n                        if stream.buffer_attr.pre_buffering > 0\n                            && matches!(stream.state, StreamState::Playing)\n                        {\n                            stream.state =\n                                StreamState::Prebuffering(stream.buffer_attr.pre_buffering as u64);\n                            // TODO: request in this case?\n                        }\n\n                        continue;\n                    };\n\n                    if let Some(ref mut frame) = frame {\n                        let mut resampled =\n               
             dasp::Signal::into_interleaved_samples(frames).into_iter();\n\n                        for sample in &mut frame.buf {\n                            *sample += resampled.next().unwrap_or_default();\n                        }\n                    } else {\n                        // Discard data even if we're not encoding it.\n                        drop(frames)\n                    }\n\n                    let read_len = buffer_len - stream.buffer.len_bytes();\n                    trace!(\n                        id,\n                        read_len,\n                        buffer_len,\n                        new_len = buffer_len - read_len,\n                        \"stream read\"\n                    );\n\n                    stream.read_offset += read_len as u64;\n                    stream.played_bytes += read_len as u64;\n\n                    // If we've drained the buffer, we can drop the stream.\n                    if matches!(stream.state, StreamState::Draining(_)) && stream.buffer.is_empty()\n                    {\n                        debug!(id, \"finished draining stream\");\n                        done_draining.push(*id)\n                    }\n                }\n\n                // Request a write to fill the buffer.\n                let bytes_needed = (stream.buffer_attr.target_length as usize)\n                    .saturating_sub(stream.buffer.len_bytes() + stream.requested_bytes);\n                if matches!(stream.state, StreamState::Playing | StreamState::Corked)\n                    && bytes_needed >= stream.buffer_attr.minimum_request_length as usize\n                {\n                    trace!(id, bytes_needed, \"requesting buffer write\");\n\n                    stream.requested_bytes += bytes_needed;\n                    pulse::write_command_message(\n                        &mut client.socket,\n                        u32::MAX,\n                        pulse::Command::Request(pulse::Request {\n                        
    channel: *id,\n                            length: bytes_needed as u32,\n                        }),\n                        client.protocol_version,\n                    )?;\n                }\n            }\n\n            for id in done_draining.iter() {\n                let stream = client.playback_streams.remove(id).unwrap();\n                if let StreamState::Draining(drain_seq) = stream.state {\n                    pulse::write_ack_message(&mut client.socket, drain_seq)?;\n                } else {\n                    unreachable!()\n                }\n            }\n        }\n\n        // Encode the frame.\n        if let Some(mut frame) = frame {\n            frame.capture_ts = capture_ts;\n            self.unencoded_tx.send(frame)?;\n        }\n\n        Ok(())\n    }\n}\n\nfn handle_command(\n    client: &mut Client,\n    server: &mut ServerState,\n    seq: u32,\n    cmd: pulse::Command,\n) -> anyhow::Result<()> {\n    trace!(\"got command [{}]: {:#?}\", seq, cmd);\n\n    match cmd {\n        pulse::Command::Auth(pulse::AuthParams { version, .. 
}) => {\n            let version = std::cmp::min(version, pulse::MAX_VERSION);\n            client.protocol_version = version;\n            trace!(\"client protocol version: {}\", version);\n\n            write_reply(\n                &mut client.socket,\n                seq,\n                &pulse::AuthReply {\n                    version: pulse::MAX_VERSION,\n                    ..Default::default()\n                },\n                client.protocol_version,\n            )?;\n\n            Ok(())\n        }\n        pulse::Command::SetClientName(props) => {\n            client.props = Some(props);\n\n            write_reply(\n                &mut client.socket,\n                seq,\n                &pulse::SetClientNameReply {\n                    client_id: client.id,\n                },\n                client.protocol_version,\n            )?;\n\n            Ok(())\n        }\n        // Introspection commands.\n        pulse::Command::GetServerInfo => {\n            write_reply(\n                &mut client.socket,\n                seq,\n                &server.server_info,\n                client.protocol_version,\n            )?;\n            Ok(())\n        }\n        pulse::Command::GetClientInfo(id) => {\n            let reply = pulse::ClientInfo {\n                index: id,\n                ..Default::default()\n            };\n\n            write_reply(&mut client.socket, seq, &reply, client.protocol_version)?;\n            Ok(())\n        }\n        pulse::Command::GetClientInfoList => {\n            let reply: ClientInfoList = Vec::new(); // TODO\n            write_reply(&mut client.socket, seq, &reply, client.protocol_version)?;\n            Ok(())\n        }\n        pulse::Command::GetCardInfo(_) => {\n            write_reply(\n                &mut client.socket,\n                seq,\n                &server.cards[0],\n                client.protocol_version,\n            )?;\n\n            Ok(())\n        }\n        
pulse::Command::GetCardInfoList => {\n            write_reply(\n                &mut client.socket,\n                seq,\n                &server.cards,\n                client.protocol_version,\n            )?;\n\n            Ok(())\n        }\n        pulse::Command::GetSinkInfo(_) => {\n            write_reply(\n                &mut client.socket,\n                seq,\n                &server.sinks[0],\n                client.protocol_version,\n            )?;\n\n            Ok(())\n        }\n        pulse::Command::GetSinkInfoList => {\n            write_reply(\n                &mut client.socket,\n                seq,\n                &server.sinks,\n                client.protocol_version,\n            )?;\n\n            Ok(())\n        }\n        pulse::Command::GetSinkInputInfoList => {\n            let reply: pulse::SinkInputInfoList = Vec::new();\n            write_reply(&mut client.socket, seq, &reply, client.protocol_version)?;\n            Ok(())\n        }\n        pulse::Command::GetSourceInfo(_) => {\n            pulse::write_error(&mut client.socket, seq, pulse::PulseError::NoEntity)?;\n            Ok(())\n        }\n        pulse::Command::GetSourceOutputInfoList => {\n            let reply: pulse::SourceOutputInfoList = Vec::new();\n            write_reply(&mut client.socket, seq, &reply, client.protocol_version)?;\n            Ok(())\n        }\n        pulse::Command::GetSourceInfoList => {\n            let reply: pulse::SinkInfoList = Vec::new();\n            write_reply(&mut client.socket, seq, &reply, client.protocol_version)?;\n            Ok(())\n        }\n        pulse::Command::Subscribe(_) => {\n            // We don't have any state changes that would warrant an event.\n            pulse::write_ack_message(&mut client.socket, seq)?;\n            Ok(())\n        }\n        // Playback streams.\n        pulse::Command::CreatePlaybackStream(params) => {\n            let mut sample_spec = params.sample_spec;\n            if 
sample_spec.format == pulse::SampleFormat::Invalid {\n                if let Some(format) =\n                    params\n                        .formats\n                        .iter()\n                        .find_map(|f| match sample_spec_from_format(f) {\n                            Ok(ss) => Some(ss),\n                            Err(e) => {\n                                warn!(\"rejecting invalid format: {:#}\", e);\n                                None\n                            }\n                        })\n                {\n                    sample_spec = format;\n                }\n            }\n\n            // Check if the client set any buffer attrs\n            // to -1, which indicates that we should\n            // set the value.\n            let mut buffer_attr = params.buffer_attr;\n            configure_buffer(&mut buffer_attr, &sample_spec);\n\n            let target_length = buffer_attr.target_length;\n\n            let flags = params.flags;\n            let mut stream = PlaybackStream {\n                state: StreamState::Prebuffering(buffer_attr.pre_buffering as u64),\n                buffer_attr,\n                buffer: PlaybackBuffer::new(sample_spec, CAPTURE_SPEC),\n                requested_bytes: target_length as usize,\n                played_bytes: 0,\n                write_offset: 0,\n                read_offset: 0,\n            };\n\n            // Returning a nonzero pre_buffering value always causes the stream\n            // to start after prebuffering is complete, even if the client\n            // requested otherwise.\n            if buffer_attr.pre_buffering == 0 || flags.start_corked {\n                stream.state = StreamState::Corked;\n            }\n\n            let channel = server.next_playback_channel_index;\n            server.next_playback_channel_index += 1;\n\n            client.playback_streams.insert(channel, stream);\n\n            let reply = pulse::CreatePlaybackStreamReply {\n                
channel,\n                stream_index: 500,\n                sample_spec,\n                channel_map: params.channel_map,\n                buffer_attr,\n                requested_bytes: target_length,\n                sink_name: Some(SINK_NAME.into()),\n                format: server.default_format_info.clone(),\n                stream_latency: 10000, // TODO\n                ..Default::default()\n            };\n\n            write_reply(&mut client.socket, seq, &reply, client.protocol_version)?;\n            Ok(())\n        }\n        pulse::Command::DrainPlaybackStream(channel) => {\n            if let Some(stream) = client.playback_streams.get_mut(&channel) {\n                // The ack gets sent once we finish draining.\n                stream.state = StreamState::Draining(seq);\n            }\n\n            Ok(())\n        }\n        pulse::Command::GetPlaybackLatency(pulse::LatencyParams { channel, now, .. }) => {\n            if let Some(stream) = client.playback_streams.get_mut(&channel) {\n                let reply = pulse::PlaybackLatency {\n                    sink_usec: 10000,\n                    source_usec: 0,\n                    playing: matches!(stream.state, StreamState::Playing),\n                    local_time: now,\n                    remote_time: time::SystemTime::now(),\n                    write_offset: stream.write_offset as i64,\n                    read_offset: stream.read_offset as i64,\n                    underrun_for: u64::MAX,\n                    playing_for: stream.played_bytes,\n                };\n\n                write_reply(&mut client.socket, seq, &reply, client.protocol_version)?;\n            }\n\n            Ok(())\n        }\n        pulse::Command::UpdatePlaybackStreamProplist(_) => {\n            pulse::write_ack_message(&mut client.socket, seq)?;\n            Ok(())\n        }\n        pulse::Command::CorkPlaybackStream(params) => {\n            if let Some(stream) = 
client.playback_streams.get_mut(&params.channel) {\n                match stream.state {\n                    StreamState::Corked if !params.cork => {\n                        let needed = stream\n                            .buffer_attr\n                            .target_length\n                            .saturating_sub(stream.buffer.len_bytes() as u32);\n\n                        stream.state = if needed > 0 {\n                            // Request bytes to fill the buffer.\n                            trace!(\n                                id = params.channel,\n                                bytes_needed = needed,\n                                \"requesting buffer write\"\n                            );\n                            pulse::write_command_message(\n                                &mut client.socket,\n                                u32::MAX,\n                                pulse::Command::Request(pulse::Request {\n                                    channel: params.channel,\n                                    length: needed,\n                                }),\n                                client.protocol_version,\n                            )?;\n\n                            stream.requested_bytes = needed as usize;\n                            StreamState::Prebuffering(needed as u64)\n                        } else {\n                            StreamState::Playing\n                        };\n                    }\n                    StreamState::Playing if params.cork => {\n                        stream.state = StreamState::Corked;\n                    }\n                    _ => (),\n                }\n            }\n\n            pulse::write_ack_message(&mut client.socket, seq)?;\n            Ok(())\n        }\n        pulse::Command::FlushPlaybackStream(channel) => {\n            if let Some(stream) = client.playback_streams.get_mut(&channel) {\n                stream.buffer.clear();\n                
stream.requested_bytes = 0;\n                stream.played_bytes = 0;\n                stream.read_offset = stream.write_offset;\n            }\n\n            pulse::write_ack_message(&mut client.socket, seq)?;\n            Ok(())\n        }\n        pulse::Command::Extension(_) => {\n            pulse::write_error(&mut client.socket, seq, pulse::PulseError::NoExtension)?;\n            Ok(())\n        }\n        _ => {\n            warn!(\"ignoring command {:?}\", cmd.tag());\n            pulse::write_error(&mut client.socket, seq, pulse::PulseError::NotImplemented)?;\n\n            Ok(())\n        }\n    }\n}\n\nfn sample_spec_from_format(f: &pulse::FormatInfo) -> anyhow::Result<pulse::SampleSpec> {\n    let format = f\n        .props\n        .get(pulse::Prop::FormatSampleFormat)\n        .context(\"missing sample format\")?;\n    let rate = f\n        .props\n        .get(pulse::Prop::FormatRate)\n        .context(\"missing sample rate\")?;\n    let channels = f\n        .props\n        .get(pulse::Prop::FormatChannels)\n        .context(\"missing channel count\")?;\n\n    let format = match sanitize_prop_str(format)? 
{\n        \"s16le\" => pulse::SampleFormat::S16Le,\n        \"s16be\" => pulse::SampleFormat::S16Be,\n        \"u8\" => pulse::SampleFormat::U8,\n        \"s32le\" => pulse::SampleFormat::S32Le,\n        \"s32be\" => pulse::SampleFormat::S32Be,\n        \"s24le\" => pulse::SampleFormat::S24Le,\n        \"s24be\" => pulse::SampleFormat::S24Be,\n        \"float32le\" => pulse::SampleFormat::Float32Le,\n        \"float32be\" => pulse::SampleFormat::Float32Be,\n        _ => bail!(\"unsupported sample format: {:?}\", format),\n    };\n\n    let rate = sanitize_prop_str(rate)?\n        .parse()\n        .context(format!(\"invalid sample rate: {:?}\", rate))?;\n\n    let channels = sanitize_prop_str(channels)?\n        .parse()\n        .context(format!(\"invalid channel count: {:?}\", channels))?;\n\n    Ok(pulse::SampleSpec {\n        format,\n        sample_rate: rate,\n        channels,\n    })\n}\n\nfn sanitize_prop_str(b: &[u8]) -> anyhow::Result<&str> {\n    let s = CStr::from_bytes_with_nul(b).context(\"invalid string\")?;\n    let s = s.to_str().context(\"invalid utf-8\")?;\n    Ok(s.trim_matches('\"'))\n}\n\nfn handle_stream_write(\n    client: &mut Client,\n    desc: pulse::Descriptor,\n    payload: &[u8],\n) -> anyhow::Result<()> {\n    let stream = match client.playback_streams.get_mut(&desc.channel) {\n        Some(v) => v,\n        None => {\n            bail!(\"invalid channel\")\n        }\n    };\n\n    let buffer_len = stream.buffer.len_bytes();\n    trace!(\n        id = desc.channel,\n        ?stream.state,\n        write_len = desc.length,\n        current_len = buffer_len,\n        future_len = buffer_len + desc.length as usize,\n        \"got stream write\",\n    );\n\n    // We don't handle seeks yet.\n    if desc.offset != 0 {\n        bail!(\"seeking not supported\")\n    }\n\n    // Check for overrun.\n    let remaining = (stream.buffer_attr.max_length as usize).saturating_sub(buffer_len);\n    let overflow = 
payload.len().saturating_sub(remaining);\n    let payload = if overflow > 0 {\n        pulse::write_command_message(\n            &mut client.socket,\n            u32::MAX,\n            pulse::Command::Overflow(overflow as u32),\n            client.protocol_version,\n        )?;\n\n        &payload[..remaining as usize]\n    } else {\n        payload\n    };\n\n    if let StreamState::Prebuffering(n) = stream.state {\n        let needed = n.saturating_sub(payload.len() as u64);\n        if needed > 0 {\n            stream.state = StreamState::Prebuffering(needed)\n        } else {\n            debug!(\"starting playback for stream {}\", desc.channel);\n            pulse::write_command_message(\n                &mut client.socket,\n                u32::MAX,\n                pulse::Command::Started(desc.channel),\n                client.protocol_version,\n            )?;\n\n            stream.state = StreamState::Playing\n        }\n    }\n\n    // Read the data into the buffer.\n    stream.buffer.write(payload);\n    stream.requested_bytes = stream.requested_bytes.saturating_sub(payload.len());\n    stream.write_offset += payload.len() as u64;\n\n    Ok(())\n}\n\nfn configure_buffer(attr: &mut pulse::stream::BufferAttr, spec: &pulse::SampleSpec) {\n    let sample_size = spec.format.bytes_per_sample();\n    let frame_size = spec.channels as usize * sample_size;\n    let len_10ms = (frame_size * spec.sample_rate as usize / 100) as u32;\n\n    // Max length is min(200ms, client value).\n    if attr.max_length == u32::MAX {\n        attr.max_length = len_10ms * 20;\n    } else {\n        attr.max_length = attr\n            .max_length\n            .next_multiple_of(frame_size as u32)\n            .min(len_10ms * 100);\n    }\n\n    // Minimum request length is max(5ms, client value).\n    if attr.minimum_request_length == u32::MAX {\n        attr.minimum_request_length = (len_10ms / 2).next_multiple_of(frame_size as u32);\n    } else {\n        
attr.minimum_request_length = attr\n            .minimum_request_length\n            .next_multiple_of(frame_size as u32)\n            .max(len_10ms / 2);\n    }\n\n    // Target length should be a multiple of the minimum request length, and by\n    // default 20ms of audio.\n    if attr.target_length == u32::MAX {\n        attr.target_length = (len_10ms * 2)\n            .next_multiple_of(attr.minimum_request_length)\n            .min(attr.max_length);\n    } else {\n        attr.target_length = attr\n            .target_length\n            .next_multiple_of(attr.minimum_request_length)\n            .max(len_10ms)\n            .min(attr.max_length);\n\n        if attr.target_length < (attr.minimum_request_length * 2) {\n            attr.target_length = attr.minimum_request_length * 2;\n        }\n    }\n\n    // Prebuffering shouldn't be more than the target length.\n    if attr.pre_buffering == u32::MAX {\n        attr.pre_buffering = attr.target_length;\n    } else {\n        attr.pre_buffering = attr\n            .pre_buffering\n            .next_multiple_of(attr.minimum_request_length)\n            .min(attr.target_length);\n    }\n}\n\nfn write_reply<T: pulse::CommandReply + std::fmt::Debug>(\n    socket: &mut mio::net::UnixStream,\n    seq: u32,\n    reply: &T,\n    version: u16,\n) -> anyhow::Result<()> {\n    trace!(\"sending reply [{}] ({}): {:#?}\", seq, version, reply);\n    pulse::write_reply_message(socket, seq, reply, version)?;\n\n    Ok(())\n}\n"
  },
  {
    "path": "mm-server/src/session/audio.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{path::Path, sync::Arc};\n\nuse crate::{session::SessionHandle, waking_sender::WakingSender};\n\nmod buffer;\nmod pulse;\n\nuse anyhow::Context as _;\nuse bytes::BytesMut;\nuse crossbeam_channel as crossbeam;\nuse parking_lot::Mutex;\nuse pulse::PulseServer;\nuse tracing::error;\n\nuse super::AudioStreamParams;\n\nstruct EncodeFrame {\n    buf: Vec<f32>,\n    capture_ts: u64,\n}\n\nstruct Encoder {\n    thread_handle: Option<std::thread::JoinHandle<anyhow::Result<()>>>,\n    close_tx: crossbeam::Sender<()>,\n}\n\nimpl Drop for Encoder {\n    fn drop(&mut self) {\n        if let Some(handle) = self.thread_handle.take() {\n            let _ = self.close_tx.send(());\n\n            match handle.join() {\n                Ok(Ok(())) => (),\n                Ok(Err(e)) => error!(\"audio encoder thread died: {}\", e),\n                Err(_) => error!(\"audio encoder thread panicked\"),\n            }\n        }\n    }\n}\n\npub struct EncodePipeline {\n    server_thread_handle: Option<std::thread::JoinHandle<anyhow::Result<()>>>,\n    server_close_tx: WakingSender<()>,\n\n    compositor: SessionHandle,\n\n    encoder: Option<Encoder>,\n    done_tx: crossbeam::Sender<EncodeFrame>,\n    unencoded_rx: Arc<Mutex<crossbeam::Receiver<EncodeFrame>>>,\n}\n\nimpl EncodePipeline {\n    pub fn new(\n        compositor: SessionHandle,\n        xdg_runtime_dir: &Path,\n    ) -> anyhow::Result<EncodePipeline> {\n        // In this location, the server gets picked up without setting PULSE_SERVER\n        // explicitly.\n        std::fs::create_dir_all(Path::join(xdg_runtime_dir, \"pulse\"))?;\n        let socket_name = Path::join(xdg_runtime_dir, \"pulse/native\");\n\n        // The pulse server reads empty frames from the done channel, fills\n        // them, and sends them back over the undecoded channel.\n        let (unencoded_tx, unencoded_rx) = 
crossbeam::unbounded();\n        let (done_tx, done_rx) = crossbeam::unbounded();\n\n        let (mut server, close_tx) = PulseServer::new(&socket_name, unencoded_tx, done_rx)\n            .context(\"creating PulseAudio server\")?;\n\n        let server_handle = std::thread::Builder::new()\n            .name(format!(\"pulse server ({})\", socket_name.to_string_lossy()))\n            .spawn(move || server.run())?;\n\n        Ok(Self {\n            server_thread_handle: Some(server_handle),\n            server_close_tx: close_tx,\n\n            compositor,\n\n            encoder: None,\n            done_tx,\n            // We wrap the receiver in a mutex to ensure that only one encoder\n            // is interacting with the pulse server at a time (and because it's\n            // not Clone).\n            unencoded_rx: Arc::new(Mutex::new(unencoded_rx)),\n        })\n    }\n\n    pub fn stop_stream(&mut self) {\n        self.encoder = None;\n    }\n\n    pub fn restart_stream(&mut self, params: AudioStreamParams) -> anyhow::Result<()> {\n        // TODO: pass sample rate on input frames, do resampling on the pulse side.\n        // For now we only support 48khz stereo anyway.\n        assert_eq!(params.sample_rate, pulse::CAPTURE_SAMPLE_RATE);\n        assert_eq!(params.channels, pulse::CAPTURE_CHANNEL_COUNT);\n\n        assert!(self.encoder.is_none());\n        let done_tx = self.done_tx.clone();\n        let unencoded_rx = self.unencoded_rx.clone();\n\n        let (close_tx, close_rx) = crossbeam::unbounded();\n\n        let ch = match params.channels {\n            1 => opus::Channels::Mono,\n            2 => opus::Channels::Stereo,\n            _ => panic!(\"unsupported number of channels: {}\", params.channels),\n        };\n\n        let mut encoder = opus::Encoder::new(params.sample_rate, ch, opus::Application::LowDelay)\n            .context(\"failed to create opus encoder\")?;\n\n        let compositor = self.compositor.clone();\n        let thread_handle = 
std::thread::Builder::new()\n            .name(\"audio encode\".into())\n            .spawn(move || {\n                // Lock the receiver until the encoder thread exits.\n                let unencoded_rx = unencoded_rx.lock();\n\n                let mut signal_restart = true;\n\n                let mut buf = BytesMut::new();\n\n                let mut in_flight = 3;\n                for _ in 0..in_flight {\n                    if done_tx\n                        .send(EncodeFrame {\n                            buf: Vec::new(),\n                            capture_ts: 0,\n                        })\n                        .is_err()\n                    {\n                        return Ok(());\n                    }\n                }\n\n                let mut closing = false;\n                while in_flight > 0 {\n                    if let Ok(()) = close_rx.try_recv() {\n                        closing = true;\n                    }\n\n                    let frame = match unencoded_rx.recv() {\n                        Ok(frame) => frame,\n                        Err(_) => return Ok(()), // Pulse server hung up.\n                    };\n\n                    buf.resize(frame.buf.len(), 0);\n\n                    let len = encoder.encode_float(&frame.buf, &mut buf)?;\n                    compositor.dispatch_audio_frame(\n                        frame.capture_ts,\n                        buf.split_to(len).freeze(),\n                        signal_restart,\n                    );\n                    signal_restart = false;\n\n                    if !closing {\n                        match done_tx.send(frame) {\n                            Ok(()) => (),\n                            Err(_) => return Ok(()), // Pulse server hung up.\n                        }\n                    } else {\n                        in_flight -= 1;\n                    }\n                }\n\n                Ok(())\n            })?;\n\n        self.encoder = Some(Encoder {\n         
   thread_handle: Some(thread_handle),\n            close_tx,\n        });\n\n        Ok(())\n    }\n}\n\nimpl Drop for EncodePipeline {\n    fn drop(&mut self) {\n        let _ = self.server_close_tx.send(());\n\n        if let Some(handle) = self.server_thread_handle.take() {\n            match handle.join() {\n                Ok(Ok(())) => (),\n                Ok(Err(e)) => error!(\"pulseaudio server error: {}\", e),\n                Err(_) => error!(\"pulseaudio server panicked\"),\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/buffers/modifiers.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{os::fd::AsFd as _, sync::Arc};\n\nuse ash::vk;\nuse cstr::cstr;\nuse drm_fourcc::{DrmFormat, DrmFourcc};\nuse tracing::{debug, trace};\nuse wayland_protocols::wp::linux_dmabuf::zv1::server::zwp_linux_dmabuf_feedback_v1;\n\nuse crate::{\n    session::compositor::{sealed::SealedFile, Compositor},\n    vulkan::VkContext,\n};\n\n// Note that Mesa will throw out a format if either the opaque or alpha version\n// is missing. For example, Argb8888 requires Xrgb8888, and vice versa.\n/// (Fourcc, VkFormat, alpha, bpp)\npub const SUPPORTED_DRM_FORMATS: &[(DrmFourcc, vk::Format, bool, usize)] = &[\n    (DrmFourcc::Argb8888, vk::Format::B8G8R8A8_UNORM, false, 4),\n    (DrmFourcc::Xrgb8888, vk::Format::B8G8R8A8_UNORM, true, 4),\n    (DrmFourcc::Abgr8888, vk::Format::R8G8B8A8_UNORM, false, 4),\n    (DrmFourcc::Xbgr8888, vk::Format::R8G8B8A8_UNORM, true, 4),\n    (\n        DrmFourcc::Argb16161616f,\n        vk::Format::R16G16B16A16_SFLOAT,\n        false,\n        8,\n    ),\n    (\n        DrmFourcc::Xrgb16161616f,\n        vk::Format::R16G16B16A16_SFLOAT,\n        true,\n        8,\n    ),\n    (\n        DrmFourcc::Abgr16161616f,\n        vk::Format::R16G16B16A16_SFLOAT,\n        false,\n        8,\n    ),\n    (\n        DrmFourcc::Xbgr16161616f,\n        vk::Format::R16G16B16A16_SFLOAT,\n        true,\n        8,\n    ),\n];\n\npub fn fourcc_to_vk(fourcc: DrmFourcc) -> Option<(vk::Format, bool)> {\n    SUPPORTED_DRM_FORMATS\n        .iter()\n        .find(|(f, _, _, _)| *f == fourcc)\n        .map(|(_, vk, ignore_alpha, _)| (*vk, *ignore_alpha))\n}\n\npub fn fourcc_bpp(fourcc: DrmFourcc) -> Option<usize> {\n    SUPPORTED_DRM_FORMATS\n        .iter()\n        .find(|(f, _, _, _)| *f == fourcc)\n        .map(|(_, _, _, bpp)| *bpp)\n}\n\npub struct CachedDmabufFeedback {\n    drm_node: u64,\n    formats: Vec<DrmFormat>,\n    table: SealedFile,\n}\n\nimpl 
CachedDmabufFeedback {\n    pub fn contains(&self, modifier: u64) -> bool {\n        self.formats\n            .iter()\n            .any(|format| format.modifier == modifier)\n    }\n\n    pub fn new(vk: Arc<VkContext>) -> anyhow::Result<Self> {\n        let formats = unsafe {\n            SUPPORTED_DRM_FORMATS\n                .iter()\n                .flat_map(|(fourcc, format, _, _)| {\n                    let mods =\n                        query_drm_format_modifiers(&vk.instance, vk.device_info.pdevice, *format);\n\n                    mods.into_iter().filter_map(|props| {\n                        if props.drm_format_modifier_plane_count == 1 {\n                            let modifier = props.drm_format_modifier.into();\n                            assert!(verify_dmabuf_support(\n                                vk.clone(),\n                                *format,\n                                modifier,\n                                vk::ImageUsageFlags::SAMPLED,\n                            ));\n\n                            Some(DrmFormat {\n                                code: *fourcc,\n                                modifier,\n                            })\n                        } else {\n                            None\n                        }\n                    })\n                })\n                .collect::<Vec<_>>()\n        };\n\n        let mut table = vec![0_u8; 16 * formats.len()];\n        for (idx, format) in formats.iter().enumerate() {\n            let off = idx * 16;\n            let modifier: u64 = format.modifier.into();\n            let code = format.code as u32;\n            trace!(idx, code = ?format.code, code, modifier, \"adding format to table\");\n\n            table[off..off + 4].copy_from_slice(&code.to_ne_bytes());\n            table[off + 8..off + 16].copy_from_slice(&modifier.to_ne_bytes());\n        }\n\n        Ok(Self {\n            formats,\n            drm_node: vk.device_info.drm_node,\n            table: 
SealedFile::new(cstr!(\"dmabuf_formats\"), &table)?,\n        })\n    }\n}\n\nimpl Compositor {\n    pub fn emit_dmabuf_feedback(\n        &self,\n        feedback: &zwp_linux_dmabuf_feedback_v1::ZwpLinuxDmabufFeedbackV1,\n    ) {\n        let fb = &self.cached_dmabuf_feedback;\n        let dev = fb.drm_node.to_ne_bytes().to_vec();\n        feedback.main_device(dev.clone());\n        feedback.format_table(fb.table.as_fd(), fb.table.size() as u32);\n        feedback.tranche_target_device(dev.clone());\n        feedback.tranche_flags(zwp_linux_dmabuf_feedback_v1::TrancheFlags::empty());\n\n        let indices = (0..(fb.formats.len() as u16))\n            .flat_map(|i| i.to_ne_bytes())\n            .collect::<Vec<_>>();\n        feedback.tranche_formats(indices);\n        feedback.tranche_done();\n        feedback.done();\n    }\n}\n\nunsafe fn query_drm_format_modifiers(\n    instance: &ash::Instance,\n    device: vk::PhysicalDevice,\n    format: vk::Format,\n) -> Vec<vk::DrmFormatModifierPropertiesEXT> {\n    let count = {\n        let mut modifiers = vk::DrmFormatModifierPropertiesListEXT::default();\n        let mut format_props = vk::FormatProperties2::default().push_next(&mut modifiers);\n\n        instance.get_physical_device_format_properties2(device, format, &mut format_props);\n        modifiers.drm_format_modifier_count\n    };\n\n    let mut res = vec![vk::DrmFormatModifierPropertiesEXT::default(); count as usize];\n    let mut modifiers =\n        vk::DrmFormatModifierPropertiesListEXT::default().drm_format_modifier_properties(&mut res);\n    let mut format_props = vk::FormatProperties2::default().push_next(&mut modifiers);\n    instance.get_physical_device_format_properties2(device, format, &mut format_props);\n\n    res\n}\n\npub unsafe fn verify_dmabuf_support(\n    vk: Arc<VkContext>,\n    format: vk::Format,\n    modifier: drm_fourcc::DrmModifier,\n    usage: vk::ImageUsageFlags,\n) -> bool {\n    let mut drm_props = 
vk::ExternalImageFormatProperties::default();\n    let mut props = vk::ImageFormatProperties2::default().push_next(&mut drm_props);\n\n    let mut modifier_info = vk::PhysicalDeviceImageDrmFormatModifierInfoEXT::default()\n        .drm_format_modifier(modifier.into());\n\n    let mut external_format_info = vk::PhysicalDeviceExternalImageFormatInfo::default()\n        .handle_type(vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT);\n\n    let format_info = vk::PhysicalDeviceImageFormatInfo2::default()\n        .format(format)\n        .ty(vk::ImageType::TYPE_2D)\n        .usage(usage)\n        .tiling(vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT)\n        .push_next(&mut external_format_info)\n        .push_next(&mut modifier_info);\n\n    match vk.instance.get_physical_device_image_format_properties2(\n        vk.device_info.pdevice,\n        &format_info,\n        &mut props,\n    ) {\n        Ok(_) => (),\n        Err(_) => {\n            debug!(?format, ?modifier, \"format not supported for dma import\");\n            return false;\n        }\n    }\n\n    drm_props\n        .external_memory_properties\n        .compatible_handle_types\n        .contains(vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT)\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/buffers/syncobj_timeline.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    io,\n    os::fd::{AsFd as _, OwnedFd},\n    sync::Arc,\n};\n\nuse ash::vk;\nuse drm::control::{syncobj, Device as _};\nuse tracing::{instrument, trace};\nuse wayland_protocols::wp::linux_drm_syncobj::v1::server::wp_linux_drm_syncobj_timeline_v1;\n\nuse crate::vulkan::VkContext;\n\nslotmap::new_key_type! { pub struct SyncobjTimelineKey; }\n\npub struct SyncobjTimeline(Arc<TimelineHandle>);\n\nstruct TimelineHandle {\n    pub _wp_syncobj_timeline: wp_linux_drm_syncobj_timeline_v1::WpLinuxDrmSyncobjTimelineV1,\n    handle: syncobj::Handle,\n    vk: Arc<VkContext>,\n}\n\nimpl Drop for TimelineHandle {\n    fn drop(&mut self) {\n        let _ = self.vk.drm_device.destroy_syncobj(self.handle);\n    }\n}\n\n#[derive(Clone)]\npub struct SyncobjTimelinePoint {\n    pub value: u64,\n    handle: Arc<TimelineHandle>,\n}\n\nimpl SyncobjTimelinePoint {\n    pub fn signal(&self) -> io::Result<()> {\n        trace!(handle = ?self.handle.handle, value = self.value, \"signaling timeline point\");\n\n        self.handle\n            .vk\n            .drm_device\n            .syncobj_timeline_signal(&[self.handle.handle], &[self.value])\n    }\n\n    #[instrument(skip_all)]\n    pub fn import_as_semaphore(&self, semaphore: vk::Semaphore) -> anyhow::Result<()> {\n        trace!(\n            value = self.value,\n            ?semaphore,\n            \"importing timeline point as semaphore\"\n        );\n\n        let device = &self.handle.vk.drm_device;\n\n        // First, we export a sync file by creating a new syncobj and copying\n        // the timeline point to 0 on the new syncobj.\n        let syncobj = device.create_syncobj(false)?;\n        scopeguard::defer! 
{\n            self.handle.vk\n                .drm_device\n                .destroy_syncobj(syncobj)\n                .expect(\"failed to destroy syncobj\")\n        };\n\n        device.syncobj_timeline_transfer(self.handle.handle, syncobj, self.value, 0)?;\n        let sync_fd = device.syncobj_to_fd(syncobj, true)?;\n\n        // Then we can import it into a vulkan semaphore.\n        unsafe { super::import_sync_file_as_semaphore(self.handle.vk.clone(), sync_fd, semaphore) }\n    }\n}\n\nimpl SyncobjTimeline {\n    pub fn import(\n        vk: Arc<VkContext>,\n        wp_syncobj_timeline: wp_linux_drm_syncobj_timeline_v1::WpLinuxDrmSyncobjTimelineV1,\n        fd: OwnedFd,\n    ) -> io::Result<Self> {\n        let handle = vk.drm_device.fd_to_syncobj(fd.as_fd(), false)?;\n\n        Ok(Self(Arc::new(TimelineHandle {\n            _wp_syncobj_timeline: wp_syncobj_timeline,\n            handle,\n            vk,\n        })))\n    }\n\n    pub fn new_timeline_point(&self, value: u64) -> SyncobjTimelinePoint {\n        SyncobjTimelinePoint {\n            value,\n            handle: self.0.clone(),\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/buffers.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nmod modifiers;\nmod syncobj_timeline;\n\nuse std::{\n    collections::BTreeSet,\n    os::fd::{AsFd, AsRawFd, FromRawFd as _, IntoRawFd as _, OwnedFd},\n    sync::{Arc, RwLock},\n};\n\nuse anyhow::{bail, Context as _};\nuse ash::vk;\nuse drm_fourcc::DrmModifier;\npub use modifiers::*;\npub use syncobj_timeline::*;\nuse tracing::{instrument, trace};\nuse wayland_server::{protocol::wl_buffer, Resource as _};\n\nuse crate::{\n    session::compositor::{shm::Pool, Compositor},\n    vulkan::{create_image_view, select_memory_type, VkContext, VkHostBuffer, VkImage},\n};\n\nslotmap::new_key_type! { pub struct BufferKey; }\n\npub struct Buffer {\n    pub wl_buffer: wl_buffer::WlBuffer,\n    pub backing: BufferBacking,\n\n    /// Next time we release this buffer, we should destroy it as well.\n    pub needs_destruction: bool,\n}\n\nimpl Buffer {\n    pub fn dimensions(&self) -> glam::UVec2 {\n        match self.backing {\n            BufferBacking::Shm { format, .. } => (format.width, format.height).into(),\n            BufferBacking::Dmabuf { format, .. 
} => (format.width, format.height).into(),\n        }\n    }\n}\n\npub enum BufferBacking {\n    Shm {\n        format: PlaneMetadata,\n        pool: Arc<RwLock<Pool>>,\n        staging_buffer: VkHostBuffer,\n        image: VkImage,\n\n        /// Indicates that staging_buffer has been written to and needs to\n        /// be uploaded to the image.\n        dirty: bool,\n    },\n    Dmabuf {\n        format: PlaneMetadata,\n        fd: OwnedFd,\n        image: VkImage,\n    },\n}\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub struct PlaneMetadata {\n    pub format: drm_fourcc::DrmFourcc,\n    pub bpp: usize,\n    pub width: u32,\n    pub height: u32,\n    pub stride: u32,\n    pub offset: u32,\n}\n\nimpl Compositor {\n    #[instrument(skip_all)]\n    pub fn release_buffers(&mut self) -> anyhow::Result<()> {\n        // Check if any content updates have finished.\n        let mut still_in_flight = Vec::new();\n        for content in self.in_flight_buffers.drain(..) {\n            if let Some(tp) = &content.tp_done {\n                if unsafe { !tp.poll()? 
} {\n                    // The frame using this content is still in-progress.\n                    still_in_flight.push(content);\n                    continue;\n                }\n            }\n\n            if content.needs_release {\n                let buffer = self\n                    .buffers\n                    .get(content.buffer)\n                    .expect(\"buffer has no entry\");\n\n                trace!(\n                    wl_buffer = buffer.wl_buffer.id().protocol_id(),\n                    \"explicitly releasing buffer\"\n                );\n\n                buffer.wl_buffer.release();\n            }\n\n            if let Some((_, release)) = content.explicit_sync {\n                release.signal()?;\n            }\n\n            // If we didn't move the presentation feedback into a separate queue,\n            // that means we didn't use the content update and we should relate\n            // that to the client.\n            if let Some(feedback) = &content.wp_presentation_feedback {\n                feedback.discarded();\n            }\n        }\n\n        self.in_flight_buffers = still_in_flight;\n\n        // A buffer is in use if it's either part of an in-flight frame, or if\n        // we're holding on to it because the client hasn't committed a new one\n        // yet, and we may need to display it again.\n        let used_buffers: BTreeSet<BufferKey> = self\n            .surfaces\n            .values()\n            .flat_map(|s| &s.content)\n            .chain(self.in_flight_buffers.iter())\n            .map(|c| c.buffer)\n            .collect();\n\n        self.buffers.retain(|id, buffer| {\n            if !buffer.needs_destruction || used_buffers.contains(&id) {\n                true\n            } else {\n                assert!(!buffer.wl_buffer.is_alive());\n                trace!(\n                    wl_buffer = buffer.wl_buffer.id().protocol_id(),\n                    \"destroying buffer\"\n                );\n\n            
    false\n            }\n        });\n\n        Ok(())\n    }\n}\n\n#[instrument(skip_all)]\npub fn import_shm_buffer(\n    vk: Arc<VkContext>,\n    wl_buffer: wl_buffer::WlBuffer,\n    pool: Arc<RwLock<Pool>>,\n    format: PlaneMetadata,\n) -> anyhow::Result<Buffer> {\n    let (vk_format, ignore_alpha) = match format.format {\n        drm_fourcc::DrmFourcc::Argb8888 => (vk::Format::B8G8R8A8_UNORM, false),\n        drm_fourcc::DrmFourcc::Xrgb8888 => (vk::Format::B8G8R8A8_UNORM, true),\n        _ => unreachable!(),\n    };\n\n    let len = format.stride * format.height;\n    trace!(?format, len, \"importing shm buffer\");\n\n    let staging_buffer = VkHostBuffer::new(\n        vk.clone(),\n        vk.device_info.host_visible_mem_type_index,\n        vk::BufferUsageFlags::TRANSFER_SRC,\n        len as usize,\n    )?;\n\n    let image = VkImage::new(\n        vk.clone(),\n        vk_format,\n        ignore_alpha,\n        format.width,\n        format.height,\n        vk::ImageUsageFlags::TRANSFER_DST | vk::ImageUsageFlags::SAMPLED,\n        vk::SharingMode::EXCLUSIVE,\n        vk::ImageCreateFlags::empty(),\n    )?;\n\n    Ok(Buffer {\n        wl_buffer,\n        backing: BufferBacking::Shm {\n            pool,\n            staging_buffer,\n            image,\n            format,\n            dirty: true,\n        },\n        needs_destruction: false,\n    })\n}\n\n#[instrument(skip_all)]\npub fn import_dmabuf_buffer(\n    vk: Arc<VkContext>,\n    wl_buffer: wl_buffer::WlBuffer,\n    format: PlaneMetadata,\n    modifier: DrmModifier,\n    fd: OwnedFd,\n) -> anyhow::Result<Buffer> {\n    let PlaneMetadata {\n        format: fourcc,\n        width,\n        height,\n        stride,\n        offset,\n        ..\n    } = format;\n\n    let (vk_format, ignore_alpha) = match modifiers::fourcc_to_vk(fourcc) {\n        Some(format) => format,\n        None => bail!(\"unsupported dmabuf format: {:?}\", format),\n    };\n\n    unsafe {\n        if 
!modifiers::verify_dmabuf_support(\n            vk.clone(),\n            vk_format,\n            modifier,\n            vk::ImageUsageFlags::SAMPLED,\n        ) {\n            bail!(\"unsupported dmabuf format: {:?}\", vk_format);\n        }\n    }\n\n    trace!(\n        ?fourcc,\n        ?vk_format,\n        width,\n        height,\n        offset,\n        stride,\n        fd = fd.as_fd().as_raw_fd(),\n        \"importing dmabuf texture\"\n    );\n\n    // Vulkan wants to own the file descriptor, so we create a dup'd one just for\n    // the driver.\n    let vk_fd = fd.as_fd().try_clone_to_owned()?;\n\n    let image = {\n        let plane_layouts = [vk::SubresourceLayout {\n            offset: offset as u64,\n            size: 0, // Must be zero, according to the spec.\n            row_pitch: stride as u64,\n            ..Default::default()\n        }];\n\n        let mut format_modifier_info = vk::ImageDrmFormatModifierExplicitCreateInfoEXT::default()\n            .drm_format_modifier(modifier.into())\n            .plane_layouts(&plane_layouts);\n\n        let mut external_memory_info = vk::ExternalMemoryImageCreateInfo::default()\n            .handle_types(vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT);\n\n        let create_info = vk::ImageCreateInfo::default()\n            .image_type(vk::ImageType::TYPE_2D)\n            .format(vk_format)\n            .extent(vk::Extent3D {\n                width,\n                height,\n                depth: 1,\n            })\n            .mip_levels(1)\n            .array_layers(1)\n            .tiling(vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT)\n            .samples(vk::SampleCountFlags::TYPE_1)\n            .usage(vk::ImageUsageFlags::SAMPLED)\n            .sharing_mode(vk::SharingMode::EXCLUSIVE)\n            .initial_layout(vk::ImageLayout::UNDEFINED)\n            .push_next(&mut external_memory_info)\n            .push_next(&mut format_modifier_info);\n\n        unsafe { vk.device.create_image(&create_info, 
None).unwrap() }\n    };\n\n    let memory = {\n        let mut fd_props = vk::MemoryFdPropertiesKHR::default();\n\n        unsafe {\n            vk.external_memory_api.get_memory_fd_properties(\n                vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT,\n                vk_fd.as_raw_fd(),\n                &mut fd_props,\n            )?;\n        };\n\n        let image_memory_req = unsafe { vk.device.get_image_memory_requirements(image) };\n        let memory_type_index = select_memory_type(\n            &vk.device_info.memory_props,\n            vk::MemoryPropertyFlags::empty(),\n            Some(image_memory_req.memory_type_bits & fd_props.memory_type_bits),\n        );\n\n        trace!(\n            ?fd_props,\n            ?memory_type_index,\n            ?image_memory_req,\n            \"memory import for dmabuf\"\n        );\n\n        let mut external_mem_info = vk::ImportMemoryFdInfoKHR::default()\n            .handle_type(vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT)\n            .fd(vk_fd.into_raw_fd()); // Vulkan owns the fd now.\n\n        // Technically we can query whether this is required, but it doesn't\n        // hurt anyways. It seems to be only required on some NVIDIA cards.\n        let mut dedicated_memory_info = vk::MemoryDedicatedAllocateInfo::default().image(image);\n\n        let image_allocate_info = vk::MemoryAllocateInfo::default()\n            .allocation_size(image_memory_req.size)\n            .push_next(&mut external_mem_info)\n            .push_next(&mut dedicated_memory_info);\n\n        unsafe { vk.device.allocate_memory(&image_allocate_info, None)? }\n    };\n\n    unsafe {\n        vk.device.bind_image_memory(image, memory, 0)?;\n    }\n\n    let view = unsafe { create_image_view(&vk.device, image, vk_format, ignore_alpha)? 
};\n    let image = VkImage::wrap(vk.clone(), image, view, memory, vk_format, width, height);\n\n    Ok(Buffer {\n        wl_buffer,\n        backing: BufferBacking::Dmabuf { format, fd, image },\n        needs_destruction: false,\n    })\n}\n\npub fn validate_buffer_parameters(\n    offset: i32,\n    width: i32,\n    height: i32,\n    stride: i32,\n    bpp: usize,\n) -> Result<(), String> {\n    if offset < 0 {\n        return Err(\"Negative offset.\".to_string());\n    }\n\n    if width <= 0 || height <= 0 {\n        return Err(\"Invalid height or width.\".to_string());\n    }\n\n    if stride <= 0\n        || stride.checked_div(bpp as i32).unwrap_or(0) < width\n        || stride.checked_mul(height).is_none()\n    {\n        return Err(\"Invalid stride.\".to_string());\n    }\n\n    if let Some(size) = stride.checked_mul(height) {\n        if offset.checked_add(size).is_none() {\n            return Err(\"Invalid offset.\".to_string());\n        }\n    } else {\n        return Err(\"Invalid total size.\".to_string());\n    }\n\n    Ok(())\n}\n\n#[allow(dead_code)]\nmod ioctl {\n    use std::{ffi::c_void, os::fd::RawFd};\n\n    use rustix::{\n        io::Errno,\n        ioctl::{opcode, Opcode},\n    };\n\n    pub(super) const DMA_BUF_SYNC_READ: u32 = 1 << 0;\n    pub(super) const DMA_BUF_SYNC_WRITE: u32 = 1 << 1;\n\n    #[repr(C)]\n    #[allow(non_camel_case_types)]\n    struct dma_buf_export_sync_file {\n        pub flags: u32,\n        pub fd: i32,\n    }\n\n    #[repr(C)]\n    #[allow(non_camel_case_types)]\n    struct dma_buf_import_sync_file {\n        pub flags: u32,\n        pub fd: i32,\n    }\n\n    pub(super) struct ExportSyncFile(dma_buf_export_sync_file);\n\n    impl ExportSyncFile {\n        pub(super) fn new(flags: u32) -> Self {\n            Self(dma_buf_export_sync_file { flags, fd: -1 })\n        }\n    }\n\n    pub(super) struct ImportSyncFile(dma_buf_import_sync_file);\n\n    impl ImportSyncFile {\n        pub(super) fn new(fd: RawFd, flags: u32) 
-> Self {\n            Self(dma_buf_import_sync_file { flags, fd })\n        }\n    }\n\n    unsafe impl rustix::ioctl::Ioctl for ExportSyncFile {\n        type Output = RawFd;\n\n        const IS_MUTATING: bool = true;\n\n        fn opcode(&self) -> Opcode {\n            opcode::read_write::<dma_buf_export_sync_file>(b'b', 2)\n        }\n\n        fn as_ptr(&mut self) -> *mut c_void {\n            &mut self.0 as *mut dma_buf_export_sync_file as _\n        }\n\n        unsafe fn output_from_ptr(\n            out: rustix::ioctl::IoctlOutput,\n            extract_output: *mut c_void,\n        ) -> rustix::io::Result<Self::Output> {\n            let res: &mut dma_buf_export_sync_file = &mut *(extract_output as *mut _);\n            if out != 0 {\n                Err(rustix::io::Errno::from_raw_os_error(out))\n            } else if res.fd <= 0 {\n                Err(Errno::INVAL)\n            } else {\n                Ok(res.fd)\n            }\n        }\n    }\n\n    unsafe impl rustix::ioctl::Ioctl for ImportSyncFile {\n        type Output = ();\n\n        const IS_MUTATING: bool = true;\n\n        fn opcode(&self) -> Opcode {\n            opcode::write::<dma_buf_import_sync_file>(b'b', 3)\n        }\n\n        fn as_ptr(&mut self) -> *mut c_void {\n            &mut self.0 as *mut dma_buf_import_sync_file as _\n        }\n\n        unsafe fn output_from_ptr(\n            out: rustix::ioctl::IoctlOutput,\n            _: *mut c_void,\n        ) -> rustix::io::Result<Self::Output> {\n            if out == 0 {\n                Ok(())\n            } else {\n                Err(Errno::from_raw_os_error(out))\n            }\n        }\n    }\n}\n\n/// Retrieves a dmabuf fence, and uses it to set a semaphore. The semaphore will\n/// be triggered when the dmabuf texture is safe to read. 
Note that the spec\n/// insists that the semaphore must be waited on once set this way.\n#[instrument(skip_all)]\npub fn import_dmabuf_fence_as_semaphore(\n    vk: Arc<VkContext>,\n    semaphore: vk::Semaphore,\n    fd: impl AsFd,\n) -> anyhow::Result<()> {\n    let fd = fd.as_fd();\n    let sync_fd = unsafe { export_sync_file(fd, ioctl::DMA_BUF_SYNC_READ)? };\n\n    unsafe { import_sync_file_as_semaphore(vk, sync_fd, semaphore) }\n}\n\n#[instrument(skip_all)]\npub unsafe fn import_sync_file_as_semaphore(\n    vk: Arc<VkContext>,\n    fd: OwnedFd,\n    semaphore: vk::Semaphore,\n) -> anyhow::Result<()> {\n    let import_info = vk::ImportSemaphoreFdInfoKHR::default()\n        .semaphore(semaphore)\n        .handle_type(vk::ExternalSemaphoreHandleTypeFlags::SYNC_FD)\n        .flags(vk::SemaphoreImportFlags::TEMPORARY)\n        .fd(fd.into_raw_fd()); // Vulkan owns the fd now.\n\n    vk.external_semaphore_api\n        .import_semaphore_fd(&import_info)?;\n\n    Ok(())\n}\n\n/// Retrieves the fd of a sync file for a dmabuf.\npub unsafe fn export_sync_file(dmabuf: impl AsFd, flags: u32) -> anyhow::Result<OwnedFd> {\n    let raw_fd = rustix::ioctl::ioctl(dmabuf, ioctl::ExportSyncFile::new(flags))\n        .context(\"DMA_BUF_IOCTL_EXPORT_SYNC_FILE\")?;\n    Ok(OwnedFd::from_raw_fd(raw_fd))\n}\n\n/// Attaches a sync file to a dmabuf.\n// TODO: the kernel docs and online resources state that we need to use this to\n// attach a \"render finished\" semaphore back onto the client buffers once we\n// start rendering. 
I think that's unnecessary as long as we wait to call\n// `wl_buffer.release` until long after we're done compositing, which we do as\n// of this writing.\n#[allow(dead_code)]\npub unsafe fn attach_sync_file(\n    dmabuf: impl AsFd,\n    flags: u32,\n    sync_file: OwnedFd, // Closed on return.\n) -> anyhow::Result<()> {\n    rustix::ioctl::ioctl(\n        dmabuf,\n        ioctl::ImportSyncFile::new(sync_file.as_raw_fd(), flags),\n    )\n    .context(\"DMA_BUF_IOCTL_IMPORT_SYNC_FILE\")?;\n\n    Ok(())\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/shm.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{cell::RefCell, os::fd::AsRawFd as _, rc::Rc};\n\nuse shipyard::{AddComponent as _, Component, EntityId, Get as _, NonSendSync, ViewMut};\nuse tracing::error;\nuse wayland_server::{\n    protocol::{wl_shm, wl_shm_pool},\n    Resource as _,\n};\n\nuse crate::compositor::{\n    buffers::{validate_buffer_parameters, Buffer, PlaneMetadata},\n    shm::Pool,\n    State,\n};\n\n#[derive(Component, Debug)]\npub struct ShmPool {\n    wl_shm: wl_shm::WlShm,\n    wl_shm_pool: wl_shm_pool::WlShmPool,\n    pool: Rc<RefCell<Pool>>,\n}\n\nimpl wayland_server::GlobalDispatch<wl_shm::WlShm, ()> for State {\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wl_shm::WlShm>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<wl_shm::WlShm, ()> for State {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        wl_shm: &wl_shm::WlShm,\n        request: wl_shm::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_shm::Request::CreatePool { id, fd, size } => {\n                if size <= 0 {\n                    wl_shm.post_error(\n                        wl_shm::Error::InvalidStride,\n                        \"Negative or zero size provided.\",\n                    );\n                }\n\n                let fd_debug = fd.as_raw_fd();\n                let pool = match Pool::new(fd, size as usize) {\n                    Ok(p) => p,\n                    Err(err) => {\n                        error!(?err, fd = fd_debug, size, \"failed to map client 
shm\");\n                        wl_shm.post_error(wl_shm::Error::InvalidFd, \"mmap failed.\");\n                        return;\n                    }\n                };\n\n                let entity_id = state.world.add_entity(());\n                let wl_shm_pool = data_init.init(id, entity_id);\n\n                // Required because Pool is not send or sync.\n                let mut vm = state\n                    .world\n                    .borrow::<NonSendSync<ViewMut<ShmPool>>>()\n                    .expect(\"borrow failed\");\n\n                // The pool shouldn't be unmapped until all buffers referencing it have been\n                // destroyed. We represent this with an Rc.\n                vm.add_component_unchecked(\n                    entity_id,\n                    ShmPool {\n                        wl_shm: wl_shm.clone(),\n                        wl_shm_pool,\n                        pool: Rc::new(RefCell::new(pool)),\n                    },\n                );\n            }\n            _ => unreachable!(),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<wl_shm_pool::WlShmPool, EntityId> for State {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &wl_shm_pool::WlShmPool,\n        request: wl_shm_pool::Request,\n        entity_id: &EntityId,\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_shm_pool::Request::CreateBuffer {\n                id,\n                offset,\n                width,\n                height,\n                stride,\n                format,\n            } => {\n                let vm = state\n                    .world\n                    .borrow::<NonSendSync<ViewMut<ShmPool>>>()\n                    .expect(\"borrow failed\");\n\n                let shm_pool = vm.get(*entity_id).expect(\"pool has no entity\");\n\n                if 
!matches!(\n                    format.into_result(),\n                    Ok(wl_shm::Format::Argb8888) | Ok(wl_shm::Format::Xrgb8888)\n                ) {\n                    resource.post_error(wl_shm::Error::InvalidFormat, \"Invalid format.\");\n                    return;\n                }\n\n                if let Err(msg) = validate_buffer_parameters(offset, width, height, stride, 4) {\n                    resource.post_error(wl_shm::Error::InvalidStride, msg);\n                    return;\n                }\n\n                let buffer_size = stride * height;\n                if (offset + buffer_size) as usize > shm_pool.pool.borrow().size {\n                    resource\n                        .post_error(wl_shm::Error::InvalidStride, \"Size exceeds pool capacity.\");\n                    return;\n                }\n\n                let entity_id = state.world.add_entity(());\n                let wl_buffer = data_init.init(id, entity_id);\n\n                let buffer = Buffer::Shm {\n                    wl_buffer,\n                    offset: offset as u32,\n                    pool: shm_pool.pool.clone(),\n                    metadata: PlaneMetadata {\n                        width: width as u32,\n                        height: height as u32,\n                        stride: stride as u32,\n                        bpp: 4,\n                    },\n                };\n\n                let mut vm = state\n                    .world\n                    .borrow::<NonSendSync<ViewMut<Buffer>>>()\n                    .expect(\"borrow failed\");\n                vm.add_component_unchecked(entity_id, buffer);\n            }\n            wl_shm_pool::Request::Resize { size } => {\n                let vm = state\n                    .world\n                    .borrow::<NonSendSync<ViewMut<ShmPool>>>()\n                    .expect(\"borrow failed\");\n\n                let shm_pool = vm.get(*entity_id).expect(\"pool has no entity\");\n\n                let 
mut pool = shm_pool.pool.borrow_mut();\n                if size <= pool.size as i32 {\n                    resource.post_error(wl_shm::Error::InvalidStride, \"Invalid size provided.\");\n                    return;\n                }\n\n                match pool.resize(size as usize) {\n                    Ok(_) => (),\n                    Err(err) => {\n                        error!(?err, \"failed to remap shm\");\n                        resource.post_error(wl_shm::Error::InvalidFd, \"mmap operation failed.\");\n                    }\n                }\n            }\n            wl_shm_pool::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        _resource: &wl_shm_pool::WlShmPool,\n        entity_id: &EntityId,\n    ) {\n        // Buffers continue to be valid after their backing pool is destroyed.\n        state.world.delete_entity(*entity_id);\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wl_buffer.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_server::protocol::wl_buffer;\n\nuse crate::session::compositor::{buffers::BufferKey, Compositor};\n\nimpl wayland_server::Dispatch<wl_buffer::WlBuffer, BufferKey> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_buffer::WlBuffer,\n        request: wl_buffer::Request,\n        _data: &BufferKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_buffer::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        _resource: &wl_buffer::WlBuffer,\n        data: &BufferKey,\n    ) {\n        // We can't destroy the buffer until it's released. This marks it for\n        // destruction later.\n        if let Some(buffer) = state.buffers.get_mut(*data) {\n            buffer.needs_destruction = true;\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wl_compositor.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse tracing::warn;\nuse wayland_server::{\n    protocol::{wl_callback, wl_compositor, wl_output, wl_region, wl_surface},\n    Resource as _,\n};\n\nuse crate::{\n    pixel_scale::PixelScale,\n    session::compositor::{\n        surface::{CommitError, PendingBuffer, Surface, SurfaceKey},\n        Compositor,\n    },\n};\n\nimpl wayland_server::GlobalDispatch<wl_compositor::WlCompositor, ()> for Compositor {\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wl_compositor::WlCompositor>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<wl_compositor::WlCompositor, ()> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_compositor::WlCompositor,\n        request: wl_compositor::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_compositor::Request::CreateSurface { id } => {\n                state\n                    .surfaces\n                    .insert_with_key(|k| Surface::new(data_init.init(id, k)));\n            }\n            wl_compositor::Request::CreateRegion { id } => {\n                // We don't do anything with regions.\n                data_init.init(id, ());\n            }\n            _ => unreachable!(),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<wl_surface::WlSurface, SurfaceKey> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &wl_surface::WlSurface,\n        request: wl_surface::Request,\n      
  data: &SurfaceKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_surface::Request::Attach { buffer, x, y } => {\n                if x != 0 || y != 0 {\n                    warn!(x, y, \"ignoring nonzero buffer x/y offset\")\n                }\n\n                state\n                    .surfaces\n                    .get_mut(*data)\n                    .expect(\"surface has no entry\")\n                    .pending_buffer = match buffer {\n                    Some(buf) => {\n                        let buffer_id = *buf.data().expect(\"buffer has no userdata\");\n                        Some(PendingBuffer::Attach(buffer_id))\n                    }\n                    None => Some(PendingBuffer::Detach),\n                };\n            }\n            wl_surface::Request::Frame { callback } => {\n                let callback = data_init.init(callback, *data);\n                state\n                    .surfaces\n                    .get_mut(*data)\n                    .expect(\"surface has no entry\")\n                    .frame_callback\n                    .pending = Some(callback);\n            }\n            wl_surface::Request::Commit => {\n                if let Err(CommitError(code, msg)) = state.surface_commit(*data) {\n                    resource.post_error(code, msg);\n                }\n            }\n\n            wl_surface::Request::SetBufferTransform { transform } => {\n                if !matches!(transform.into_result(), Ok(wl_output::Transform::Normal)) {\n                    warn!(?transform, \"ignoring nonzero buffer rotation\");\n                }\n            }\n            wl_surface::Request::SetBufferScale { scale } => {\n                if scale < 1 {\n                    resource.post_error(wl_surface::Error::InvalidScale, \"Scale must be >= 1\");\n                    return;\n                }\n\n                
state\n                    .surfaces\n                    .get_mut(*data)\n                    .expect(\"surface has no entry\")\n                    .buffer_scale\n                    .pending = Some(PixelScale(scale as u32, 1));\n            }\n            wl_surface::Request::Offset { x, y } => {\n                if x != 0 || y != 0 {\n                    warn!(x, y, \"ignoring nonzero buffer offset\");\n                }\n            }\n            // We ignore damage and don't do any related optimizations.\n            wl_surface::Request::DamageBuffer { .. } => (),\n            wl_surface::Request::Damage { .. } => (),\n            // We ignore input and opaque regions, because we don't support subcompositing.\n            wl_surface::Request::SetOpaqueRegion { .. } => (),\n            wl_surface::Request::SetInputRegion { .. } => (),\n            wl_surface::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        _resource: &wl_surface::WlSurface,\n        data: &SurfaceKey,\n    ) {\n        state.surface_destroyed(*data);\n    }\n}\n\nimpl wayland_server::Dispatch<wl_callback::WlCallback, SurfaceKey> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_callback::WlCallback,\n        _request: wl_callback::Request,\n        _data: &SurfaceKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n}\n\nimpl wayland_server::Dispatch<wl_region::WlRegion, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_region::WlRegion,\n        _request: wl_region::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n  
  }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wl_data_device_manager.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_server::protocol::{wl_data_device, wl_data_device_manager, wl_data_source};\n\nuse crate::session::compositor::Compositor;\n\n// We offer a stubbed version of this protocol, because GTK chokes without it\n// being present.\n\nimpl wayland_server::GlobalDispatch<wl_data_device_manager::WlDataDeviceManager, ()>\n    for Compositor\n{\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wl_data_device_manager::WlDataDeviceManager>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<wl_data_device_manager::WlDataDeviceManager, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_data_device_manager::WlDataDeviceManager,\n        request: wl_data_device_manager::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_data_device_manager::Request::CreateDataSource { id } => {\n                data_init.init(id, ());\n            }\n            wl_data_device_manager::Request::GetDataDevice { id, .. 
} => {\n                data_init.init(id, ());\n            }\n            _ => (),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<wl_data_source::WlDataSource, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_data_source::WlDataSource,\n        _request: wl_data_source::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n}\n\nimpl wayland_server::Dispatch<wl_data_device::WlDataDevice, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_data_device::WlDataDevice,\n        _request: wl_data_device::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wl_drm.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse crate::session::compositor::{protocols::wl_drm, Compositor};\n\nimpl wayland_server::GlobalDispatch<wl_drm::WlDrm, ()> for Compositor {\n    fn bind(\n        state: &mut Compositor,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wl_drm::WlDrm>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Compositor>,\n    ) {\n        let wl_drm = data_init.init(resource, ());\n        wl_drm.device(\n            dev_path(state.vk.device_info.drm_node).expect(\"failed to determine device node\"),\n        );\n    }\n}\n\nimpl wayland_server::Dispatch<wl_drm::WlDrm, ()> for Compositor {\n    fn request(\n        _state: &mut Compositor,\n        _client: &wayland_server::Client,\n        _resource: &wl_drm::WlDrm,\n        _request: wl_drm::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Compositor>,\n    ) {\n    }\n}\n\npub fn dev_path(dev: libc::dev_t) -> std::io::Result<String> {\n    let (major, minor) = unsafe { (libc::major(dev), libc::minor(dev)) };\n\n    assert_eq!(major, 226, \"not a DRM device\");\n    assert!(minor >= 128, \"not a render node\");\n\n    for f in std::fs::read_dir(format!(\"/sys/dev/char/{}:{}/device/drm\", major, minor))?.flatten() {\n        let name = f.file_name();\n        let name = name.to_string_lossy();\n\n        if name.starts_with(\"renderD\") {\n            let path = format!(\"/dev/dri/{}\", name);\n            std::fs::metadata(&path)?;\n            return Ok(path);\n        }\n    }\n\n    Err(std::io::Error::new(\n        std::io::ErrorKind::NotFound,\n        \"no render node found\",\n    ))\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wl_output.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_server::protocol::wl_output;\n\nuse crate::session::compositor::{output::configure_output, Compositor};\n\nimpl wayland_server::GlobalDispatch<wl_output::WlOutput, ()> for Compositor {\n    fn bind(\n        state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wl_output::WlOutput>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        let wl_output = data_init.init(resource, ());\n        configure_output(&wl_output, state.display_params);\n\n        state.output_proxies.push(wl_output);\n    }\n}\n\nimpl wayland_server::Dispatch<wl_output::WlOutput, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_output::WlOutput,\n        _request: wl_output::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        resource: &wl_output::WlOutput,\n        _data: &(),\n    ) {\n        state.output_proxies.retain(|o| o == resource);\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wl_seat.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_server::{\n    protocol::{wl_keyboard, wl_pointer, wl_seat},\n    Resource as _,\n};\n\nuse crate::session::compositor::{seat::Cursor, Compositor};\n\nimpl wayland_server::GlobalDispatch<wl_seat::WlSeat, ()> for Compositor {\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wl_seat::WlSeat>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        let wl_seat = data_init.init(resource, ());\n        wl_seat.capabilities(wl_seat::Capability::Keyboard | wl_seat::Capability::Pointer);\n    }\n}\n\nimpl wayland_server::Dispatch<wl_seat::WlSeat, ()> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &wl_seat::WlSeat,\n        request: wl_seat::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_seat::Request::GetPointer { id } => {\n                let wl_pointer = data_init.init(id, ());\n                state.default_seat.get_pointer(wl_pointer);\n            }\n            wl_seat::Request::GetKeyboard { id } => {\n                let wl_keyboard = data_init.init(id, ());\n                state.default_seat.get_keyboard(wl_keyboard);\n            }\n            wl_seat::Request::GetTouch { .. 
} => {\n                resource.post_error(\n                    wl_seat::Error::MissingCapability,\n                    \"No touch capability advertized.\",\n                );\n            }\n            _ => (),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<wl_pointer::WlPointer, ()> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &wl_pointer::WlPointer,\n        request: wl_pointer::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_pointer::Request::SetCursor {\n                surface,\n                hotspot_x,\n                hotspot_y,\n                ..\n            } => {\n                let hotspot_x = hotspot_x.max(0) as u32;\n                let hotspot_y = hotspot_y.max(0) as u32;\n\n                let cursor = if let Some(wl_surface) = surface {\n                    Cursor::Surface {\n                        surface: *wl_surface.data().unwrap(),\n                        hotspot: (hotspot_x, hotspot_y).into(),\n                        needs_render: true,\n                        rendered: None,\n                    }\n                } else {\n                    Cursor::Hidden\n                };\n\n                state.set_cursor(resource, cursor);\n            }\n            wl_pointer::Request::Release => (),\n            _ => (),\n        }\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        resource: &wl_pointer::WlPointer,\n        _data: &(),\n    ) {\n        state.default_seat.destroy_pointer(resource);\n    }\n}\n\nimpl wayland_server::Dispatch<wl_keyboard::WlKeyboard, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wl_keyboard::WlKeyboard,\n        _request: 
wl_keyboard::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        resource: &wl_keyboard::WlKeyboard,\n        _data: &(),\n    ) {\n        state.default_seat.destroy_keyboard(resource);\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wl_shm.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    os::fd::AsRawFd as _,\n    sync::{Arc, RwLock},\n};\n\nuse tracing::error;\nuse wayland_server::{\n    protocol::{wl_shm, wl_shm_pool},\n    Resource as _,\n};\n\nuse crate::session::compositor::{\n    buffers::{fourcc_bpp, import_shm_buffer, validate_buffer_parameters, PlaneMetadata},\n    shm::{Pool, ShmPool, ShmPoolKey},\n    Compositor,\n};\n\nimpl wayland_server::GlobalDispatch<wl_shm::WlShm, ()> for Compositor {\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wl_shm::WlShm>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        let wl_shm = data_init.init(resource, ());\n        wl_shm.format(wl_shm::Format::Xrgb8888);\n        wl_shm.format(wl_shm::Format::Argb8888);\n    }\n}\n\nimpl wayland_server::Dispatch<wl_shm::WlShm, ()> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        wl_shm: &wl_shm::WlShm,\n        request: wl_shm::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_shm::Request::CreatePool { id, fd, size } => {\n                if size <= 0 {\n                    wl_shm.post_error(\n                        wl_shm::Error::InvalidStride,\n                        \"Negative or zero size provided.\",\n                    );\n                }\n\n                let fd_debug = fd.as_raw_fd();\n                let pool = match Pool::new(fd, size as usize) {\n                    Ok(p) => p,\n                    Err(err) => {\n                        error!(?err, fd = fd_debug, size, \"failed to map client shm\");\n                        
wl_shm.post_error(wl_shm::Error::InvalidFd, \"mmap failed.\");\n                        return;\n                    }\n                };\n\n                state.shm_pools.insert_with_key(|k| {\n                    let wl_shm_pool = data_init.init(id, k);\n                    ShmPool {\n                        _wl_shm: wl_shm.clone(),\n                        _wl_shm_pool: wl_shm_pool,\n                        // The pool shouldn't be unmapped until all buffers referencing it have been\n                        // destroyed. We represent this constraint with an Arc.\n                        pool: Arc::new(RwLock::new(pool)),\n                    }\n                });\n            }\n            _ => unreachable!(),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<wl_shm_pool::WlShmPool, ShmPoolKey> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &wl_shm_pool::WlShmPool,\n        request: wl_shm_pool::Request,\n        data: &ShmPoolKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wl_shm_pool::Request::CreateBuffer {\n                id,\n                offset,\n                width,\n                height,\n                stride,\n                format,\n            } => {\n                let pool = state\n                    .shm_pools\n                    .get(*data)\n                    .expect(\"shm_pool has no entry\")\n                    .pool\n                    .clone();\n\n                let format = match format.into_result() {\n                    Ok(wl_shm::Format::Argb8888) => drm_fourcc::DrmFourcc::Argb8888,\n                    Ok(wl_shm::Format::Xrgb8888) => drm_fourcc::DrmFourcc::Xrgb8888,\n                    _ => {\n                        resource.post_error(wl_shm::Error::InvalidFormat, \"Invalid format.\");\n                        return;\n 
                   }\n                };\n\n                let Some(bpp) = fourcc_bpp(format) else {\n                    resource.post_error(wl_shm::Error::InvalidFormat, \"Invalid format.\");\n                    return;\n                };\n\n                if let Err(msg) = validate_buffer_parameters(offset, width, height, stride, bpp) {\n                    resource.post_error(wl_shm::Error::InvalidStride, msg);\n                    return;\n                }\n\n                let buffer_size = stride * height;\n                if (offset + buffer_size) as usize > pool.read().unwrap().size {\n                    resource\n                        .post_error(wl_shm::Error::InvalidStride, \"Size exceeds pool capacity.\");\n                    return;\n                }\n\n                let format = PlaneMetadata {\n                    format,\n                    bpp,\n                    width: width as u32,\n                    height: height as u32,\n                    stride: stride as u32,\n                    offset: offset as u32,\n                };\n\n                let res = state.buffers.try_insert_with_key(|k| {\n                    let wl_buffer = data_init.init(id, k);\n                    import_shm_buffer(state.vk.clone(), wl_buffer, pool, format)\n                });\n\n                if res.is_err() {\n                    resource.post_error(wl_shm::Error::InvalidFd, \"Import failed.\");\n                };\n            }\n            wl_shm_pool::Request::Resize { size } => {\n                let shm_pool = state.shm_pools.get_mut(*data).expect(\"pool has no entry\");\n                let mut pool = shm_pool.pool.write().unwrap();\n\n                if size <= pool.size as i32 {\n                    resource.post_error(wl_shm::Error::InvalidStride, \"Invalid size provided.\");\n                    return;\n                }\n\n                match pool.resize(size as usize) {\n                    Ok(_) => (),\n                    
Err(err) => {\n                        error!(?err, \"failed to remap shm\");\n                        resource.post_error(wl_shm::Error::InvalidFd, \"mmap operation failed.\");\n                    }\n                }\n            }\n            wl_shm_pool::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        _resource: &wl_shm_pool::WlShmPool,\n        data: &ShmPoolKey,\n    ) {\n        // Buffers continue to be valid after their backing pool is destroyed.\n        state.shm_pools.remove(*data);\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wp_fractional_scale.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_protocols::wp::fractional_scale::v1::server::{\n    wp_fractional_scale_manager_v1, wp_fractional_scale_v1,\n};\nuse wayland_server::Resource as _;\n\nuse crate::session::compositor::{surface::SurfaceKey, Compositor};\n\nimpl wayland_server::GlobalDispatch<wp_fractional_scale_manager_v1::WpFractionalScaleManagerV1, ()>\n    for Compositor\n{\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wp_fractional_scale_manager_v1::WpFractionalScaleManagerV1>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<wp_fractional_scale_manager_v1::WpFractionalScaleManagerV1, ()>\n    for Compositor\n{\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &wp_fractional_scale_manager_v1::WpFractionalScaleManagerV1,\n        request: wp_fractional_scale_manager_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wp_fractional_scale_manager_v1::Request::GetFractionalScale { id, surface } => {\n                if let Some(surface_key) = surface.data::<SurfaceKey>() {\n                    let wp_fractional_scale = data_init.init(id, *surface_key);\n\n                    let surface = state\n                        .surfaces\n                        .get_mut(*surface_key)\n                        .expect(\"surface has no entry\");\n\n                    if surface.wp_fractional_scale.is_some() {\n                        resource.post_error(\n                            
wp_fractional_scale_manager_v1::Error::FractionalScaleExists,\n                            \"wp_fractional_scale object already exists for surface.\",\n                        )\n                    }\n\n                    surface.wp_fractional_scale = Some(wp_fractional_scale);\n                }\n            }\n            wp_fractional_scale_manager_v1::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<wp_fractional_scale_v1::WpFractionalScaleV1, SurfaceKey>\n    for Compositor\n{\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wp_fractional_scale_v1::WpFractionalScaleV1,\n        _request: wp_fractional_scale_v1::Request,\n        _data: &SurfaceKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wp_linux_dmabuf.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    os::fd::OwnedFd,\n    sync::{Arc, RwLock},\n};\n\nuse drm_fourcc::DrmFourcc;\nuse tracing::error;\nuse wayland_protocols::wp::linux_dmabuf::zv1::server::{\n    zwp_linux_buffer_params_v1, zwp_linux_dmabuf_feedback_v1, zwp_linux_dmabuf_v1,\n};\nuse wayland_server::{protocol::wl_buffer, Resource as _, WEnum};\n\nuse super::make_u64;\nuse crate::session::compositor::{\n    buffers::{fourcc_bpp, import_dmabuf_buffer, validate_buffer_parameters, PlaneMetadata},\n    Compositor,\n};\n\nimpl wayland_server::GlobalDispatch<zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1, ()> for Compositor {\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1, ()> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1,\n        request: zwp_linux_dmabuf_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            zwp_linux_dmabuf_v1::Request::CreateParams { params_id } => {\n                data_init.init(params_id, Arc::new(RwLock::new(Params::Empty)));\n            }\n            zwp_linux_dmabuf_v1::Request::GetDefaultFeedback { id } => {\n                let feedback = data_init.init(id, ());\n                state.emit_dmabuf_feedback(&feedback);\n            }\n            zwp_linux_dmabuf_v1::Request::GetSurfaceFeedback { id, .. 
} => {\n                let feedback = data_init.init(id, ());\n                state.emit_dmabuf_feedback(&feedback);\n            }\n            zwp_linux_dmabuf_v1::Request::Destroy => (),\n            _ => (),\n        }\n    }\n}\n\n#[derive(Debug)]\nenum Params {\n    Empty,\n    Config {\n        fd: OwnedFd,\n        offset: u32,\n        stride: u32,\n        modifier: u64,\n    },\n    Done,\n}\n\nimpl\n    wayland_server::Dispatch<\n        zwp_linux_buffer_params_v1::ZwpLinuxBufferParamsV1,\n        Arc<RwLock<Params>>,\n    > for Compositor\n{\n    fn request(\n        state: &mut Self,\n        client: &wayland_server::Client,\n        resource: &zwp_linux_buffer_params_v1::ZwpLinuxBufferParamsV1,\n        request: zwp_linux_buffer_params_v1::Request,\n        data: &Arc<RwLock<Params>>,\n        dh: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            zwp_linux_buffer_params_v1::Request::Add {\n                fd,\n                plane_idx,\n                offset,\n                stride,\n                modifier_hi,\n                modifier_lo,\n            } => {\n                if plane_idx > 0 {\n                    resource.post_error(\n                        zwp_linux_buffer_params_v1::Error::PlaneIdx,\n                        \"Multiplane images are not supported.\",\n                    );\n                    return;\n                }\n\n                let mut params = data.write().unwrap();\n                if matches!(*params, Params::Config { .. 
} | Params::Done) {\n                    resource.post_error(\n                        zwp_linux_buffer_params_v1::Error::PlaneSet,\n                        \"Plane 0 already configured.\",\n                    );\n                    return;\n                }\n\n                let modifier = make_u64(modifier_hi, modifier_lo);\n\n                if resource.version() >= 4 && !state.cached_dmabuf_feedback.contains(modifier) {\n                    resource.post_error(\n                        zwp_linux_buffer_params_v1::Error::InvalidFormat,\n                        \"Unsupported format.\",\n                    );\n                }\n\n                *params = Params::Config {\n                    fd,\n                    offset,\n                    stride,\n                    modifier,\n                };\n            }\n            zwp_linux_buffer_params_v1::Request::Create {\n                width,\n                height,\n                format,\n                flags,\n            } => {\n                let mut params = data.write().unwrap();\n                let format = match validate_create(&params, width, height, format, flags) {\n                    Ok(f) => f,\n                    Err((e, s)) => {\n                        resource.post_error(e, s);\n                        return;\n                    }\n                };\n\n                let Params::Config { fd, modifier, .. 
} =\n                    std::mem::replace(&mut *params, Params::Done)\n                else {\n                    unreachable!();\n                };\n\n                let res = state.buffers.try_insert_with_key(|k| {\n                    let wl_buffer =\n                        client.create_resource::<wl_buffer::WlBuffer, _, Compositor>(dh, 1, k)?;\n\n                    import_dmabuf_buffer(state.vk.clone(), wl_buffer, format, modifier.into(), fd)\n                });\n\n                if res.is_err() {\n                    resource.failed();\n                };\n            }\n            zwp_linux_buffer_params_v1::Request::CreateImmed {\n                buffer_id,\n                width,\n                height,\n                format,\n                flags,\n            } => {\n                let mut params = data.write().unwrap();\n                let format = match validate_create(&params, width, height, format, flags) {\n                    Ok(f) => f,\n                    Err((e, s)) => {\n                        resource.post_error(e, s);\n                        return;\n                    }\n                };\n\n                let Params::Config { fd, modifier, .. 
} =\n                    std::mem::replace(&mut *params, Params::Done)\n                else {\n                    unreachable!();\n                };\n\n                let res = state.buffers.try_insert_with_key(|k| {\n                    let wl_buffer = data_init.init(buffer_id, k);\n                    import_dmabuf_buffer(state.vk.clone(), wl_buffer, format, modifier.into(), fd)\n                });\n\n                if let Err(err) = res {\n                    error!(?err, \"failed to import dmabuf\");\n                    resource.post_error(\n                        zwp_linux_buffer_params_v1::Error::InvalidWlBuffer,\n                        \"Import failed.\",\n                    );\n                };\n            }\n            zwp_linux_buffer_params_v1::Request::Destroy => (),\n            _ => (),\n        }\n    }\n}\n\nfn validate_create(\n    params: &Params,\n    width: i32,\n    height: i32,\n    format: u32,\n    flags: WEnum<zwp_linux_buffer_params_v1::Flags>,\n) -> Result<PlaneMetadata, (zwp_linux_buffer_params_v1::Error, String)> {\n    if !flags\n        .into_result()\n        .map(|f| f.is_empty())\n        .unwrap_or_default()\n    {\n        return Err((\n            zwp_linux_buffer_params_v1::Error::InvalidFormat,\n            \"Invalid flags.\".to_string(),\n        ));\n    }\n\n    match *params {\n        Params::Empty => {\n            return Err((\n                zwp_linux_buffer_params_v1::Error::Incomplete,\n                \"Plane 0 not configured.\".to_string(),\n            ))\n        }\n        Params::Done => {\n            return Err((\n                zwp_linux_buffer_params_v1::Error::AlreadyUsed,\n                \"Params already consumed.\".to_string(),\n            ))\n        }\n        _ => (),\n    }\n\n    let format = match DrmFourcc::try_from(format) {\n        Ok(format) => format,\n        Err(_) => {\n            return Err((\n                zwp_linux_buffer_params_v1::Error::InvalidFormat,\n           
     \"Unknown format.\".to_string(),\n            ))\n        }\n    };\n\n    let Some(bpp) = fourcc_bpp(format) else {\n        return Err((\n            zwp_linux_buffer_params_v1::Error::InvalidFormat,\n            \"Unsupported format.\".to_string(),\n        ));\n    };\n\n    let Params::Config { offset, stride, .. } = params else {\n        unreachable!()\n    };\n\n    if let Err(s) = validate_buffer_parameters(*offset as i32, width, height, *stride as i32, bpp) {\n        return Err((zwp_linux_buffer_params_v1::Error::InvalidDimensions, s));\n    }\n\n    Ok(PlaneMetadata {\n        format,\n        bpp,\n        width: width as u32,\n        height: height as u32,\n        stride: *stride,\n        offset: *offset,\n    })\n}\n\nimpl wayland_server::Dispatch<zwp_linux_dmabuf_feedback_v1::ZwpLinuxDmabufFeedbackV1, ()>\n    for Compositor\n{\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &zwp_linux_dmabuf_feedback_v1::ZwpLinuxDmabufFeedbackV1,\n        _request: zwp_linux_dmabuf_feedback_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wp_linux_drm_syncobj.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse tracing::error;\nuse wayland_protocols::wp::linux_drm_syncobj::v1::server::{\n    wp_linux_drm_syncobj_manager_v1, wp_linux_drm_syncobj_surface_v1,\n    wp_linux_drm_syncobj_timeline_v1,\n};\nuse wayland_server::Resource as _;\n\nuse crate::session::compositor::{\n    buffers::{SyncobjTimeline, SyncobjTimelineKey},\n    surface::SurfaceKey,\n    Compositor,\n};\n\nimpl wayland_server::GlobalDispatch<wp_linux_drm_syncobj_manager_v1::WpLinuxDrmSyncobjManagerV1, ()>\n    for Compositor\n{\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wp_linux_drm_syncobj_manager_v1::WpLinuxDrmSyncobjManagerV1>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<wp_linux_drm_syncobj_manager_v1::WpLinuxDrmSyncobjManagerV1, ()>\n    for Compositor\n{\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &wp_linux_drm_syncobj_manager_v1::WpLinuxDrmSyncobjManagerV1,\n        request: wp_linux_drm_syncobj_manager_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wp_linux_drm_syncobj_manager_v1::Request::GetSurface { id, surface } => {\n                if let Some(surface_key) = surface.data::<SurfaceKey>() {\n                    let wp_syncobj_surface = data_init.init(id, *surface_key);\n\n                    let surface = state\n                        .surfaces\n                        .get_mut(*surface_key)\n                        .expect(\"surface has no entry\");\n\n                    if surface.wp_syncobj_surface.is_some() {\n   
                     resource.post_error(\n                            wp_linux_drm_syncobj_manager_v1::Error::SurfaceExists,\n                            \"A syncobj surface already exists for that wl_surface.\",\n                        );\n                        return;\n                    }\n\n                    surface.wp_syncobj_surface = Some(wp_syncobj_surface);\n                }\n            }\n            wp_linux_drm_syncobj_manager_v1::Request::ImportTimeline { id, fd } => {\n                if let Err(err) = state.imported_syncobj_timelines.try_insert_with_key(|k| {\n                    SyncobjTimeline::import(state.vk.clone(), data_init.init(id, k), fd)\n                }) {\n                    error!(\"failed to import syncobj timeline: {err:#}\");\n                    resource.post_error(\n                        wp_linux_drm_syncobj_manager_v1::Error::InvalidTimeline,\n                        \"Failed to import timeline.\",\n                    );\n                }\n            }\n            wp_linux_drm_syncobj_manager_v1::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n}\n\nimpl\n    wayland_server::Dispatch<\n        wp_linux_drm_syncobj_surface_v1::WpLinuxDrmSyncobjSurfaceV1,\n        SurfaceKey,\n    > for Compositor\n{\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wp_linux_drm_syncobj_surface_v1::WpLinuxDrmSyncobjSurfaceV1,\n        request: wp_linux_drm_syncobj_surface_v1::Request,\n        surface_key: &SurfaceKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wp_linux_drm_syncobj_surface_v1::Request::SetAcquirePoint {\n                timeline,\n                point_hi,\n                point_lo,\n            } => {\n                let timeline = timeline\n                    .data::<SyncobjTimelineKey>()\n                    
.and_then(|key| state.imported_syncobj_timelines.get(*key))\n                    .expect(\"timeline has no entry\");\n\n                let surface = state\n                    .surfaces\n                    .get_mut(*surface_key)\n                    .expect(\"surface has no entry\");\n\n                surface.pending_acquire_point =\n                    Some(timeline.new_timeline_point(super::make_u64(point_hi, point_lo)))\n            }\n            wp_linux_drm_syncobj_surface_v1::Request::SetReleasePoint {\n                timeline,\n                point_hi,\n                point_lo,\n            } => {\n                let timeline = timeline\n                    .data::<SyncobjTimelineKey>()\n                    .and_then(|key| state.imported_syncobj_timelines.get(*key))\n                    .expect(\"timeline has no entry\");\n\n                let surface = state\n                    .surfaces\n                    .get_mut(*surface_key)\n                    .expect(\"surface has no entry\");\n\n                surface.pending_release_point =\n                    Some(timeline.new_timeline_point(super::make_u64(point_hi, point_lo)))\n            }\n            wp_linux_drm_syncobj_surface_v1::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        _resource: &wp_linux_drm_syncobj_surface_v1::WpLinuxDrmSyncobjSurfaceV1,\n        surface_key: &SurfaceKey,\n    ) {\n        if let Some(surface) = state.surfaces.get_mut(*surface_key) {\n            surface.wp_syncobj_surface = None;\n            surface.pending_acquire_point = None;\n            surface.pending_release_point = None;\n        }\n    }\n}\n\nimpl\n    wayland_server::Dispatch<\n        wp_linux_drm_syncobj_timeline_v1::WpLinuxDrmSyncobjTimelineV1,\n        SyncobjTimelineKey,\n    > for Compositor\n{\n    fn request(\n        _state: &mut Self,\n        _client: 
&wayland_server::Client,\n        _resource: &wp_linux_drm_syncobj_timeline_v1::WpLinuxDrmSyncobjTimelineV1,\n        _request: wp_linux_drm_syncobj_timeline_v1::Request,\n        _data: &SyncobjTimelineKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wp_pointer_constraints.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_protocols::wp::pointer_constraints::zv1::server::{\n    zwp_confined_pointer_v1, zwp_locked_pointer_v1, zwp_pointer_constraints_v1,\n};\nuse wayland_server::Resource as _;\n\nuse crate::session::compositor::Compositor;\n\nimpl wayland_server::GlobalDispatch<zwp_pointer_constraints_v1::ZwpPointerConstraintsV1, ()>\n    for Compositor\n{\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<zwp_pointer_constraints_v1::ZwpPointerConstraintsV1>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<zwp_pointer_constraints_v1::ZwpPointerConstraintsV1, ()>\n    for Compositor\n{\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &zwp_pointer_constraints_v1::ZwpPointerConstraintsV1,\n        request: zwp_pointer_constraints_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            zwp_pointer_constraints_v1::Request::LockPointer {\n                id,\n                surface,\n                pointer,\n                lifetime,\n                ..\n            } => {\n                if state.default_seat.has_lock(&surface) {\n                    resource.post_error(\n                        zwp_pointer_constraints_v1::Error::AlreadyConstrained,\n                        \"There already exists a pointer constraint for that surface on this seat.\",\n                    );\n                    return;\n                }\n\n                let wp_locked_pointer = data_init.init(id, ());\n                let oneshot = 
lifetime.into_result().ok()\n                    == Some(zwp_pointer_constraints_v1::Lifetime::Oneshot);\n\n                state\n                    .default_seat\n                    .create_lock(pointer, surface, wp_locked_pointer, oneshot);\n            }\n            zwp_pointer_constraints_v1::Request::ConfinePointer { id, .. } => {\n                // We don't support confined pointers.\n                data_init.init(id, ());\n            }\n            zwp_pointer_constraints_v1::Request::Destroy => (),\n            _ => (),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<zwp_locked_pointer_v1::ZwpLockedPointerV1, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &zwp_locked_pointer_v1::ZwpLockedPointerV1,\n        _request: zwp_locked_pointer_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        resource: &zwp_locked_pointer_v1::ZwpLockedPointerV1,\n        _data: &(),\n    ) {\n        state.default_seat.destroy_lock(resource);\n    }\n}\n\nimpl wayland_server::Dispatch<zwp_confined_pointer_v1::ZwpConfinedPointerV1, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &zwp_confined_pointer_v1::ZwpConfinedPointerV1,\n        _request: zwp_confined_pointer_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wp_presentation.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_protocols::wp::presentation_time::server::{wp_presentation, wp_presentation_feedback};\nuse wayland_server::Resource as _;\n\nuse crate::session::compositor::{surface::SurfaceKey, Compositor};\n\nimpl wayland_server::GlobalDispatch<wp_presentation::WpPresentation, ()> for Compositor {\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<wp_presentation::WpPresentation>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        let wp_presentation = data_init.init(resource, ());\n        wp_presentation.clock_id(libc::CLOCK_MONOTONIC as u32)\n    }\n}\n\nimpl wayland_server::Dispatch<wp_presentation::WpPresentation, ()> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wp_presentation::WpPresentation,\n        request: wp_presentation::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            wp_presentation::Request::Feedback {\n                surface,\n                callback: id,\n            } => {\n                if let Some(surface_key) = surface.data::<SurfaceKey>() {\n                    let wp_presentation_feedback = data_init.init(id, *surface_key);\n\n                    // for wl_output in state.output_proxies.iter().filter(|wl_output|\n                    // wl_output.id().same_client_as(surface.id())) {\n                    //     wp_presentation_feedback.sync_output()\n                    // }\n\n                    let surface = state\n                        .surfaces\n                        .get_mut(*surface_key)\n                        .expect(\"surface 
has no entry\");\n\n                    surface.pending_feedback = Some(wp_presentation_feedback);\n                }\n            }\n            wp_presentation::Request::Destroy => (),\n            _ => (),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<wp_presentation_feedback::WpPresentationFeedback, SurfaceKey>\n    for Compositor\n{\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &wp_presentation_feedback::WpPresentationFeedback,\n        _request: wp_presentation_feedback::Request,\n        _data: &SurfaceKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wp_relative_pointer.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_protocols::wp::relative_pointer::zv1::server::{\n    zwp_relative_pointer_manager_v1, zwp_relative_pointer_v1,\n};\n\nuse crate::session::compositor::Compositor;\n\nimpl\n    wayland_server::GlobalDispatch<zwp_relative_pointer_manager_v1::ZwpRelativePointerManagerV1, ()>\n    for Compositor\n{\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<zwp_relative_pointer_manager_v1::ZwpRelativePointerManagerV1>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<zwp_relative_pointer_manager_v1::ZwpRelativePointerManagerV1, ()>\n    for Compositor\n{\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &zwp_relative_pointer_manager_v1::ZwpRelativePointerManagerV1,\n        request: zwp_relative_pointer_manager_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            zwp_relative_pointer_manager_v1::Request::GetRelativePointer { id, pointer } => {\n                let wp_relative_pointer = data_init.init(id, ());\n\n                state\n                    .default_seat\n                    .get_relative_pointer(wp_relative_pointer, pointer);\n            }\n            zwp_relative_pointer_manager_v1::Request::Destroy => (),\n            _ => (),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<zwp_relative_pointer_v1::ZwpRelativePointerV1, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: 
&zwp_relative_pointer_v1::ZwpRelativePointerV1,\n        _request: zwp_relative_pointer_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        resource: &zwp_relative_pointer_v1::ZwpRelativePointerV1,\n        _data: &(),\n    ) {\n        state.default_seat.destroy_relative_pointer(resource);\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/wp_text_input.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_protocols::wp::text_input::zv3::server::{\n    zwp_text_input_manager_v3, zwp_text_input_v3,\n};\n\nuse crate::session::compositor::Compositor;\n\nimpl wayland_server::GlobalDispatch<zwp_text_input_manager_v3::ZwpTextInputManagerV3, ()>\n    for Compositor\n{\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<zwp_text_input_manager_v3::ZwpTextInputManagerV3>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<zwp_text_input_manager_v3::ZwpTextInputManagerV3, ()> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &zwp_text_input_manager_v3::ZwpTextInputManagerV3,\n        request: zwp_text_input_manager_v3::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            zwp_text_input_manager_v3::Request::GetTextInput { id, .. 
} => {\n                let wp_text_input = data_init.init(id, ());\n\n                state.default_seat.get_text_input(wp_text_input);\n            }\n            zwp_text_input_manager_v3::Request::Destroy => (),\n            _ => (),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<zwp_text_input_v3::ZwpTextInputV3, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &zwp_text_input_v3::ZwpTextInputV3,\n        _request: zwp_text_input_v3::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        resource: &zwp_text_input_v3::ZwpTextInputV3,\n        _data: &(),\n    ) {\n        state.default_seat.destroy_text_input(resource);\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/xdg_shell.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_protocols::xdg::shell::server::{\n    xdg_popup, xdg_positioner, xdg_surface, xdg_toplevel, xdg_wm_base,\n};\nuse wayland_server::Resource as _;\n\nuse crate::session::compositor::{\n    surface::{SurfaceKey, SurfaceRole},\n    Compositor,\n};\n\nimpl wayland_server::GlobalDispatch<xdg_wm_base::XdgWmBase, ()> for Compositor {\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<xdg_wm_base::XdgWmBase>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n}\n\nimpl wayland_server::Dispatch<xdg_wm_base::XdgWmBase, ()> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &xdg_wm_base::XdgWmBase,\n        request: xdg_wm_base::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            xdg_wm_base::Request::CreatePositioner { id } => {\n                // Not used yet.\n                data_init.init(id, ());\n            }\n            xdg_wm_base::Request::GetXdgSurface { id, surface } => {\n                let surface_id = surface\n                    .data::<SurfaceKey>()\n                    .expect(\"surface has no userdata\");\n\n                let surface = state\n                    .surfaces\n                    .get(*surface_id)\n                    .expect(\"surface has no entry\");\n\n                if surface.content.is_some() {\n                    resource.post_error(\n                        xdg_surface::Error::AlreadyConstructed,\n                        \"The surface already has a buffer attached.\",\n                    );\n 
               }\n\n                data_init.init(id, *surface_id);\n            }\n            xdg_wm_base::Request::Pong { .. } => (),\n            xdg_wm_base::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<xdg_surface::XdgSurface, SurfaceKey> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &xdg_surface::XdgSurface,\n        request: xdg_surface::Request,\n        data: &SurfaceKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            xdg_surface::Request::GetToplevel { id } => {\n                let xdg_toplevel = data_init.init(id, *data);\n\n                if !state.set_surface_role(\n                    *data,\n                    SurfaceRole::XdgToplevel {\n                        xdg_surface: resource.clone(),\n                        xdg_toplevel,\n                    },\n                ) {\n                    resource.post_error(xdg_wm_base::Error::Role, \"Surface already has a role.\");\n                }\n            }\n            xdg_surface::Request::GetPopup { id, .. } => {\n                data_init.init(id, ());\n            }\n            xdg_surface::Request::AckConfigure { serial } => {\n                let surface = state.surfaces.get_mut(*data).expect(\"surface has no entry\");\n\n                match surface.pending_configure {\n                    Some(s) if serial == s => {\n                        surface.pending_configure = None;\n                    }\n                    Some(s) if serial < s => (),\n                    _ => resource.post_error(xdg_surface::Error::InvalidSerial, \"Invalid serial.\"),\n                }\n            }\n            xdg_surface::Request::SetWindowGeometry { .. 
} => (),\n            xdg_surface::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        resource: &xdg_surface::XdgSurface,\n        data: &SurfaceKey,\n    ) {\n        // Check that there isn't a surface role created from this object.\n        match state\n            .surfaces\n            .get(*data)\n            .and_then(|s| s.role.current.as_ref())\n        {\n            Some(SurfaceRole::XdgToplevel { xdg_surface, .. }) if xdg_surface == resource => {\n                resource.post_error(\n                    xdg_surface::Error::DefunctRoleObject,\n                    \"The role created from this object must be destroyed first.\",\n                );\n            }\n            _ => (),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<xdg_positioner::XdgPositioner, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &xdg_positioner::XdgPositioner,\n        _request: xdg_positioner::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        // TODO we don't support popups at present.\n    }\n}\n\nimpl wayland_server::Dispatch<xdg_popup::XdgPopup, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &xdg_popup::XdgPopup,\n        request: xdg_popup::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            xdg_popup::Request::Grab { .. 
} => {\n                // Immediately dismiss the popup, because we don't support popups.\n                // resource.post_error(xdg_popup::Error::InvalidGrab, \"Popups are not\n                // supported.\");\n                resource.popup_done();\n            }\n            xdg_popup::Request::Reposition { .. } => (),\n            xdg_popup::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n        // TODO we don't support popups at present.\n    }\n}\n\nimpl wayland_server::Dispatch<xdg_toplevel::XdgToplevel, SurfaceKey> for Compositor {\n    fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &xdg_toplevel::XdgToplevel,\n        request: xdg_toplevel::Request,\n        data: &SurfaceKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            xdg_toplevel::Request::SetParent { .. } => (),\n            xdg_toplevel::Request::SetTitle { title } => {\n                state\n                    .surfaces\n                    .get_mut(*data)\n                    .expect(\"surface has no entry\")\n                    .title = Some(title);\n            }\n            xdg_toplevel::Request::SetAppId { app_id } => {\n                state\n                    .surfaces\n                    .get_mut(*data)\n                    .expect(\"surface has no entry\")\n                    .app_id = Some(app_id);\n            }\n            xdg_toplevel::Request::ShowWindowMenu { .. } => (),\n            xdg_toplevel::Request::Move { .. } => (),\n            xdg_toplevel::Request::Resize { .. } => (),\n            xdg_toplevel::Request::SetMaxSize { .. } => (),\n            xdg_toplevel::Request::SetMinSize { .. } => (),\n            xdg_toplevel::Request::SetMaximized => (),\n            xdg_toplevel::Request::UnsetMaximized => (),\n            xdg_toplevel::Request::SetFullscreen { .. 
} => (),\n            xdg_toplevel::Request::UnsetFullscreen => (),\n            xdg_toplevel::Request::SetMinimized => (),\n            xdg_toplevel::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n\n    fn destroyed(\n        state: &mut Self,\n        _client: wayland_server::backend::ClientId,\n        resource: &xdg_toplevel::XdgToplevel,\n        data: &SurfaceKey,\n    ) {\n        let surface = state.surfaces.get_mut(*data);\n        match surface.as_ref().and_then(|s| s.role.current.as_ref()) {\n            Some(SurfaceRole::XdgToplevel { xdg_toplevel, .. }) if xdg_toplevel == resource => {\n                surface.unwrap().role.current = None;\n                state.unmap_surface(*data);\n            }\n            _ => (),\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch/xwayland_shell.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse tracing::trace;\nuse wayland_protocols::xwayland::shell::v1::server::{xwayland_shell_v1, xwayland_surface_v1};\nuse wayland_server::Resource as _;\n\nuse crate::session::compositor::{\n    surface::{SurfaceKey, SurfaceRole},\n    ClientState, Compositor,\n};\n\nimpl wayland_server::GlobalDispatch<xwayland_shell_v1::XwaylandShellV1, ()> for Compositor {\n    fn bind(\n        _state: &mut Self,\n        _handle: &wayland_server::DisplayHandle,\n        _client: &wayland_server::Client,\n        resource: wayland_server::New<xwayland_shell_v1::XwaylandShellV1>,\n        _global_data: &(),\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        data_init.init(resource, ());\n    }\n\n    fn can_view(client: wayland_server::Client, _global_data: &()) -> bool {\n        client\n            .get_data::<ClientState>()\n            .map(|data| data.xwayland)\n            .unwrap_or_default()\n    }\n}\n\nimpl wayland_server::Dispatch<xwayland_shell_v1::XwaylandShellV1, ()> for Compositor {\n    fn request(\n        _state: &mut Self,\n        _client: &wayland_server::Client,\n        _resource: &xwayland_shell_v1::XwaylandShellV1,\n        request: xwayland_shell_v1::Request,\n        _data: &(),\n        _dhandle: &wayland_server::DisplayHandle,\n        data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            xwayland_shell_v1::Request::GetXwaylandSurface { id, surface } => {\n                let surface_id = surface\n                    .data::<SurfaceKey>()\n                    .expect(\"surface has no userdata\");\n\n                data_init.init(id, *surface_id);\n            }\n            xwayland_shell_v1::Request::Destroy => (),\n            _ => unreachable!(),\n        }\n    }\n}\n\nimpl wayland_server::Dispatch<xwayland_surface_v1::XwaylandSurfaceV1, SurfaceKey> for Compositor {\n   
 fn request(\n        state: &mut Self,\n        _client: &wayland_server::Client,\n        resource: &xwayland_surface_v1::XwaylandSurfaceV1,\n        request: xwayland_surface_v1::Request,\n        data: &SurfaceKey,\n        _dhandle: &wayland_server::DisplayHandle,\n        _data_init: &mut wayland_server::DataInit<'_, Self>,\n    ) {\n        match request {\n            xwayland_surface_v1::Request::SetSerial {\n                serial_lo,\n                serial_hi,\n            } => {\n                let serial = super::make_u64(serial_hi, serial_lo);\n                trace!(serial, \"associating xwindow with surface\");\n\n                if !state.set_surface_role(*data, SurfaceRole::XWayland { serial }) {\n                    resource.post_error(\n                        xwayland_shell_v1::Error::Role,\n                        \"Surface already has a role.\",\n                    );\n                }\n            }\n            xwayland_surface_v1::Request::Destroy => {}\n            _ => unreachable!(),\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/dispatch.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nmod wl_buffer;\nmod wl_compositor;\nmod wl_data_device_manager;\nmod wl_drm;\nmod wl_output;\nmod wl_seat;\nmod wl_shm;\nmod wp_fractional_scale;\nmod wp_linux_dmabuf;\nmod wp_linux_drm_syncobj;\nmod wp_pointer_constraints;\nmod wp_presentation;\nmod wp_relative_pointer;\nmod wp_text_input;\nmod xdg_shell;\nmod xwayland_shell;\n\nfn make_u64(hi: u32, lo: u32) -> u64 {\n    ((hi as u64) << 32) | lo as u64\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/oneshot_render.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse bytes::Bytes;\nuse drm_fourcc::DrmFourcc;\nuse image::ImageEncoder as _;\nuse tracing::instrument;\n\nuse crate::session::compositor::buffers::{fourcc_bpp, PlaneMetadata};\nuse crate::vulkan::VkHostBuffer;\n\n#[instrument(skip_all)]\npub fn shm_to_png(buffer: &VkHostBuffer, format: PlaneMetadata) -> anyhow::Result<Bytes> {\n    // Needs to be updated if we start supporting float shm buffers.\n    match fourcc_bpp(format.format) {\n        Some(4) => (),\n        _ => panic!(\"shm texture has unexpected format\"),\n    }\n\n    let src = unsafe {\n        std::slice::from_raw_parts_mut(\n            buffer.access as *mut u8,\n            (format.stride * format.height) as usize,\n        )\n    };\n\n    let mut buf = vec![0_u8; (format.stride * format.height) as usize];\n    buf.copy_from_slice(src);\n\n    let width = format.width as usize;\n    let height = format.height as usize;\n    let format = format.format;\n\n    // For png, we need rgba8 with no padding.\n    let mut out = Vec::with_capacity(width * height * 4);\n    match format {\n        DrmFourcc::Argb8888 | DrmFourcc::Xrgb8888 => {\n            for row in buf.chunks_exact(width * 4) {\n                for px in row.chunks_exact(4) {\n                    let out_px = [px[2], px[1], px[0], px[3]];\n                    out.extend_from_slice(&out_px);\n                }\n            }\n        }\n        _ => unreachable!(),\n    }\n\n    let mut png = std::io::Cursor::new(Vec::new());\n    image::codecs::png::PngEncoder::new(&mut png).write_image(\n        &out,\n        width as u32,\n        height as u32,\n        image::ExtendedColorType::Rgba8,\n    )?;\n\n    Ok(png.into_inner().into())\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/output.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse wayland_server::{protocol::wl_output, Resource as _};\n\nuse crate::session::compositor::{Compositor, DisplayParams};\n\nimpl Compositor {\n    pub fn emit_output_params(&mut self) {\n        let params = self.display_params;\n        for proxy in &self.output_proxies {\n            configure_output(proxy, params);\n        }\n    }\n}\n\npub fn configure_output(output: &wl_output::WlOutput, params: DisplayParams) {\n    let version = output.version();\n    if version >= 4 {\n        output.name(\"MM\".to_string());\n        output.description(\"Magic Mirror Virtual Display\".to_string());\n    }\n\n    output.geometry(\n        0,\n        0,\n        params.width as i32,\n        params.height as i32,\n        wl_output::Subpixel::None,\n        \"Magic Mirror\".to_string(),\n        \"Virtual Display\".to_string(),\n        wl_output::Transform::Normal,\n    );\n\n    output.mode(\n        wl_output::Mode::Current | wl_output::Mode::Preferred,\n        params.width as i32,\n        params.height as i32,\n        params.framerate as i32 * 1000,\n    );\n\n    if version >= 2 {\n        // In the case of fractional scale, we always send the next integer\n        // (and then scale down for clients that don't support fractional scale).\n        let scale: f64 = params.ui_scale.into();\n        output.scale(scale.ceil() as i32);\n\n        output.done();\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/protocols/wayland-drm.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<protocol name=\"drm\">\n\n  <copyright>\n    Copyright © 2008-2011 Kristian Høgsberg\n    Copyright © 2010-2011 Intel Corporation\n\n    Permission to use, copy, modify, distribute, and sell this\n    software and its documentation for any purpose is hereby granted\n    without fee, provided that\\n the above copyright notice appear in\n    all copies and that both that copyright notice and this permission\n    notice appear in supporting documentation, and that the name of\n    the copyright holders not be used in advertising or publicity\n    pertaining to distribution of the software without specific,\n    written prior permission.  The copyright holders make no\n    representations about the suitability of this software for any\n    purpose.  It is provided \"as is\" without express or implied\n    warranty.\n\n    THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS\n    SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND\n    FITNESS, IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY\n    SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\n    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN\n    AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,\n    ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF\n    THIS SOFTWARE.\n  </copyright>\n\n  <!-- drm support. This object is created by the server and published\n       using the display's global event. -->\n  <interface name=\"wl_drm\" version=\"2\">\n    <enum name=\"error\">\n      <entry name=\"authenticate_fail\" value=\"0\"/>\n      <entry name=\"invalid_format\" value=\"1\"/>\n      <entry name=\"invalid_name\" value=\"2\"/>\n    </enum>\n\n    <enum name=\"format\">\n      <!-- The drm format codes match the #defines in drm_fourcc.h.\n           The formats actually supported by the compositor will be\n           reported by the format event. 
New codes must not be added,\n           unless directly taken from drm_fourcc.h. -->\n      <entry name=\"c8\" value=\"0x20203843\"/>\n      <entry name=\"rgb332\" value=\"0x38424752\"/>\n      <entry name=\"bgr233\" value=\"0x38524742\"/>\n      <entry name=\"xrgb4444\" value=\"0x32315258\"/>\n      <entry name=\"xbgr4444\" value=\"0x32314258\"/>\n      <entry name=\"rgbx4444\" value=\"0x32315852\"/>\n      <entry name=\"bgrx4444\" value=\"0x32315842\"/>\n      <entry name=\"argb4444\" value=\"0x32315241\"/>\n      <entry name=\"abgr4444\" value=\"0x32314241\"/>\n      <entry name=\"rgba4444\" value=\"0x32314152\"/>\n      <entry name=\"bgra4444\" value=\"0x32314142\"/>\n      <entry name=\"xrgb1555\" value=\"0x35315258\"/>\n      <entry name=\"xbgr1555\" value=\"0x35314258\"/>\n      <entry name=\"rgbx5551\" value=\"0x35315852\"/>\n      <entry name=\"bgrx5551\" value=\"0x35315842\"/>\n      <entry name=\"argb1555\" value=\"0x35315241\"/>\n      <entry name=\"abgr1555\" value=\"0x35314241\"/>\n      <entry name=\"rgba5551\" value=\"0x35314152\"/>\n      <entry name=\"bgra5551\" value=\"0x35314142\"/>\n      <entry name=\"rgb565\" value=\"0x36314752\"/>\n      <entry name=\"bgr565\" value=\"0x36314742\"/>\n      <entry name=\"rgb888\" value=\"0x34324752\"/>\n      <entry name=\"bgr888\" value=\"0x34324742\"/>\n      <entry name=\"xrgb8888\" value=\"0x34325258\"/>\n      <entry name=\"xbgr8888\" value=\"0x34324258\"/>\n      <entry name=\"rgbx8888\" value=\"0x34325852\"/>\n      <entry name=\"bgrx8888\" value=\"0x34325842\"/>\n      <entry name=\"argb8888\" value=\"0x34325241\"/>\n      <entry name=\"abgr8888\" value=\"0x34324241\"/>\n      <entry name=\"rgba8888\" value=\"0x34324152\"/>\n      <entry name=\"bgra8888\" value=\"0x34324142\"/>\n      <entry name=\"xrgb2101010\" value=\"0x30335258\"/>\n      <entry name=\"xbgr2101010\" value=\"0x30334258\"/>\n      <entry name=\"rgbx1010102\" value=\"0x30335852\"/>\n      <entry name=\"bgrx1010102\" 
value=\"0x30335842\"/>\n      <entry name=\"argb2101010\" value=\"0x30335241\"/>\n      <entry name=\"abgr2101010\" value=\"0x30334241\"/>\n      <entry name=\"rgba1010102\" value=\"0x30334152\"/>\n      <entry name=\"bgra1010102\" value=\"0x30334142\"/>\n      <entry name=\"yuyv\" value=\"0x56595559\"/>\n      <entry name=\"yvyu\" value=\"0x55595659\"/>\n      <entry name=\"uyvy\" value=\"0x59565955\"/>\n      <entry name=\"vyuy\" value=\"0x59555956\"/>\n      <entry name=\"ayuv\" value=\"0x56555941\"/>\n      <entry name=\"xyuv8888\" value=\"0x56555958\"/>\n      <entry name=\"nv12\" value=\"0x3231564e\"/>\n      <entry name=\"nv21\" value=\"0x3132564e\"/>\n      <entry name=\"nv16\" value=\"0x3631564e\"/>\n      <entry name=\"nv61\" value=\"0x3136564e\"/>\n      <entry name=\"yuv410\" value=\"0x39565559\"/>\n      <entry name=\"yvu410\" value=\"0x39555659\"/>\n      <entry name=\"yuv411\" value=\"0x31315559\"/>\n      <entry name=\"yvu411\" value=\"0x31315659\"/>\n      <entry name=\"yuv420\" value=\"0x32315559\"/>\n      <entry name=\"yvu420\" value=\"0x32315659\"/>\n      <entry name=\"yuv422\" value=\"0x36315559\"/>\n      <entry name=\"yvu422\" value=\"0x36315659\"/>\n      <entry name=\"yuv444\" value=\"0x34325559\"/>\n      <entry name=\"yvu444\" value=\"0x34325659\"/>\n      <entry name=\"abgr16f\" value=\"0x48344241\"/>\n      <entry name=\"xbgr16f\" value=\"0x48344258\"/>\n    </enum>\n\n    <!-- Call this request with the magic received from drmGetMagic().\n         It will be passed on to the drmAuthMagic() or\n         DRIAuthConnection() call.  This authentication must be\n         completed before create_buffer could be used. -->\n    <request name=\"authenticate\">\n      <arg name=\"id\" type=\"uint\"/>\n    </request>\n\n    <!-- Create a wayland buffer for the named DRM buffer.  
The DRM\n         surface must have a name using the flink ioctl -->\n    <request name=\"create_buffer\">\n      <arg name=\"id\" type=\"new_id\" interface=\"wl_buffer\"/>\n      <arg name=\"name\" type=\"uint\"/>\n      <arg name=\"width\" type=\"int\"/>\n      <arg name=\"height\" type=\"int\"/>\n      <arg name=\"stride\" type=\"uint\"/>\n      <arg name=\"format\" type=\"uint\"/>\n    </request>\n\n    <!-- Create a wayland buffer for the named DRM buffer.  The DRM\n         surface must have a name using the flink ioctl -->\n    <request name=\"create_planar_buffer\">\n      <arg name=\"id\" type=\"new_id\" interface=\"wl_buffer\"/>\n      <arg name=\"name\" type=\"uint\"/>\n      <arg name=\"width\" type=\"int\"/>\n      <arg name=\"height\" type=\"int\"/>\n      <arg name=\"format\" type=\"uint\"/>\n      <arg name=\"offset0\" type=\"int\"/>\n      <arg name=\"stride0\" type=\"int\"/>\n      <arg name=\"offset1\" type=\"int\"/>\n      <arg name=\"stride1\" type=\"int\"/>\n      <arg name=\"offset2\" type=\"int\"/>\n      <arg name=\"stride2\" type=\"int\"/>\n    </request>\n\n    <!-- Notification of the path of the drm device which is used by\n         the server.  The client should use this device for creating\n         local buffers.  Only buffers created from this device should\n         be be passed to the server using this drm object's\n         create_buffer request. 
-->\n    <event name=\"device\">\n      <arg name=\"name\" type=\"string\"/>\n    </event>\n\n    <event name=\"format\">\n      <arg name=\"format\" type=\"uint\"/>\n    </event>\n\n    <!-- Raised if the authenticate request succeeded -->\n    <event name=\"authenticated\"/>\n\n    <enum name=\"capability\" since=\"2\">\n      <description summary=\"wl_drm capability bitmask\">\n        Bitmask of capabilities.\n      </description>\n      <entry name=\"prime\" value=\"1\" summary=\"wl_drm prime available\"/>\n    </enum>\n\n    <event name=\"capabilities\">\n      <arg name=\"value\" type=\"uint\"/>\n    </event>\n\n    <!-- Version 2 additions -->\n\n    <!-- Create a wayland buffer for the prime fd.  Use for regular and planar\n         buffers.  Pass 0 for offset and stride for unused planes. -->\n    <request name=\"create_prime_buffer\" since=\"2\">\n      <arg name=\"id\" type=\"new_id\" interface=\"wl_buffer\"/>\n      <arg name=\"name\" type=\"fd\"/>\n      <arg name=\"width\" type=\"int\"/>\n      <arg name=\"height\" type=\"int\"/>\n      <arg name=\"format\" type=\"uint\"/>\n      <arg name=\"offset0\" type=\"int\"/>\n      <arg name=\"stride0\" type=\"int\"/>\n      <arg name=\"offset1\" type=\"int\"/>\n      <arg name=\"stride1\" type=\"int\"/>\n      <arg name=\"offset2\" type=\"int\"/>\n      <arg name=\"stride2\" type=\"int\"/>\n    </request>\n\n  </interface>\n\n</protocol>\n"
  },
  {
    "path": "mm-server/src/session/compositor/protocols/wl_drm.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\n#![allow(non_upper_case_globals)]\n#![allow(non_camel_case_types)]\n\nuse wayland_server;\nuse wayland_server::protocol::*;\n\npub mod __interfaces {\n    use wayland_server::backend as wayland_backend;\n    use wayland_server::protocol::__interfaces::*;\n    wayland_scanner::generate_interfaces!(\"src/session/compositor/protocols/wayland-drm.xml\");\n}\n\nuse self::__interfaces::*;\nwayland_scanner::generate_server_code!(\"src/session/compositor/protocols/wayland-drm.xml\");\n\npub use wl_drm::*;\n"
  },
  {
    "path": "mm-server/src/session/compositor/protocols.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\npub mod wl_drm;\n"
  },
  {
    "path": "mm-server/src/session/compositor/sealed.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    ffi::CStr,\n    fs::File,\n    io::{Seek as _, SeekFrom, Write as _},\n    os::fd::{AsFd, AsRawFd, BorrowedFd},\n};\n\nuse rustix::fs::{fcntl_add_seals, memfd_create, MemfdFlags, SealFlags};\n\npub struct SealedFile {\n    file: File,\n    size: usize,\n}\n\nimpl SealedFile {\n    pub fn new(name: impl AsRef<CStr>, contents: &[u8]) -> anyhow::Result<Self> {\n        let fd = memfd_create(\n            name.as_ref(),\n            MemfdFlags::CLOEXEC | MemfdFlags::ALLOW_SEALING,\n        )?;\n\n        let mut file: File = fd.into();\n        file.write_all(contents)?;\n        file.flush()?;\n        file.seek(SeekFrom::Start(0))?;\n\n        fcntl_add_seals(\n            &file,\n            SealFlags::SEAL | SealFlags::WRITE | SealFlags::SHRINK | SealFlags::GROW,\n        )?;\n\n        Ok(Self {\n            file,\n            size: contents.len(),\n        })\n    }\n\n    pub fn size(&self) -> usize {\n        self.size\n    }\n}\n\nimpl AsRawFd for SealedFile {\n    fn as_raw_fd(&self) -> std::os::unix::prelude::RawFd {\n        self.file.as_raw_fd()\n    }\n}\n\nimpl AsFd for SealedFile {\n    fn as_fd(&self) -> BorrowedFd<'_> {\n        self.file.as_fd()\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/seat.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse bytes::Bytes;\nuse cstr::cstr;\nuse hashbrown::{HashMap, HashSet};\nuse tracing::{debug, warn};\nuse wayland_protocols::wp::{\n    pointer_constraints::zv1::server::zwp_locked_pointer_v1,\n    relative_pointer::zv1::server::zwp_relative_pointer_v1,\n    text_input::zv3::server::zwp_text_input_v3,\n};\nuse wayland_server::{\n    protocol::{wl_keyboard, wl_pointer, wl_surface},\n    Resource as _,\n};\n\nuse super::{surface, ControlMessage, SessionEvent};\nuse crate::session::compositor::{\n    buffers::BufferBacking,\n    oneshot_render::shm_to_png,\n    sealed::SealedFile,\n    serial::Serial,\n    surface::{surface_vector_to_buffer, SurfaceKey, SurfaceRole},\n    Compositor,\n};\nuse crate::session::EPOCH;\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub enum KeyState {\n    Pressed,\n    Released,\n    Repeat,\n}\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub enum ButtonState {\n    Pressed,\n    Released,\n}\n\nimpl From<ButtonState> for wl_pointer::ButtonState {\n    fn from(value: ButtonState) -> Self {\n        match value {\n            ButtonState::Pressed => wl_pointer::ButtonState::Pressed,\n            ButtonState::Released => wl_pointer::ButtonState::Released,\n        }\n    }\n}\n\n#[derive(Debug)]\nstruct Pointer {\n    client_id: wayland_server::backend::ClientId,\n    pending_frame: bool,\n}\n\n#[derive(Debug)]\nstruct PointerLock {\n    wl_pointer: wl_pointer::WlPointer,\n    wp_locked_pointer: zwp_locked_pointer_v1::ZwpLockedPointerV1,\n    oneshot: bool,\n    defunct: bool,\n}\n\n#[derive(Default, Debug, PartialEq, Eq)]\npub enum Cursor {\n    #[default]\n    Unset,\n    Hidden,\n    Surface {\n        surface: SurfaceKey,\n        needs_render: bool,\n        // Contains the hotspot in physical coords.\n        rendered: Option<(Bytes, glam::UVec2)>,\n        // In surface coords, supplied by the client.\n        hotspot: 
glam::UVec2,\n    },\n}\n\npub struct Seat {\n    pointers: HashMap<wl_pointer::WlPointer, Pointer>,\n    relative_pointers:\n        HashMap<zwp_relative_pointer_v1::ZwpRelativePointerV1, wl_pointer::WlPointer>,\n    pointer_focus: Option<(wl_surface::WlSurface, glam::DVec2)>,\n    pointer_coords: Option<glam::DVec2>, // Global coords.\n\n    keyboards: HashSet<wl_keyboard::WlKeyboard>,\n    text_inputs: HashSet<zwp_text_input_v3::ZwpTextInputV3>,\n    keyboard_focus: Option<wl_surface::WlSurface>,\n    keymap: SealedFile,\n\n    inactive_pointer_locks: HashMap<wl_surface::WlSurface, PointerLock>,\n    pointer_lock: Option<(wl_surface::WlSurface, PointerLock)>,\n\n    cursor: Cursor,\n}\n\nimpl Default for Seat {\n    fn default() -> Self {\n        let keymap = SealedFile::new(\n            cstr!(\"mm-keymap\"),\n            include_bytes!(concat!(env!(\"OUT_DIR\"), \"/keymaps/iso_us.txt\")),\n        )\n        .expect(\"failed to create keymap sealed fd\");\n\n        Self {\n            pointers: HashMap::default(),\n            relative_pointers: HashMap::default(),\n            pointer_focus: None,\n            pointer_coords: None,\n\n            keyboards: HashSet::default(),\n            text_inputs: HashSet::default(),\n            keyboard_focus: None,\n            keymap,\n\n            inactive_pointer_locks: HashMap::default(),\n            pointer_lock: None,\n\n            cursor: Cursor::default(),\n        }\n    }\n}\n\nimpl Seat {\n    pub fn get_pointer(&mut self, wl_pointer: wl_pointer::WlPointer) {\n        let client_id = wl_pointer.client().expect(\"pointer has no client\").id();\n\n        self.pointers.insert(\n            wl_pointer,\n            Pointer {\n                client_id,\n                pending_frame: false,\n            },\n        );\n    }\n\n    pub fn get_relative_pointer(\n        &mut self,\n        wp_relative_pointer: zwp_relative_pointer_v1::ZwpRelativePointerV1,\n        wl_pointer: wl_pointer::WlPointer,\n    ) 
{\n        self.relative_pointers\n            .insert(wp_relative_pointer, wl_pointer);\n    }\n\n    pub fn get_keyboard(&mut self, wl_keyboard: wl_keyboard::WlKeyboard) {\n        use std::os::fd::AsFd as _;\n        wl_keyboard.keymap(\n            wl_keyboard::KeymapFormat::XkbV1,\n            self.keymap.as_fd(),\n            self.keymap.size() as u32,\n        );\n\n        // We disable client-side key repeat handling, and instead\n        // simulate it.\n        if wl_keyboard.version() >= 4 {\n            wl_keyboard.repeat_info(0, i32::MAX);\n        }\n\n        self.keyboards.insert(wl_keyboard);\n    }\n\n    pub fn get_text_input(&mut self, wp_text_input: zwp_text_input_v3::ZwpTextInputV3) {\n        self.text_inputs.insert(wp_text_input);\n    }\n\n    pub fn destroy_pointer(&mut self, wl_pointer: &wl_pointer::WlPointer) {\n        self.pointers.remove(wl_pointer);\n        self.inactive_pointer_locks\n            .retain(|_, lock| &lock.wl_pointer != wl_pointer);\n\n        match &mut self.pointer_lock {\n            Some((\n                _,\n                PointerLock {\n                    wl_pointer: p,\n                    defunct,\n                    ..\n                },\n            )) if p == wl_pointer => {\n                *defunct = true;\n            }\n            _ => (),\n        }\n    }\n\n    pub fn destroy_relative_pointer(\n        &mut self,\n        wp_relative_pointer: &zwp_relative_pointer_v1::ZwpRelativePointerV1,\n    ) {\n        self.relative_pointers.remove(wp_relative_pointer);\n    }\n\n    pub fn destroy_keyboard(&mut self, wl_keyboard: &wl_keyboard::WlKeyboard) {\n        self.keyboards.remove(wl_keyboard);\n    }\n\n    pub fn destroy_text_input(&mut self, wp_text_input: &zwp_text_input_v3::ZwpTextInputV3) {\n        self.text_inputs.remove(wp_text_input);\n    }\n\n    pub fn lift_pointer(&mut self, serial: &Serial) {\n        self.pointer_coords = None;\n\n        if let Some((surf, _)) = 
self.pointer_focus.take() {\n            if let Some(client) = surf.client() {\n                for (wl_pointer, p) in self\n                    .pointers\n                    .iter_mut()\n                    .filter(|(_, p)| p.client_id == client.id())\n                {\n                    p.pending_frame = true;\n                    wl_pointer.leave(serial.next(), &surf);\n                }\n            }\n        }\n    }\n\n    // Moves the pointer to a location.\n    pub fn update_pointer(\n        &mut self,\n        serial: &Serial,\n        focus: wl_surface::WlSurface,\n        surface_coords: impl Into<glam::DVec2>,\n        global_coords: impl Into<glam::DVec2>,\n    ) {\n        if self.pointer_lock.is_some() {\n            return;\n        }\n\n        self.pointer_coords = Some(global_coords.into());\n        let new_coords = surface_coords.into();\n        match self.pointer_focus.as_mut() {\n            Some((surf, coords)) if surf == &focus => {\n                // Round before checking for location equality.\n                if coords.round().as_ivec2() != new_coords.round().as_ivec2() {\n                    for (wl_pointer, p) in self\n                        .pointers\n                        .iter_mut()\n                        .filter(|(p, _)| p.is_alive() && p.id().same_client_as(&surf.id()))\n                    {\n                        p.pending_frame = true;\n                        wl_pointer.motion(\n                            EPOCH.elapsed().as_millis() as u32,\n                            new_coords.x,\n                            new_coords.y,\n                        );\n                    }\n                }\n\n                return;\n            }\n            _ => (),\n        }\n\n        if let Some((surf, _)) = self.pointer_focus.take() {\n            for (wl_pointer, p) in self\n                .pointers\n                .iter_mut()\n                .filter(|(p, _)| p.is_alive() && p.id().same_client_as(&surf.id()))\n  
          {\n                p.pending_frame = true;\n                wl_pointer.leave(serial.next(), &surf);\n            }\n        }\n\n        for (wl_pointer, p) in self\n            .pointers\n            .iter_mut()\n            .filter(|(p, _)| p.is_alive() && p.id().same_client_as(&focus.id()))\n        {\n            p.pending_frame = true;\n            wl_pointer.enter(serial.next(), &focus, new_coords.x, new_coords.y);\n        }\n\n        self.pointer_focus = Some((focus, new_coords));\n    }\n\n    pub fn relative_pointer_motion(&mut self, surface_vector: impl Into<glam::DVec2>) {\n        if self.pointer_lock.is_none() {\n            return;\n        }\n\n        let Some((focus, _)) = self.pointer_focus.as_ref() else {\n            return;\n        };\n\n        let vector = surface_vector.into();\n\n        let now = EPOCH.elapsed().as_micros() as u64;\n        let utime_hi = (now >> 32) as u32;\n        let utime_lo = (now & 0xffffffff) as u32;\n\n        for (wp_relative_pointer, wl_pointer) in self\n            .relative_pointers\n            .iter()\n            .filter(|(p, _)| p.id().same_client_as(&focus.id()))\n        {\n            wp_relative_pointer\n                .relative_motion(utime_hi, utime_lo, vector.x, vector.y, vector.x, vector.y);\n\n            if let Some(p) = self.pointers.get_mut(wl_pointer) {\n                p.pending_frame = true;\n            }\n        }\n    }\n\n    pub fn pointer_axis(&mut self, surface_vector: impl Into<glam::DVec2>) {\n        let vector = surface_vector.into();\n        let now = EPOCH.elapsed().as_millis() as u32;\n        for (wl_pointer, p) in self.focused_pointers() {\n            if vector.x != 0.0 {\n                wl_pointer.axis(now, wl_pointer::Axis::HorizontalScroll, vector.x);\n                p.pending_frame = true;\n            }\n\n            if vector.y != 0.0 {\n                wl_pointer.axis(now, wl_pointer::Axis::VerticalScroll, vector.y);\n                p.pending_frame 
= true;\n            }\n        }\n    }\n\n    pub fn pointer_axis_discrete(&mut self, vector: impl Into<glam::DVec2>) {\n        let vector = vector.into();\n        for (wl_pointer, p) in self.focused_pointers() {\n            if vector.x != 0.0 {\n                send_axis_discrete(wl_pointer, wl_pointer::Axis::HorizontalScroll, vector.x);\n                p.pending_frame = true;\n            }\n\n            if vector.y != 0.0 {\n                send_axis_discrete(wl_pointer, wl_pointer::Axis::VerticalScroll, vector.y);\n                p.pending_frame = true;\n            }\n        }\n    }\n\n    pub fn pointer_input(\n        &mut self,\n        serial: &Serial,\n        surface: wl_surface::WlSurface,\n        surface_coords: impl Into<glam::DVec2>,\n        global_coords: impl Into<glam::DVec2>,\n        button_code: u32,\n        state: ButtonState,\n    ) {\n        let coords = surface_coords.into();\n        self.update_pointer(serial, surface.clone(), coords, global_coords);\n\n        for (wl_pointer, p) in self.focused_pointers() {\n            p.pending_frame = true;\n            wl_pointer.button(\n                serial.next(),\n                EPOCH.elapsed().as_millis() as u32,\n                button_code,\n                state.into(),\n            );\n        }\n    }\n\n    pub fn pointer_frame(&mut self) {\n        for (wl_pointer, p) in self.pointers.iter_mut() {\n            if p.pending_frame {\n                if wl_pointer.version() >= 5 {\n                    wl_pointer.frame();\n                }\n\n                p.pending_frame = false;\n            }\n        }\n    }\n\n    fn focused_pointers(&mut self) -> impl Iterator<Item = (&wl_pointer::WlPointer, &mut Pointer)> {\n        let client_id = self\n            .pointer_focus\n            .as_ref()\n            .and_then(|(focus, _)| focus.client())\n            .map(|c| c.id());\n\n        self.pointers\n            .iter_mut()\n            .filter(move |(p, _)| p.is_alive() 
&& p.client().map(|c| c.id()) == client_id)\n    }\n\n    pub fn set_keyboard_focus(&mut self, serial: &Serial, surface: Option<wl_surface::WlSurface>) {\n        if self.keyboard_focus == surface {\n            return;\n        }\n\n        if let Some(old_surf) = self.keyboard_focus.take() {\n            for wl_keyboard in self\n                .keyboards\n                .iter()\n                .filter(|k| k.id().same_client_as(&old_surf.id()))\n            {\n                wl_keyboard.leave(serial.next(), &old_surf);\n            }\n\n            for wp_text_input in self\n                .text_inputs\n                .iter()\n                .filter(|ti| ti.id().same_client_as(&old_surf.id()))\n            {\n                wp_text_input.leave(&old_surf);\n            }\n        }\n\n        if let Some(new_surf) = surface.as_ref() {\n            for wl_keyboard in self\n                .keyboards\n                .iter()\n                .filter(|k| k.id().same_client_as(&new_surf.id()))\n            {\n                wl_keyboard.enter(serial.next(), new_surf, Vec::new());\n                // TODO we're responsible for sending the list of depressed\n                // modifiers. 
For our use case, this isn't very important.\n                wl_keyboard.modifiers(serial.next(), 0, 0, 0, 0);\n            }\n\n            for wp_text_input in self\n                .text_inputs\n                .iter()\n                .filter(|ti| ti.id().same_client_as(&new_surf.id()))\n            {\n                wp_text_input.enter(new_surf);\n            }\n        }\n\n        self.keyboard_focus = surface;\n    }\n\n    pub fn keyboard_input(&mut self, serial: &Serial, scancode: u32, state: KeyState) {\n        let state = match state {\n            KeyState::Pressed => wl_keyboard::KeyState::Pressed,\n            KeyState::Released => wl_keyboard::KeyState::Released,\n            KeyState::Repeat => unreachable!(),\n        };\n\n        for wl_keyboard in self.focused_keyboards() {\n            wl_keyboard.key(\n                serial.next(),\n                EPOCH.elapsed().as_millis() as u32,\n                scancode,\n                state,\n            );\n        }\n    }\n\n    pub fn focused_keyboards(&self) -> impl Iterator<Item = &wl_keyboard::WlKeyboard> {\n        let client_id = self\n            .keyboard_focus\n            .as_ref()\n            .and_then(|focus| focus.client())\n            .map(|c| c.id());\n\n        self.keyboards\n            .iter()\n            .filter(move |k| k.is_alive() && k.client().map(|c| c.id()) == client_id)\n    }\n\n    pub fn has_text_input(&mut self) -> bool {\n        self.focused_text_inputs().count() > 0\n    }\n\n    pub fn text_input_char(&mut self, serial: &Serial, ch: char) {\n        if let Some(focus) = self.keyboard_focus.as_ref() {\n            for wp_text_input in self\n                .text_inputs\n                .iter()\n                .filter(|ti| ti.id().same_client_as(&focus.id()))\n            {\n                wp_text_input.commit_string(Some(ch.into()));\n                wp_text_input.done(serial.next())\n            }\n        }\n    }\n\n    fn focused_text_inputs(&mut 
self) -> impl Iterator<Item = &zwp_text_input_v3::ZwpTextInputV3> {\n        let client_id = self\n            .keyboard_focus\n            .as_ref()\n            .and_then(|focus| focus.client())\n            .map(|c| c.id());\n\n        self.text_inputs\n            .iter()\n            .filter(move |ti| ti.is_alive() && ti.client().map(|c| c.id()) == client_id)\n    }\n\n    pub fn pointer_focus(&self) -> Option<wl_surface::WlSurface> {\n        self.pointer_focus.as_ref().map(|(surf, _)| surf).cloned()\n    }\n\n    #[allow(dead_code)]\n    pub fn keyboard_focus(&self) -> Option<wl_surface::WlSurface> {\n        self.keyboard_focus.clone()\n    }\n\n    pub fn pointer_coords(&self) -> Option<glam::DVec2> {\n        self.pointer_coords\n    }\n\n    pub fn pointer_locked(&self) -> Option<glam::DVec2> {\n        if self.pointer_lock.is_some() {\n            Some(self.pointer_coords.unwrap_or_default())\n        } else {\n            None\n        }\n    }\n\n    pub fn has_lock(&self, wl_surface: &wl_surface::WlSurface) -> bool {\n        if self\n            .pointer_lock\n            .as_ref()\n            .is_some_and(|(surf, lock)| surf == wl_surface && !lock.defunct)\n        {\n            return true;\n        }\n\n        // Check for inactive locks that aren't already destroyed.\n        self.inactive_pointer_locks\n            .get(wl_surface)\n            .is_some_and(|lock| !lock.defunct)\n    }\n\n    pub fn create_lock(\n        &mut self,\n        wl_pointer: wl_pointer::WlPointer,\n        wl_surface: wl_surface::WlSurface,\n        wp_locked_pointer: zwp_locked_pointer_v1::ZwpLockedPointerV1,\n        oneshot: bool,\n    ) {\n        if self\n            .inactive_pointer_locks\n            .insert(\n                wl_surface,\n                PointerLock {\n                    wp_locked_pointer,\n                    wl_pointer,\n                    oneshot,\n                    defunct: false,\n                },\n            )\n            
.is_some()\n        {\n            panic!(\"constraint already exists for surface\");\n        }\n    }\n\n    pub fn destroy_lock(&mut self, wp_locked_pointer: &zwp_locked_pointer_v1::ZwpLockedPointerV1) {\n        self.inactive_pointer_locks\n            .retain(|_, lock| &lock.wp_locked_pointer != wp_locked_pointer);\n\n        match &mut self.pointer_lock {\n            Some((\n                _,\n                PointerLock {\n                    wp_locked_pointer: lock,\n                    defunct,\n                    ..\n                },\n            )) if lock == wp_locked_pointer => {\n                // Cleared in update_pointer_lock.\n                *defunct = true;\n            }\n            _ => (),\n        }\n    }\n}\n\nimpl Compositor {\n    pub fn handle_input_event(&mut self, ev: ControlMessage) {\n        match ev {\n            ControlMessage::KeyboardInput {\n                key_code,\n                char,\n                state,\n            } => {\n                // Attempt to send the char via text-input, then fall back to\n                // sending the keypress.\n                match char {\n                    Some(c) if self.default_seat.has_text_input() => {\n                        if matches!(state, KeyState::Pressed | KeyState::Repeat) {\n                            self.default_seat.text_input_char(&self.serial, c);\n                        }\n                    }\n                    _ => {\n                        let mut state = state;\n\n                        // Simulate a press and release on repeat.\n                        if state == KeyState::Repeat {\n                            self.default_seat.keyboard_input(\n                                &self.serial,\n                                key_code,\n                                KeyState::Released,\n                            );\n\n                            state = KeyState::Pressed\n                        }\n\n                        
self.default_seat\n                            .keyboard_input(&self.serial, key_code, state);\n                    }\n                }\n            }\n            ControlMessage::PointerInput {\n                x,\n                y,\n                button_code,\n                state,\n            } => {\n                if let Some((id, surface_coords)) = self.surface_under((x, y)) {\n                    let wl_surface = self.surfaces[id].wl_surface.clone();\n\n                    self.default_seat.pointer_input(\n                        &self.serial,\n                        wl_surface,\n                        surface_coords,\n                        (x, y),\n                        button_code,\n                        state,\n                    );\n                } else {\n                    self.default_seat.lift_pointer(&self.serial);\n                }\n            }\n\n            ControlMessage::PointerMotion(x, y) => {\n                if let Some((id, surface_coords)) = self.surface_under((x, y)) {\n                    let wl_surface = self.surfaces[id].wl_surface.clone();\n\n                    self.default_seat.update_pointer(\n                        &self.serial,\n                        wl_surface,\n                        surface_coords,\n                        (x, y),\n                    );\n                } else {\n                    self.default_seat.lift_pointer(&self.serial);\n                }\n            }\n            ControlMessage::RelativePointerMotion(x, y) => {\n                let scale = self\n                    .default_seat\n                    .pointer_focus()\n                    .and_then(|wl_surface| wl_surface.data().copied())\n                    .and_then(|id| self.surfaces.get(id))\n                    .map(|surf| surf.effective_scale())\n                    .unwrap_or_default();\n\n                let vector = surface::buffer_vector_to_surface((x, y), scale);\n                
self.default_seat.relative_pointer_motion(vector);\n            }\n            ControlMessage::PointerAxis(x, y) => {\n                let scale = self\n                    .default_seat\n                    .pointer_focus()\n                    .and_then(|wl_surface| wl_surface.data().copied())\n                    .and_then(|id| self.surfaces.get(id))\n                    .map(|surf| surf.effective_scale())\n                    .unwrap_or_default();\n\n                // Note that the protocol and wayland use inverted vectors.\n                let vector = surface::buffer_vector_to_surface((-x, -y), scale);\n                self.default_seat.pointer_axis(vector);\n            }\n            ControlMessage::PointerAxisDiscrete(x, y) => {\n                self.default_seat.pointer_axis_discrete((-x, -y));\n            }\n            ControlMessage::PointerEntered => {\n                // Nothing to do - we update focus when the pointer moves.\n            }\n            ControlMessage::PointerLeft => {\n                self.default_seat.lift_pointer(&self.serial);\n            }\n            _ => unreachable!(),\n        }\n    }\n\n    pub fn update_pointer_lock(&mut self) {\n        let seat = &mut self.default_seat;\n        let focus = seat.pointer_focus();\n\n        if let Some((wl_surface, lock)) = &seat.pointer_lock {\n            if !lock.defunct\n                && lock.wp_locked_pointer.is_alive()\n                && Some(wl_surface) == focus.as_ref()\n            {\n                // Same surface, active lock, nothing to do.\n                return;\n            }\n        }\n\n        let prev_lock = if let Some((surf, lock)) = seat.pointer_lock.take() {\n            lock.wp_locked_pointer.unlocked();\n\n            let lock_clone = lock.wp_locked_pointer.clone();\n            if !lock.defunct && !lock.oneshot && lock.wp_locked_pointer.is_alive() {\n                seat.inactive_pointer_locks.insert(surf, lock);\n            }\n\n            
Some(lock_clone)\n        } else {\n            None\n        };\n\n        if let Some((wl_surface, lock)) = focus\n            .as_ref()\n            .and_then(|s| seat.inactive_pointer_locks.remove_entry(s))\n        {\n            lock.wp_locked_pointer.locked();\n            seat.pointer_lock = Some((wl_surface, lock));\n            let (x, y) = seat.pointer_coords().unwrap_or_default().into();\n\n            debug!(surface = ?focus, x, y, \"activating pointer lock\");\n            self.session_handle\n                .dispatch(SessionEvent::PointerLocked(x, y));\n        } else if let Some(wp_locked_pointer) = prev_lock {\n            wp_locked_pointer.unlocked();\n\n            debug!(\"pointer lock released\");\n            self.session_handle.dispatch(SessionEvent::PointerReleased);\n        }\n    }\n\n    pub fn set_cursor(&mut self, wl_pointer: &wl_pointer::WlPointer, cursor: Cursor) {\n        if !self\n            .default_seat\n            .pointer_focus\n            .as_ref()\n            .is_some_and(|(wl_surface, _)| wl_surface.id().same_client_as(&wl_pointer.id()))\n        {\n            return;\n        }\n\n        match cursor {\n            Cursor::Unset => unreachable!(),\n            Cursor::Surface { surface: id, .. 
} => {\n                let Some(surface) = self.surfaces.get_mut(id) else {\n                    return;\n                };\n\n                if surface.role.current.is_some()\n                    && surface.role.current != Some(SurfaceRole::Cursor)\n                {\n                    debug!(\n                        ?surface,\n                        \"ignoring cursor role for surface with preexisting role\"\n                    );\n\n                    return;\n                }\n\n                surface.role.current = Some(SurfaceRole::Cursor);\n            }\n            _ => (),\n        }\n\n        let old_cursor = std::mem::replace(&mut self.default_seat.cursor, cursor);\n        if let Cursor::Surface { surface: id, .. } = old_cursor {\n            if let Some(surface) = self.surfaces.get_mut(id) {\n                surface.role.current = None;\n                self.unmap_surface(id);\n            }\n        }\n\n        self.dispatch_cursor();\n    }\n\n    pub fn dispatch_cursor(&mut self) {\n        match &mut self.default_seat.cursor {\n            Cursor::Unset => (),\n            Cursor::Surface {\n                needs_render,\n                rendered: Some((img, hotspot)),\n                ..\n            } if !*needs_render => {\n                self.session_handle.dispatch(SessionEvent::CursorUpdate {\n                    image: Some(img.clone()),\n                    icon: None,\n                    hotspot_x: hotspot.x,\n                    hotspot_y: hotspot.y,\n                });\n            }\n            Cursor::Surface { .. 
} => {\n                // The cursor will be dispatched after it's rendered during the\n                // next frame.\n            }\n            Cursor::Hidden => self.session_handle.dispatch(SessionEvent::CursorUpdate {\n                image: None,\n                icon: None,\n                hotspot_x: 0,\n                hotspot_y: 0,\n            }),\n        }\n    }\n\n    pub fn render_cursor(&mut self) -> anyhow::Result<()> {\n        let Cursor::Surface {\n            surface,\n            hotspot,\n            needs_render,\n            rendered,\n        } = &mut self.default_seat.cursor\n        else {\n            return Ok(());\n        };\n\n        if !*needs_render {\n            return Ok(());\n        }\n\n        let surface = &mut self.surfaces[*surface];\n        let buffer = surface.content.as_ref().map(|c| &self.buffers[c.buffer]);\n\n        let image = match buffer.map(|b| &b.backing) {\n            None => return Ok(()), // No content yet, try again later.\n            Some(BufferBacking::Dmabuf { .. 
}) => {\n                warn!(\"ignoring dmabuf cursor texture\");\n\n                // TODO: for now, we set the cursor to the default.\n                *needs_render = false;\n                self.session_handle.dispatch(SessionEvent::CursorUpdate {\n                    image: None,\n                    icon: Some(cursor_icon::CursorIcon::Default),\n                    hotspot_x: 0,\n                    hotspot_y: 0,\n                });\n\n                return Ok(());\n            }\n            Some(BufferBacking::Shm {\n                format,\n                staging_buffer,\n                ..\n            }) => {\n                debug!(\"rendering cursor to png\");\n                shm_to_png(staging_buffer, *format)?\n            }\n        };\n\n        let scale = surface.effective_scale();\n        let hotspot = surface_vector_to_buffer(*hotspot, scale).as_uvec2();\n\n        self.session_handle.dispatch(SessionEvent::CursorUpdate {\n            image: Some(image.clone()),\n            icon: None,\n            hotspot_x: hotspot.x,\n            hotspot_y: hotspot.y,\n        });\n\n        *rendered = Some((image, hotspot));\n        *needs_render = false;\n        if let Some(cb) = surface.frame_callback.current.take() {\n            cb.done(EPOCH.elapsed().as_millis() as u32);\n        }\n\n        Ok(())\n    }\n}\n\nfn send_axis_discrete(pointer: &wl_pointer::WlPointer, axis: wl_pointer::Axis, value: f64) {\n    let version = pointer.version();\n    if (5..8).contains(&version) {\n        pointer.axis_discrete(axis, value.trunc() as i32);\n    } else if version >= 8 {\n        pointer.axis_value120(axis, (value * 120.0).round() as i32);\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/serial.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::sync::atomic::{AtomicU32, Ordering};\n\npub struct Serial(AtomicU32);\n\nconst START: u32 = 1000;\n\nimpl Serial {\n    pub fn new() -> Self {\n        Self(AtomicU32::new(START))\n    }\n\n    pub fn next(&self) -> u32 {\n        // Wrap around, but skip zero.\n        let _ = self\n            .0\n            .compare_exchange(0, START, Ordering::AcqRel, Ordering::SeqCst);\n\n        self.0.fetch_add(1, Ordering::AcqRel)\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/shm.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    os::fd::{AsFd, OwnedFd},\n    sync::{Arc, RwLock},\n};\n\nuse anyhow::bail;\nuse rustix::mm::{mmap, munmap, MapFlags, ProtFlags};\nuse wayland_server::protocol::{wl_shm, wl_shm_pool};\n\n// TODO: malicious or broken clients can cause us to crash with SIGBUS. We\n// should handle that with a exception handler.\n\nslotmap::new_key_type! { pub struct ShmPoolKey; }\n\npub struct ShmPool {\n    pub _wl_shm: wl_shm::WlShm,\n    pub _wl_shm_pool: wl_shm_pool::WlShmPool,\n    pub pool: Arc<RwLock<Pool>>,\n}\n\n#[derive(Debug)]\npub struct Pool {\n    fd: OwnedFd,\n    ptr: *mut u8,\n    pub size: usize,\n}\n\nimpl Pool {\n    pub fn new(fd: OwnedFd, size: usize) -> anyhow::Result<Self> {\n        let ptr = unsafe { map(&fd, size)? };\n\n        Ok(Pool { fd, size, ptr })\n    }\n\n    pub fn data(&self, offset: usize, len: usize) -> &[u8] {\n        assert!(offset + len <= self.size);\n        unsafe { std::slice::from_raw_parts(self.ptr.add(offset), len) }\n    }\n\n    pub fn resize(&mut self, new_size: usize) -> anyhow::Result<()> {\n        if self.ptr.is_null() {\n            bail!(\"mmap defunct\");\n        }\n\n        self.unmap();\n\n        let ptr = unsafe { map(&self.fd, new_size)? 
};\n        self.ptr = ptr;\n        self.size = new_size;\n\n        Ok(())\n    }\n\n    fn unmap(&mut self) {\n        assert!(!self.ptr.is_null());\n\n        unsafe { munmap(self.ptr as *mut _, self.size).expect(\"munmap failed\") }\n        self.ptr = std::ptr::null_mut();\n        self.size = 0;\n    }\n}\n\nunsafe impl Send for Pool {}\n\nunsafe impl Sync for Pool {}\n\nunsafe fn map(fd: impl AsFd, size: usize) -> anyhow::Result<*mut u8> {\n    if size == 0 {\n        bail!(\"zero-sized mmap\");\n    }\n\n    let ptr = mmap(\n        std::ptr::null_mut(),\n        size,\n        ProtFlags::READ | ProtFlags::WRITE,\n        MapFlags::SHARED,\n        fd,\n        0,\n    )?;\n\n    Ok(ptr as *mut u8)\n}\n\nimpl Drop for Pool {\n    fn drop(&mut self) {\n        if !self.ptr.is_null() {\n            self.unmap();\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/stack.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse tracing::{debug, trace};\nuse wayland_server::Resource as _;\n\nuse crate::session::compositor::{\n    buffers::BufferKey,\n    surface::{self, SurfaceKey, SurfaceRole},\n    Compositor,\n};\n\nimpl Compositor {\n    /// Displays the surface, if it has not yet been displayed.\n    pub fn map_surface(&mut self, id: SurfaceKey, buffer_id: BufferKey) {\n        if self.surface_stack.contains(&id) {\n            return;\n        }\n\n        let surface = &self.surfaces[id];\n        let config = surface.configuration.expect(\"mapping unconfigured surface\");\n\n        let buffer = &self.buffers[buffer_id];\n        let buffer_size = buffer.dimensions();\n\n        trace!(?surface, ?buffer_size, \"mapping surface\");\n        if buffer_size != config.size {\n            debug!(\n                expected = ?(config.size.x, config.size.y),\n                actual = ?(buffer_size.x, buffer_size.y),\n                \"unexpected buffer dimensions\"\n            );\n        }\n\n        for wl_output in self\n            .output_proxies\n            .iter()\n            .filter(|wl_output| wl_output.id().same_client_as(&surface.wl_surface.id()))\n        {\n            surface.wl_surface.enter(wl_output);\n        }\n\n        trace!(?surface, \"surface mapped\");\n        self.surface_stack.push(id);\n    }\n\n    /// Removes any configuration and attached buffer from a surface. 
This\n    /// happens if a nil buffer is committed or the role object is destroyed\n    /// by the client.\n    pub fn unmap_surface(&mut self, id: SurfaceKey) {\n        let surface = &mut self.surfaces[id];\n        trace!(?surface, \"surface unmapped\");\n\n        surface.content = None;\n        surface.pending_configure = None;\n        surface.configuration = None;\n        surface.sent_configuration = None;\n\n        self.surface_stack.retain(|v| *v != id);\n    }\n\n    /// Raises an X11 window to the top.\n    pub fn raise_x11_surface(&mut self, serial: u64) {\n        let stack_position = self\n            .xwayland_surface_lookup\n            .get(&serial)\n            .and_then(|surface_id| self.surface_stack.iter().rposition(|id| surface_id == id));\n\n        if let Some(pos) = stack_position {\n            self.raise_surface_at(pos);\n        }\n    }\n\n    fn raise_surface_at(&mut self, position: usize) {\n        let id = self.surface_stack.remove(position);\n\n        if tracing::event_enabled!(tracing::Level::TRACE) {\n            trace!(surf = ?&self.surfaces[id], \"raising surface\");\n        }\n\n        self.surface_stack.push(id);\n    }\n\n    /// Updates focus and surface configurations based on any changes made to\n    /// the stack order, mapping and unmapping of surfaces, etc.\n    pub fn update_focus_and_visibility(&mut self, active: bool) -> anyhow::Result<()> {\n        let top_surface = if active {\n            self.surface_stack.last().cloned()\n        } else {\n            None\n        };\n\n        if top_surface == self.active_surface {\n            return Ok(());\n        }\n\n        // Mark the old active surface as occluded.\n        if let Some(conf) = self\n            .active_surface\n            .take()\n            .and_then(|id| self.surfaces.get_mut(id))\n            .and_then(|surf| surf.configuration.as_mut())\n        {\n            conf.visibility = surface::Visibility::Occluded;\n        }\n\n        if let 
Some(focus) = top_surface {\n            let surf = &mut self.surfaces[focus];\n            trace!(active, focus = ?surf, \"setting focus\");\n\n            let conf = surf\n                .configuration\n                .as_mut()\n                .expect(\"mapped surface with no configuration\");\n            let is_fullscreen = conf.fullscreen;\n            conf.visibility = surface::Visibility::Active;\n\n            self.active_surface = Some(focus);\n            self.default_seat\n                .set_keyboard_focus(&self.serial, Some(surf.wl_surface.clone()));\n\n            // Xwayland maintains its own focus.\n            if let Some(SurfaceRole::XWayland { serial }) = &surf.role.current {\n                let xwm = self.xwm.as_mut().unwrap();\n                let id = xwm.xwindow_for_serial(*serial).map(|xwin| xwin.id);\n                xwm.set_focus(id)?;\n            } else if let Some(xwm) = &mut self.xwm {\n                // The xwayland window is occluded by a wayland window.\n                xwm.set_focus(None)?;\n            }\n\n            trace!(?surf, depth = self.surface_stack.len(), \"focus changed\");\n\n            // The surface under the cursor could be different from the top one.\n            if let Some(coords) = self.default_seat.pointer_coords() {\n                if let Some((pointer_focus, surface_coords)) = self.surface_under(coords) {\n                    let wl_surface = self.surfaces[pointer_focus].wl_surface.clone();\n\n                    self.default_seat.update_pointer(\n                        &self.serial,\n                        wl_surface,\n                        surface_coords,\n                        coords,\n                    );\n                }\n            }\n\n            // If the top window isn't covering the entire output, make sure we\n            // uncover the windows below.\n            if !is_fullscreen {\n                for surface_id in self.surface_stack.iter().rev().skip(1) {\n                  
  let conf = self.surfaces[*surface_id]\n                        .configuration\n                        .as_mut()\n                        .expect(\"mapped surface with no configuration\");\n\n                    conf.visibility = surface::Visibility::Visible;\n                    if conf.fullscreen {\n                        break;\n                    }\n                }\n            }\n        } else {\n            self.default_seat.set_keyboard_focus(&self.serial, None);\n            self.default_seat.lift_pointer(&self.serial);\n\n            if let Some(xwm) = &mut self.xwm {\n                xwm.set_focus(None)?;\n            }\n        }\n\n        Ok(())\n    }\n\n    pub fn surface_under(\n        &mut self,\n        coords: impl Into<glam::DVec2>,\n    ) -> Option<(SurfaceKey, glam::DVec2)> {\n        let coords = coords.into();\n\n        for id in self.surface_stack.iter().rev() {\n            let surf = &self.surfaces[*id];\n\n            if let Some(surface_coords) = surf.surface_coords(coords.round().as_uvec2()) {\n                return Some((*id, surface_coords));\n            }\n        }\n\n        None\n    }\n\n    /// Returns true if all visible surfaces have settled (with no configure\n    /// pending) and have content.\n    pub fn surfaces_ready(&self) -> bool {\n        if self.surface_stack.is_empty() {\n            return false;\n        }\n\n        // Iterate backwards to find the first fullscreen window.\n        let first_visible_idx = self.surface_stack.iter().rposition(|id| {\n            self.surfaces[*id]\n                .configuration\n                .is_some_and(|conf| conf.fullscreen)\n        });\n\n        for id in &self.surface_stack[first_visible_idx.unwrap_or_default()..] 
{\n            let surf = &self.surfaces[*id];\n            if surf.content.is_none() || surf.pending_configure.is_some() {\n                debug!(\n                    ?surf,\n                    content_is_some = surf.content.is_some(),\n                    pending_configure = ?surf.pending_configure,\n                    \"surface not ready!\"\n                );\n                return false;\n            }\n        }\n\n        true\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/surface.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::time;\n\nuse tracing::{debug, trace, warn};\nuse wayland_protocols::{\n    wp::{\n        fractional_scale::v1::server::wp_fractional_scale_v1,\n        linux_drm_syncobj::v1::server::wp_linux_drm_syncobj_surface_v1,\n        presentation_time::server::wp_presentation_feedback,\n    },\n    xdg::shell::server::{xdg_surface, xdg_toplevel},\n};\nuse wayland_server::{\n    protocol::{wl_callback, wl_surface},\n    Resource as _,\n};\n\nuse super::buffers::SyncobjTimelinePoint;\nuse crate::{\n    pixel_scale::PixelScale,\n    session::compositor::{\n        buffers::{BufferBacking, BufferKey},\n        xwayland, Compositor, DisplayParams,\n    },\n    vulkan::VkTimelinePoint,\n};\n\nslotmap::new_key_type! { pub struct SurfaceKey; }\n\n#[derive(Clone)]\npub struct Surface {\n    pub wl_surface: wl_surface::WlSurface,\n\n    pub wp_fractional_scale: Option<wp_fractional_scale_v1::WpFractionalScaleV1>,\n\n    pub pending_buffer: Option<PendingBuffer>,\n    pub pending_feedback: Option<wp_presentation_feedback::WpPresentationFeedback>,\n    pub frame_callback: DoubleBuffered<wl_callback::WlCallback>,\n    pub buffer_scale: DoubleBuffered<PixelScale>,\n    pub content: Option<ContentUpdate>,\n\n    pub wp_syncobj_surface: Option<wp_linux_drm_syncobj_surface_v1::WpLinuxDrmSyncobjSurfaceV1>,\n    pub pending_acquire_point: Option<SyncobjTimelinePoint>,\n    pub pending_release_point: Option<SyncobjTimelinePoint>,\n\n    pub role: DoubleBuffered<SurfaceRole>,\n    pub sent_configuration: Option<SurfaceConfiguration>,\n    pub configuration: Option<SurfaceConfiguration>,\n    pub pending_configure: Option<u32>,\n\n    pub title: Option<String>,\n    pub app_id: Option<String>,\n}\n\nimpl Surface {\n    pub fn new(wl_surface: wl_surface::WlSurface) -> Self {\n        Self {\n            wl_surface,\n            wp_fractional_scale: None,\n\n            
pending_buffer: None,\n            pending_feedback: None,\n            frame_callback: DoubleBuffered::default(),\n            buffer_scale: DoubleBuffered::default(),\n            content: None,\n\n            wp_syncobj_surface: None,\n            pending_acquire_point: None,\n            pending_release_point: None,\n\n            role: DoubleBuffered::default(),\n            sent_configuration: None,\n            configuration: None,\n            pending_configure: None,\n\n            title: None,\n            app_id: None,\n        }\n    }\n\n    pub fn reconfigure(&mut self, params: DisplayParams, xwin: Option<&xwayland::XWindow>) {\n        // Keep current visibility, or start new windows visible.\n        let visibility = self\n            .configuration\n            .map_or(Visibility::Visible, |c| c.visibility);\n\n        let conf = match self.role.current {\n            None | Some(SurfaceRole::Cursor) => None,\n            Some(SurfaceRole::XdgToplevel { .. }) => Some(SurfaceConfiguration {\n                topleft: glam::UVec2::ZERO,\n                size: (params.width, params.height).into(),\n                scale: params.ui_scale,\n                visibility,\n                fullscreen: true,\n            }),\n            Some(SurfaceRole::XWayland { .. 
}) => {\n                match xwin {\n                    None => None,\n                    Some(xwayland::XWindow {\n                        x,\n                        y,\n                        width,\n                        height,\n                        override_redirect,\n                        ..\n                    }) if *override_redirect => Some(SurfaceConfiguration {\n                        topleft: (*x, *y).into(),\n                        size: (*width, *height).into(),\n                        scale: PixelScale::ONE,\n                        visibility,\n                        fullscreen: false,\n                    }),\n                    Some(_) => {\n                        Some(SurfaceConfiguration {\n                            topleft: glam::UVec2::ZERO,\n                            size: (params.width, params.height).into(),\n                            scale: PixelScale::ONE, // XWayland always uses scale one.\n                            visibility,\n                            fullscreen: true,\n                        })\n                    }\n                }\n            }\n        };\n\n        self.configuration = conf;\n    }\n\n    /// Takes a point in the physical configuration space, and returns\n    /// wayland-specific logical surface coordinates.\n    pub fn surface_coords(&self, coords: impl Into<glam::DVec2>) -> Option<glam::DVec2> {\n        let conf = self.configuration?;\n        let buffer_size = self\n            .content\n            .as_ref()\n            .map(|content| content.dimensions.as_dvec2())?;\n\n        let coords = coords.into();\n        let topleft = conf.topleft.as_dvec2();\n        let bottomright = topleft + conf.size.as_dvec2();\n\n        if conf.fullscreen\n            || (coords.x >= topleft.x\n                && coords.y >= topleft.y\n                && coords.x < bottomright.x\n                && coords.y < bottomright.y)\n        {\n            let offset_coords = coords - 
conf.topleft.as_dvec2();\n\n            let buffer_coords = offset_coords * (buffer_size / conf.size.as_dvec2());\n            Some(buffer_vector_to_surface(\n                buffer_coords,\n                self.effective_scale(),\n            ))\n        } else {\n            None\n        }\n    }\n\n    pub fn effective_scale(&self) -> PixelScale {\n        self.buffer_scale.current.unwrap_or_default()\n    }\n}\n\nimpl std::fmt::Debug for Surface {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        let name = self\n            .title\n            .as_ref()\n            .or(self.app_id.as_ref())\n            .map(|s| s.as_str())\n            .unwrap_or(\"Untitled\");\n\n        let (role, id, extra) = match &self.role.current {\n            None => (\"wl_surface\", self.wl_surface.id().protocol_id() as u64, \"\"),\n            Some(SurfaceRole::Cursor) => (\n                \"wl_surface\",\n                self.wl_surface.id().protocol_id() as u64,\n                \" [CURSOR]\",\n            ),\n            Some(SurfaceRole::XdgToplevel { xdg_toplevel, .. 
}) => {\n                (\"xdg_toplevel\", xdg_toplevel.id().protocol_id() as u64, \"\")\n            }\n            Some(SurfaceRole::XWayland { serial }) => (\"xwayland\", *serial, \"\"),\n        };\n\n        write!(f, \"<{:?} {}@{}{}>\", name, role, id, extra)?;\n\n        Ok(())\n    }\n}\n\n#[derive(Debug, Clone, Eq, PartialEq)]\npub struct DoubleBuffered<T: Clone + Eq + PartialEq> {\n    pub pending: Option<T>,\n    pub current: Option<T>,\n}\n\nimpl<T: Clone + Eq + PartialEq> Default for DoubleBuffered<T> {\n    fn default() -> Self {\n        Self {\n            pending: None,\n            current: None,\n        }\n    }\n}\n\n#[derive(Debug)]\npub enum CommitResult<T> {\n    NoChange,\n    Added(T),\n    Replaced(T, T),\n}\n\nimpl<T: Clone + Eq + PartialEq> DoubleBuffered<T> {\n    pub fn promote(&mut self) -> CommitResult<T> {\n        if self.pending.is_none() || self.pending == self.current {\n            self.pending = None;\n            return CommitResult::NoChange;\n        }\n\n        match (self.pending.take(), self.current.take()) {\n            (Some(v), None) => {\n                self.current = Some(v.clone());\n                CommitResult::Added(v)\n            }\n            (Some(new), Some(old)) if new != old => {\n                self.current = Some(new.clone());\n                CommitResult::Replaced(old, new)\n            }\n            _ => unreachable!(),\n        }\n    }\n}\n\n#[derive(Debug, Clone, Eq, PartialEq)]\npub enum SurfaceRole {\n    XdgToplevel {\n        xdg_surface: xdg_surface::XdgSurface,\n        xdg_toplevel: xdg_toplevel::XdgToplevel,\n    },\n    XWayland {\n        serial: u64,\n    },\n    Cursor,\n}\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\npub enum Visibility {\n    Occluded,\n    Visible,\n    Active,\n}\n\n/// The configuration to be sent to the surface.\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\npub struct SurfaceConfiguration {\n    // x, y, width, and height are in the \"physical\" 
coordinate space. x and y\n    // are not relevant to xdg_shell surfaces.\n    pub topleft: glam::UVec2,\n    pub size: glam::UVec2,\n    pub scale: PixelScale,\n    pub fullscreen: bool,\n    pub visibility: Visibility,\n}\n\nimpl SurfaceConfiguration {}\n\n#[derive(Debug, Clone, Eq, PartialEq)]\npub enum PendingBuffer {\n    Attach(BufferKey),\n    Detach,\n}\n\n#[derive(Clone)]\npub struct ContentUpdate {\n    pub buffer: BufferKey,\n\n    /// Whether the client is waiting on a buffer.release().\n    pub needs_release: bool,\n\n    /// Used for explicit sync.\n    pub explicit_sync: Option<(SyncobjTimelinePoint, SyncobjTimelinePoint)>,\n\n    /// If the content update is in use, this timeline point indicates when it\n    /// will be free.\n    pub tp_done: Option<VkTimelinePoint>,\n\n    /// The real dimensions of the buffer. This is how surface coordinates are\n    /// determined in wayland.\n    pub dimensions: glam::UVec2,\n    pub wp_presentation_feedback: Option<wp_presentation_feedback::WpPresentationFeedback>,\n}\n\npub struct PendingPresentationFeedback(\n    pub wp_presentation_feedback::WpPresentationFeedback,\n    pub VkTimelinePoint,\n);\n\npub struct CommitError(pub xdg_surface::Error, pub String);\n\nimpl Compositor {\n    /// Handles wl_surface.commit.\n    pub fn surface_commit(&mut self, id: SurfaceKey) -> Result<(), CommitError> {\n        let display_params = self.display_params;\n        let surface = &mut self.surfaces[id];\n\n        // Buffer swap happens first. 
We handle it a bit differently because\n        // buffers can be removed, not just overwritten.\n        let mut feedback = surface.pending_feedback.take();\n        match surface.pending_buffer.take() {\n            Some(PendingBuffer::Detach) => {\n                self.unmap_surface(id);\n                return Ok(());\n            }\n            Some(PendingBuffer::Attach(buffer_id)) => {\n                // Creates a content update.\n                let buffer = &mut self.buffers[buffer_id];\n\n                // If we haven't yet sent a configure, it's an error to\n                // manipulate a buffer.\n                if (matches!(surface.role.current, Some(SurfaceRole::XdgToplevel { .. }))\n                    && surface.sent_configuration.is_none())\n                    || surface.role.pending.is_some()\n                {\n                    return Err(CommitError(\n                        xdg_surface::Error::UnconfiguredBuffer,\n                        \"The surface must be configured prior to attaching a buffer.\".to_string(),\n                    ));\n                }\n\n                // If we're waiting on an ack_configure, poke the client again.\n                if surface.pending_configure.is_some() {\n                    debug!(pending_configure = ?surface.pending_configure, \"pending configure, resending frame callback\");\n                    if let Some(fb) = feedback.take() {\n                        fb.discarded();\n                    }\n\n                    if let Some(cb) = surface.frame_callback.pending.take() {\n                        cb.done(self.serial.next());\n                    }\n                }\n\n                // In the case of shm buffer, we do a copy and immediately release it.\n                let mut needs_release = true;\n                if let BufferBacking::Shm {\n                    staging_buffer,\n                    format,\n                    pool,\n                    dirty,\n                    ..\n      
          } = &mut buffer.backing\n                {\n                    // A large shm buffer is probably a sign that something has gone wrong.\n                    if format.width > 500 && format.height > 500 && surface.content.is_none() {\n                        warn!(\n                            \"client appears to be using software rendering; performance may be \\\n                             degraded\"\n                        );\n                    }\n\n                    let len = (format.stride * format.height) as usize;\n                    let pool = pool.read().unwrap();\n                    let contents = pool.data(format.offset as usize, len);\n\n                    staging_buffer.copy_from_slice(contents);\n                    *dirty = true;\n                    needs_release = false;\n                    buffer.wl_buffer.release();\n                }\n\n                // Check for explicit sync.\n                let explicit_sync =\n                    surface\n                        .wp_syncobj_surface\n                        .as_ref()\n                        .and_then(|wp_syncobj_surface| {\n                            let Some(acquire_point) = surface.pending_acquire_point.take() else {\n                                wp_syncobj_surface.post_error(\n                                    wp_linux_drm_syncobj_surface_v1::Error::NoAcquirePoint,\n                                    \"No acquire point set.\",\n                                );\n                                return None;\n                            };\n\n                            let Some(release_point) = surface.pending_release_point.take() else {\n                                wp_syncobj_surface.post_error(\n                                    wp_linux_drm_syncobj_surface_v1::Error::NoReleasePoint,\n                                    \"No release point set.\",\n                                );\n                                return None;\n                         
   };\n\n                            Some((acquire_point, release_point))\n                        });\n\n                if needs_release && explicit_sync.is_some() {\n                    // No need for release events if explicit sync is used.\n                    needs_release = false;\n                }\n\n                let old_content = surface.content.replace(ContentUpdate {\n                    buffer: buffer_id,\n                    needs_release,\n                    explicit_sync,\n                    tp_done: None,\n                    dimensions: buffer.dimensions(),\n                    wp_presentation_feedback: feedback,\n                });\n\n                if let Some(old_content) = old_content {\n                    // Enqueue the buffer for release.\n                    self.in_flight_buffers.push(old_content);\n                }\n            }\n            None => (),\n        }\n\n        // Configure surfaces which have a newly applied role.\n        match surface.role.promote() {\n            CommitResult::Replaced(_, _) => panic!(\"surface already has a role\"),\n            CommitResult::Added(role) => {\n                let xwin = if let SurfaceRole::XWayland { serial } = role {\n                    self.xwayland_surface_lookup.insert(serial, id);\n                    self.xwm.as_ref().unwrap().xwindow_for_serial(serial)\n                } else {\n                    None\n                };\n\n                surface.reconfigure(display_params, xwin);\n            }\n            _ => (),\n        }\n\n        surface.buffer_scale.promote();\n        surface.frame_callback.promote();\n\n        trace!(?surface, \"surface commit\");\n\n        // Map the surface, if we've fulfilled all requirements.\n        let is_mappable = match surface.role.current {\n            None | Some(SurfaceRole::Cursor) => false,\n            Some(SurfaceRole::XdgToplevel { .. 
}) => {\n                surface.pending_configure.is_none() && surface.content.is_some()\n            }\n            Some(SurfaceRole::XWayland { serial }) => {\n                if surface.content.is_none() {\n                    false\n                } else if let Some(xwin) = self.xwm.as_mut().unwrap().xwindow_for_serial(serial) {\n                    // Copy over title and app_id.\n                    surface.title = xwin.title.clone();\n                    surface.app_id = xwin.app_id.clone();\n\n                    xwin.mapped\n                } else {\n                    false\n                }\n            }\n        };\n\n        if is_mappable {\n            if let Some(ContentUpdate { buffer, .. }) = surface.content {\n                self.map_surface(id, buffer);\n            }\n        }\n\n        Ok(())\n    }\n\n    /// Cleans up for a surface destroyed by the client.\n    pub fn surface_destroyed(&mut self, id: SurfaceKey) {\n        self.unmap_surface(id);\n\n        let surf = self.surfaces.remove(id);\n        if let Some(SurfaceRole::XWayland { serial }) = surf.and_then(|s| s.role.current) {\n            self.xwayland_surface_lookup.remove(&serial);\n        }\n    }\n\n    /// Sets a pending role for the surface. 
Returns false if the surface\n    /// already has a role or no longer exists.\n    pub fn set_surface_role(&mut self, id: SurfaceKey, role: SurfaceRole) -> bool {\n        match self.surfaces.get_mut(id) {\n            Some(ref mut surf) if surf.role.current.is_none() => {\n                surf.role.pending = Some(role);\n                true\n            }\n            _ => false,\n        }\n    }\n\n    /// Checks if any surfaces have outdated configuration, and sends a\n    /// configure event.\n    pub fn configure_surfaces(&mut self) -> anyhow::Result<()> {\n        for (_id, surface) in self.surfaces.iter_mut() {\n            if surface.configuration.is_none()\n                || surface.configuration == surface.sent_configuration\n            {\n                continue;\n            }\n\n            trace!(?surface, conf = ?surface.configuration, \"configuring surface\");\n\n            let conf = surface.configuration.unwrap();\n            match &surface.role.current {\n                None => panic!(\"surface configured without role\"),\n                Some(SurfaceRole::XdgToplevel {\n                    xdg_surface,\n                    xdg_toplevel,\n                }) => {\n                    if conf.scale.is_fractional() {\n                        warn!(\n                            scale = ?conf.scale,\n                            \"fractional scale not supported, using next integer\"\n                        )\n                    }\n\n                    let scale = conf.scale.ceil();\n                    if surface.wl_surface.version() >= 6 {\n                        let scale: f64 = scale.into();\n                        surface.wl_surface.preferred_buffer_scale(scale as i32);\n                    }\n\n                    if let Some(wp_fractional_scale) = &surface.wp_fractional_scale {\n                        wp_fractional_scale.preferred_scale((f64::from(scale) * 120.0) as u32);\n                    }\n\n                    let mut states 
= match conf.visibility {\n                        Visibility::Occluded if xdg_toplevel.version() >= 6 => {\n                            vec![xdg_toplevel::State::Suspended]\n                        }\n                        Visibility::Occluded => vec![],\n                        Visibility::Visible => vec![],\n                        Visibility::Active => vec![xdg_toplevel::State::Activated],\n                    };\n\n                    if conf.fullscreen {\n                        states.push(xdg_toplevel::State::Fullscreen);\n                    }\n\n                    let raw_states = states\n                        .into_iter()\n                        .flat_map(|st| {\n                            let v: u32 = st.into();\n                            v.to_ne_bytes()\n                        })\n                        .collect::<Vec<u8>>();\n\n                    // Wayland wants the \"logical\" width and height to be\n                    // pre-scaling. That means if we want a 1200x600 buffer\n                    // at 2x ui scale, we need to configure it for 600x300.\n                    let scaled: glam::IVec2 = buffer_vector_to_surface(conf.size, scale).as_ivec2();\n\n                    let serial = self.serial.next();\n                    xdg_toplevel.configure(scaled.x, scaled.y, raw_states);\n                    xdg_surface.configure(serial);\n\n                    surface.sent_configuration = Some(conf);\n                    surface.pending_configure = Some(serial);\n                }\n                Some(SurfaceRole::XWayland { serial }) => {\n                    let xwm = self.xwm.as_mut().unwrap();\n                    match xwm.xwindow_for_serial(*serial) {\n                        Some(xwayland::XWindow {\n                            id,\n                            override_redirect,\n                            ..\n                        }) if !override_redirect => {\n                            xwm.configure_window(*id, conf)?;\n         
               }\n                        _ => (),\n                    }\n\n                    surface.sent_configuration = Some(conf);\n                }\n                Some(SurfaceRole::Cursor) => unreachable!(),\n            }\n        }\n\n        Ok(())\n    }\n\n    /// Sends complete presentation feedback. Note that since this is called as\n    /// an idle operation, the timestamps are only accurate if the compositor\n    /// thread is woken within a reasonable timeframe.\n    pub fn send_presentation_feedback(&mut self) -> anyhow::Result<()> {\n        let time = rustix::time::clock_gettime(rustix::time::ClockId::Monotonic);\n        let tv_sec_hi = (time.tv_sec >> 32) as u32;\n        let tv_sec_lo = (time.tv_sec & 0xFFFFFFFF) as u32;\n        let tv_nsec = time.tv_nsec as u32;\n\n        let framerate = self.display_params.framerate;\n        let refresh = time::Duration::from_secs_f64(1.0 / framerate as f64).as_nanos() as u32;\n\n        let mut still_pending = Vec::with_capacity(self.pending_presentation_feedback.len());\n        for PendingPresentationFeedback(fb, tp) in self.pending_presentation_feedback.drain(..) {\n            if unsafe { !tp.poll()? 
} {\n                still_pending.push(PendingPresentationFeedback(fb, tp));\n                continue;\n            }\n\n            for wl_output in self\n                .output_proxies\n                .iter()\n                .filter(|wl_output| wl_output.id().same_client_as(&fb.id()))\n            {\n                fb.sync_output(wl_output);\n            }\n\n            fb.presented(\n                tv_sec_hi,\n                tv_sec_lo,\n                tv_nsec,\n                refresh,\n                0, // seq_hi\n                0, // seq_lo\n                wp_presentation_feedback::Kind::empty(),\n            );\n        }\n\n        self.pending_presentation_feedback = still_pending;\n        Ok(())\n    }\n}\n\n/// Converts a vector of pixels into surface-local or \"logical\" coordinates\n/// as wayland expects them.\npub fn buffer_vector_to_surface(coords: impl Into<glam::DVec2>, scale: PixelScale) -> glam::DVec2 {\n    let scale: f64 = scale.into();\n    coords.into() / scale\n}\n\n/// Converts a surface-local vector (sometimes called \"logical\" coordinates)\n/// into pixels.\npub fn surface_vector_to_buffer(coords: impl Into<glam::DVec2>, scale: PixelScale) -> glam::DVec2 {\n    let scale: f64 = scale.into();\n    coords.into() * scale\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/xwayland/xwm.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    collections::BTreeMap,\n    os::fd::{AsFd as _, BorrowedFd},\n};\n\nuse hashbrown::HashSet;\nuse tracing::{debug, trace};\nuse x11rb::{\n    connection::Connection as _,\n    cookie::VoidCookie,\n    protocol::{\n        self,\n        composite::ConnectionExt as _,\n        xproto::{self, ConnectionExt as _},\n    },\n    rust_connection::{ConnectionError, DefaultStream, RustConnection as X11Connection},\n    wrapper::ConnectionExt as _,\n};\n\nuse crate::{\n    pixel_scale::PixelScale,\n    session::compositor::{\n        surface::{self, SurfaceConfiguration},\n        Compositor,\n    },\n};\n\nx11rb::atom_manager! {\n    /// Atoms used by the XWM and X11Surface types\n    pub Atoms:\n    AtomsCookie {\n        WL_SURFACE_SERIAL,\n\n        UTF8_STRING,\n\n        WM_HINTS,\n        WM_PROTOCOLS,\n        WM_TAKE_FOCUS,\n        WM_CHANGE_STATE,\n        _NET_WM_NAME,\n        _NET_WM_MOVERESIZE,\n        _NET_WM_STATE_MODAL,\n\n        WM_S0,\n        WM_STATE,\n        _NET_WM_CM_S0,\n        _NET_SUPPORTED,\n        _NET_ACTIVE_WINDOW,\n        _NET_CLIENT_LIST,\n        _NET_CLIENT_LIST_STACKING,\n        _NET_WM_STATE,\n        _NET_WM_STATE_MAXIMIZED_VERT,\n        _NET_WM_STATE_MAXIMIZED_HORZ,\n        _NET_WM_STATE_HIDDEN,\n        _NET_WM_STATE_FULLSCREEN,\n        _NET_WM_STATE_FOCUSED,\n        _NET_SUPPORTING_WM_CHECK,\n    }\n}\n\npub struct XWindow {\n    pub id: u32,\n\n    pub serial: Option<u64>,\n    pub title: Option<String>,\n    pub app_id: Option<String>,\n\n    pub x: u32,\n    pub y: u32,\n    pub width: u32,\n    pub height: u32,\n\n    pub states: HashSet<xproto::Atom>,\n\n    pub protocols: HashSet<xproto::Atom>,\n\n    pub hint_input: bool,\n    pub override_redirect: bool,\n    pub mapped: bool, // Whether MapRequest/MapNotify has been recieved.\n}\n\nimpl std::fmt::Debug for XWindow {\n    fn fmt(&self, f: 
&mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        let mut title = self.title.as_deref().unwrap_or(\"Untitled\");\n        if title.is_empty() {\n            title = \"Untitled\";\n        }\n\n        let serial = if let Some(s) = self.serial {\n            format!(\" serial={}\", s)\n        } else {\n            \"\".to_string()\n        };\n\n        let override_redirect = if self.override_redirect { \" [OR]\" } else { \"\" };\n\n        write!(\n            f,\n            \"<{} \\\"{}\\\"{}{}>\",\n            self.id, title, serial, override_redirect\n        )?;\n        Ok(())\n    }\n}\n\npub struct Xwm {\n    conn: X11Connection,\n    atoms: Atoms,\n    wm_id: u32,\n\n    screen: xproto::Screen,\n    client_list: Vec<u32>,\n    client_list_stacking: Vec<u32>,\n\n    pub xwindows: BTreeMap<u32, XWindow>,\n    pub serials: BTreeMap<u64, u32>,\n}\n\nimpl Xwm {\n    pub fn new(x11_socket: mio::net::UnixStream) -> anyhow::Result<Self> {\n        let stream = DefaultStream::from_unix_stream(x11_socket.into())?.0;\n        let conn = X11Connection::connect_to_stream(stream, 0)?;\n        let atoms = Atoms::new(&conn)?.reply()?;\n\n        let screen = conn.setup().roots[0].clone();\n\n        {\n            let font = xproto::FontWrapper::open_font(&conn, \"cursor\".as_bytes())?;\n            let cursor = xproto::CursorWrapper::create_glyph_cursor(\n                &conn,\n                font.font(),\n                font.font(),\n                68,\n                69,\n                0,\n                0,\n                0,\n                u16::MAX,\n                u16::MAX,\n                u16::MAX,\n            )?;\n\n            conn.change_window_attributes(\n                screen.root,\n                &xproto::ChangeWindowAttributesAux::default()\n                    .event_mask(\n                        xproto::EventMask::SUBSTRUCTURE_REDIRECT\n                            | xproto::EventMask::SUBSTRUCTURE_NOTIFY\n                     
       | xproto::EventMask::PROPERTY_CHANGE,\n                        // | xproto::EventMask::FOCUS_CHANGE,\n                    )\n                    .cursor(cursor.cursor()),\n            )?;\n        }\n\n        let wm_id = conn.generate_id()?;\n        conn.create_window(\n            screen.root_depth,\n            wm_id,\n            screen.root,\n            0,\n            0,\n            10,\n            10,\n            0,\n            xproto::WindowClass::INPUT_OUTPUT,\n            x11rb::COPY_FROM_PARENT,\n            &Default::default(),\n        )?;\n\n        conn.set_selection_owner(wm_id, atoms.WM_S0, x11rb::CURRENT_TIME)?;\n        conn.set_selection_owner(wm_id, atoms._NET_WM_CM_S0, x11rb::CURRENT_TIME)?;\n        conn.composite_redirect_subwindows(screen.root, protocol::composite::Redirect::MANUAL)?;\n\n        conn.change_property32(\n            xproto::PropMode::REPLACE,\n            screen.root,\n            atoms._NET_SUPPORTED,\n            xproto::AtomEnum::ATOM,\n            &[\n                atoms._NET_WM_STATE,\n                atoms._NET_WM_STATE_MAXIMIZED_HORZ,\n                atoms._NET_WM_STATE_MAXIMIZED_VERT,\n                atoms._NET_WM_STATE_HIDDEN,\n                atoms._NET_WM_STATE_FULLSCREEN,\n                atoms._NET_WM_STATE_MODAL,\n                atoms._NET_WM_STATE_FOCUSED,\n                atoms._NET_ACTIVE_WINDOW,\n                atoms._NET_WM_MOVERESIZE,\n                atoms._NET_CLIENT_LIST,\n                atoms._NET_CLIENT_LIST_STACKING,\n            ],\n        )?;\n\n        replace_window_list(&conn, screen.root, atoms._NET_ACTIVE_WINDOW, [0])?;\n        replace_window_list(&conn, screen.root, atoms._NET_SUPPORTING_WM_CHECK, [wm_id])?;\n        replace_window_list(&conn, wm_id, atoms._NET_SUPPORTING_WM_CHECK, [wm_id])?;\n\n        conn.change_property8(\n            xproto::PropMode::REPLACE,\n            wm_id,\n            atoms._NET_WM_NAME,\n            atoms.UTF8_STRING,\n            \"Magic 
Mirror XWM\".as_bytes(),\n        )?;\n\n        conn.flush()?;\n\n        Ok(Self {\n            conn,\n            atoms,\n            wm_id,\n\n            screen,\n            client_list: Vec::new(),\n            client_list_stacking: Vec::new(),\n\n            xwindows: BTreeMap::new(),\n            serials: BTreeMap::new(),\n        })\n    }\n\n    pub fn display_fd(&self) -> BorrowedFd {\n        self.conn.stream().as_fd()\n    }\n\n    pub fn xwindow_for_serial(&self, serial: u64) -> Option<&XWindow> {\n        self.serials\n            .get(&serial)\n            .and_then(|id| self.xwindows.get(id))\n    }\n\n    pub fn configure_window(\n        &mut self,\n        window: u32,\n        conf: SurfaceConfiguration,\n    ) -> anyhow::Result<()> {\n        if let Some(xwin) = self.xwindows.get_mut(&window) {\n            trace!(?xwin, ?conf, \"configuring xwindow\");\n\n            self.conn.configure_window(\n                window,\n                &xproto::ConfigureWindowAux::default()\n                    .x(conf.topleft.x as i32)\n                    .y(conf.topleft.y as i32)\n                    .width(conf.size.x)\n                    .height(conf.size.y)\n                    .border_width(0)\n                    .stack_mode(xproto::StackMode::ABOVE),\n            )?;\n\n            self.conn.send_event(\n                false,\n                window,\n                xproto::EventMask::STRUCTURE_NOTIFY,\n                xproto::ConfigureNotifyEvent {\n                    response_type: xproto::CONFIGURE_NOTIFY_EVENT,\n                    sequence: 0,\n                    event: window,\n                    window,\n                    above_sibling: x11rb::NONE,\n                    x: conf.topleft.x as i16,\n                    y: conf.topleft.y as i16,\n                    width: conf.size.x as u16,\n                    height: conf.size.y as u16,\n                    border_width: 0,\n                    override_redirect: false,\n              
  },\n            )?;\n\n            let old_states = xwin.states.clone();\n\n            match conf.visibility {\n                surface::Visibility::Occluded => {\n                    xwin.states.insert(self.atoms._NET_WM_STATE_HIDDEN);\n                    xwin.states.remove(&self.atoms._NET_WM_STATE_FOCUSED);\n                }\n                surface::Visibility::Visible => {\n                    xwin.states.remove(&self.atoms._NET_WM_STATE_FOCUSED);\n                    xwin.states.remove(&self.atoms._NET_WM_STATE_HIDDEN);\n                }\n                surface::Visibility::Active => {\n                    xwin.states.remove(&self.atoms._NET_WM_STATE_HIDDEN);\n                    xwin.states.insert(self.atoms._NET_WM_STATE_FOCUSED);\n                }\n            }\n\n            if conf.fullscreen {\n                xwin.states.insert(self.atoms._NET_WM_STATE_FULLSCREEN);\n            } else {\n                xwin.states.remove(&self.atoms._NET_WM_STATE_FULLSCREEN);\n            }\n\n            if xwin.states != old_states {\n                let values = xwin.states.iter().copied().collect::<Vec<_>>();\n\n                if tracing::event_enabled!(tracing::Level::TRACE) {\n                    let names = values\n                        .iter()\n                        .copied()\n                        .map(|atom| get_atom_name(&self.conn, atom))\n                        .collect::<Result<Vec<_>, _>>()?;\n                    trace!(?xwin, ?names, \"setting states\");\n                }\n\n                self.conn.change_property32(\n                    xproto::PropMode::REPLACE,\n                    xwin.id,\n                    self.atoms._NET_WM_STATE,\n                    xproto::AtomEnum::ATOM,\n                    &values,\n                )?;\n            }\n\n            self.conn.flush()?;\n        } else {\n            debug!(window, \"skipping configure for dead window\")\n        }\n\n        Ok(())\n    }\n\n    pub fn set_focus(&self, 
window: Option<u32>) -> anyhow::Result<()> {\n        let Some(xwin) = window.and_then(|id| self.xwindows.get(&id)) else {\n            trace!(\"removing input focus\");\n            self.conn.set_input_focus(\n                xproto::InputFocus::NONE,\n                x11rb::NONE,\n                x11rb::CURRENT_TIME,\n            )?;\n            self.conn.flush()?;\n            return Ok(());\n        };\n\n        replace_window_list(\n            &self.conn,\n            self.screen.root,\n            self.atoms._NET_ACTIVE_WINDOW,\n            [xwin.id],\n        )?;\n\n        // \"Passive and Locally Active clients set the input field of WM_HINTS\n        // to True, which indicates that they require window manager assistance\n        // in acquiring the input focus.\"\n        // TODO: for some reason this seems to cause problems, for example for\n        // steam context menus, which flicker out immediately.\n        if xwin.hint_input {\n            trace!(?xwin, \"setting input focus\");\n            self.conn.set_input_focus(\n                xproto::InputFocus::POINTER_ROOT,\n                xwin.id,\n                x11rb::CURRENT_TIME,\n            )?;\n        }\n\n        // \"Windows with the atom WM_TAKE_FOCUS in their WM_PROTOCOLS property\n        // may receive a ClientMessage event from the window manager with\n        // WM_TAKE_FOCUS...\"\n        if xwin.protocols.contains(&self.atoms.WM_TAKE_FOCUS) {\n            trace!(?xwin, \"sending TAKE_FOCUS\");\n\n            let event = xproto::ClientMessageEvent::new(\n                32,\n                xwin.id,\n                self.atoms.WM_PROTOCOLS,\n                [self.atoms.WM_TAKE_FOCUS, x11rb::CURRENT_TIME, 0, 0, 0],\n            );\n            self.conn\n                .send_event(false, xwin.id, xproto::EventMask::NO_EVENT, event)?;\n        }\n\n        self.conn.flush()?;\n        Ok(())\n    }\n}\n\nimpl Compositor {\n    /// Adds xwayland as a client, returning a pollable FD 
for the xwm.\n    pub fn insert_xwayland(\n        &mut self,\n        socket: mio::net::UnixStream,\n    ) -> anyhow::Result<BorrowedFd<'_>> {\n        debug!(\"starting xwm\");\n        let xwm = Xwm::new(socket)?;\n        Ok(self.xwm.insert(xwm).display_fd())\n    }\n\n    pub fn dispatch_xwm(&mut self) -> anyhow::Result<()> {\n        loop {\n            match self.xwm.as_mut().unwrap().conn.poll_for_event()? {\n                Some(ev) => handle_event(self, ev)?,\n                None => return Ok(()),\n            }\n        }\n    }\n\n    pub fn delayed_map_xwin(&mut self, serial: u64) {\n        let Some(xwin) = self.xwm.as_ref().unwrap().xwindow_for_serial(serial) else {\n            return;\n        };\n\n        let Some(surface_id) = self.xwayland_surface_lookup.get(&serial) else {\n            return;\n        };\n\n        let display_params = self.display_params;\n\n        let surf = &mut self.surfaces[*surface_id];\n        surf.title = xwin.title.clone();\n        surf.app_id = xwin.app_id.clone();\n        surf.reconfigure(display_params, Some(xwin));\n\n        if let Some(surface::ContentUpdate { buffer, .. 
}) = surf.content {\n            self.map_surface(*surface_id, buffer);\n        }\n    }\n}\n\nfn handle_event(state: &mut Compositor, ev: protocol::Event) -> anyhow::Result<()> {\n    trace!(?ev, \"x11 event\");\n    let display_params = state.display_params;\n    let xwm = state.xwm.as_mut().unwrap();\n\n    use protocol::Event::*;\n    match ev {\n        CreateNotify(msg) => {\n            if msg.window == xwm.wm_id {\n                return Ok(());\n            }\n\n            // Track property changes (such as the window title).\n            xwm.conn.change_window_attributes(\n                msg.window,\n                &xproto::ChangeWindowAttributesAux::new()\n                    .event_mask(xproto::EventMask::PROPERTY_CHANGE),\n            )?;\n            xwm.conn.flush()?;\n\n            let title = fetch_string_property(&xwm.conn, msg.window, xwm.atoms._NET_WM_NAME)?;\n            let app_id = fetch_class(&xwm.conn, msg.window)?;\n            let hints = fetch_hints(&xwm.conn, msg.window)?;\n            let protocols = fetch_protocols(&xwm.conn, xwm.atoms.WM_PROTOCOLS, msg.window)?;\n\n            trace!(?hints, ?protocols, \"fetched state\");\n\n            let xwin = XWindow {\n                id: msg.window,\n                serial: None,\n\n                title,\n                app_id,\n\n                x: msg.x as u32,\n                y: msg.y as u32,\n                width: msg.width as u32,\n                height: msg.height as u32,\n\n                states: HashSet::new(),\n\n                protocols,\n\n                hint_input: hints.and_then(|h| h.input).unwrap_or_default(),\n                override_redirect: msg.override_redirect,\n                mapped: false,\n            };\n\n            trace!(?xwin, \"xwindow created\");\n\n            xwm.xwindows.insert(msg.window, xwin);\n            xwm.conn.flush()?;\n        }\n        MapRequest(xproto::MapRequestEvent { window, .. 
}) => {\n            if let Some(xwin) = xwm.xwindows.get_mut(&window) {\n                // We already map the window on the X11 side; otherwise clients\n                // just hang there.\n                trace!(?xwin, \"mapping xwindow\");\n                xwm.conn.map_window(window)?;\n\n                let property = [1, 0]; // NORMAL, NONE\n                xwm.conn.change_property32(\n                    xproto::PropMode::REPLACE,\n                    window,\n                    xwm.atoms.WM_STATE,\n                    xwm.atoms.WM_STATE,\n                    &property,\n                )?;\n\n                xwm.conn.flush()?;\n                xwin.mapped = true;\n            }\n        }\n        MapNotify(xproto::MapNotifyEvent { window, .. }) => {\n            if let Some(xwin) = xwm.xwindows.get_mut(&window) {\n                trace!(?xwin, \"map notify\");\n                xwin.mapped = true;\n\n                if xwin.override_redirect {\n                    // Do nothing.\n                } else {\n                    xwm.client_list.push(window);\n                    xwm.client_list_stacking.push(window);\n\n                    xwm.conn.change_property32(\n                        xproto::PropMode::APPEND,\n                        xwm.screen.root,\n                        xwm.atoms._NET_CLIENT_LIST,\n                        xproto::AtomEnum::WINDOW,\n                        &[window],\n                    )?;\n\n                    xwm.conn.change_property32(\n                        xproto::PropMode::APPEND,\n                        xwm.screen.root,\n                        xwm.atoms._NET_CLIENT_LIST_STACKING,\n                        xproto::AtomEnum::WINDOW,\n                        &[window],\n                    )?;\n\n                    xwm.conn.flush()?;\n                }\n\n                if let Some(serial) = xwin.serial {\n                    state.raise_x11_surface(serial)\n                }\n            } else {\n                
trace!(window, \"MapNotify for missing surface\");\n            }\n        }\n        ConfigureRequest(msg) => {\n            trace!(\n                width = msg.width,\n                height = msg.height,\n                x = msg.x,\n                y = msg.y,\n                parent = msg.parent,\n                sibling = msg.sibling,\n                stack_mode = ?msg.stack_mode,\n                mask = ?msg.value_mask,\n                \"configuration request\"\n            );\n\n            let serial = xwm\n                .serials\n                .iter()\n                .find_map(|(k, v)| if *v == msg.window { Some(k) } else { None });\n\n            if let Some(surf) = serial\n                .and_then(|serial| state.xwayland_surface_lookup.get(serial))\n                .and_then(|id| state.surfaces.get_mut(*id))\n            {\n                if let Some(conf) = surf.configuration {\n                    xwm.configure_window(msg.window, conf)?;\n                    surf.sent_configuration = Some(conf);\n                    surf.pending_configure = None;\n                }\n            } else if let Some(xwin) = xwm.xwindows.get_mut(&msg.window) {\n                trace!(\"sending synthetic configure\");\n                // Create a synthetic configuration event based on what the\n                // window requested.\n\n                if msg.value_mask.contains(xproto::ConfigWindow::X) {\n                    xwin.x = msg.x as u32;\n                }\n\n                if msg.value_mask.contains(xproto::ConfigWindow::Y) {\n                    xwin.y = msg.y as u32;\n                }\n\n                if msg.value_mask.contains(xproto::ConfigWindow::WIDTH) {\n                    xwin.width = msg.width as u32;\n                }\n\n                if msg.value_mask.contains(xproto::ConfigWindow::HEIGHT) {\n                    xwin.height = msg.height as u32;\n                }\n\n                let conf = SurfaceConfiguration {\n                    
topleft: (xwin.x, xwin.y).into(),\n                    size: (xwin.width, xwin.height).into(),\n                    scale: PixelScale::ONE,\n                    visibility: surface::Visibility::Visible,\n                    fullscreen: false,\n                };\n\n                xwm.configure_window(msg.window, conf)?;\n            }\n        }\n        ConfigureNotify(msg) => {\n            if let Some(xwin) = xwm.xwindows.get_mut(&msg.window) {\n                trace!(\n                    ?xwin,\n                    x = msg.x,\n                    y = msg.y,\n                    width = msg.width,\n                    height = msg.height,\n                    above = msg.above_sibling,\n                    or = msg.override_redirect,\n                    \"configure notify\"\n                );\n\n                xwin.x = msg.x as u32;\n                xwin.y = msg.y as u32;\n                xwin.width = msg.width as u32;\n                xwin.height = msg.height as u32;\n                xwin.override_redirect = msg.override_redirect;\n\n                if let Some(surf) = xwin\n                    .serial\n                    .and_then(|serial| state.xwayland_surface_lookup.get(&serial))\n                    .and_then(|id| state.surfaces.get_mut(*id))\n                {\n                    surf.reconfigure(display_params, Some(xwin));\n                }\n            }\n        }\n        UnmapNotify(msg) => {\n            if let Some(xwin) = xwm.xwindows.get_mut(&msg.window) {\n                trace!(?xwin, \"unmap notify\");\n                xwin.mapped = false;\n\n                xwm.client_list.retain(|id| *id != xwin.id);\n                xwm.client_list_stacking.retain(|id| *id != xwin.id);\n\n                replace_window_list(\n                    &xwm.conn,\n                    xwm.screen.root,\n                    xwm.atoms._NET_CLIENT_LIST,\n                    &xwm.client_list,\n                )?;\n\n                replace_window_list(\n        
            &xwm.conn,\n                    xwm.screen.root,\n                    xwm.atoms._NET_CLIENT_LIST_STACKING,\n                    &xwm.client_list_stacking,\n                )?;\n            }\n        }\n        DestroyNotify(msg) => {\n            if let Some(xwin) = xwm.xwindows.remove(&msg.window) {\n                xwm.client_list.retain(|id| *id != xwin.id);\n                xwm.client_list_stacking.retain(|id| *id != xwin.id);\n                xwm.serials.retain(|_, id| *id != xwin.id);\n\n                replace_window_list(\n                    &xwm.conn,\n                    xwm.screen.root,\n                    xwm.atoms._NET_CLIENT_LIST,\n                    &xwm.client_list,\n                )?;\n\n                replace_window_list(\n                    &xwm.conn,\n                    xwm.screen.root,\n                    xwm.atoms._NET_CLIENT_LIST_STACKING,\n                    &xwm.client_list_stacking,\n                )?;\n            }\n        }\n        ClientMessage(msg) if msg.type_ == xwm.atoms.WL_SURFACE_SERIAL => {\n            let [lo, hi, ..] = msg.data.as_data32();\n            let serial = ((hi as u64) << 32) | lo as u64;\n\n            xwm.serials.insert(serial, msg.window);\n            if let Some(xwin) = xwm.xwindows.get_mut(&msg.window) {\n                xwin.serial = Some(serial);\n                trace!(?xwin, \"WL_SURFACE_SERIAL set\");\n\n                // This sometimes happens after the surface is committed.\n                if xwin.mapped {\n                    state.delayed_map_xwin(serial);\n                }\n            }\n        }\n        ClientMessage(msg) if msg.type_ == xwm.atoms._NET_WM_STATE => {\n            let [action, a, b, ..] 
= msg.data.as_data32();\n\n            if let Some(xwin) = xwm.xwindows.get_mut(&msg.window) {\n                let old_states = xwin.states.clone();\n                for value in [a, b] {\n                    const REMOVE: u32 = 0;\n                    const ADD: u32 = 1;\n                    const TOGGLE: u32 = 2;\n\n                    match (action, value) {\n                        (_, x11rb::NONE) => (),\n                        (REMOVE, v) => {\n                            xwin.states.remove(&v);\n                        }\n                        (ADD, v) => {\n                            xwin.states.insert(v);\n                        }\n                        (TOGGLE, v) => {\n                            if xwin.states.contains(&v) {\n                                xwin.states.remove(&v);\n                            } else {\n                                xwin.states.insert(v);\n                            }\n                        }\n                        _ => (),\n                    }\n                }\n\n                if xwin.states != old_states {\n                    let values = xwin.states.iter().copied().collect::<Vec<_>>();\n\n                    if tracing::event_enabled!(tracing::Level::TRACE) {\n                        let names = values\n                            .iter()\n                            .copied()\n                            .map(|atom| get_atom_name(&xwm.conn, atom))\n                            .collect::<Result<Vec<_>, _>>()?;\n                        trace!(?xwin, ?names, \"setting states\");\n                    }\n\n                    xwm.conn.change_property32(\n                        xproto::PropMode::REPLACE,\n                        xwin.id,\n                        xwm.atoms._NET_WM_STATE,\n                        xproto::AtomEnum::ATOM,\n                        &values,\n                    )?;\n                }\n            }\n        }\n        ClientMessage(msg) if msg.type_ == 
xwm.atoms._NET_ACTIVE_WINDOW => {\n            if let Some(target) = xwm.xwindows.get(&msg.window) {\n                trace!(?target, \"_NET_ACTIVE_WINDOW request\");\n                replace_window_list(\n                    &xwm.conn,\n                    xwm.screen.root,\n                    xwm.atoms._NET_ACTIVE_WINDOW,\n                    [target.id],\n                )?;\n            }\n        }\n        ClientMessage(msg) => {\n            if tracing::event_enabled!(tracing::Level::TRACE) {\n                let name = get_atom_name(&xwm.conn, msg.type_)?;\n                trace!(window = ?msg.window, atom = name, \"ignoring ClientMessage\")\n            }\n        }\n        PropertyNotify(msg) => {\n            if tracing::event_enabled!(tracing::Level::TRACE) {\n                let name = get_atom_name(&xwm.conn, msg.atom)?;\n                trace!(xwin = msg.window, state = ?msg.state, atom = name, \"property changed\");\n            }\n\n            if let Some(xwin) = xwm.xwindows.get_mut(&msg.window) {\n                match msg.atom {\n                    v if v == xwm.atoms._NET_WM_NAME => {\n                        xwin.title = fetch_string_property(&xwm.conn, msg.window, v)?;\n                        trace!(?xwin, \"title changed\");\n                    }\n                    v if v == u32::from(xproto::AtomEnum::WM_CLASS) => {\n                        xwin.app_id = fetch_class(&xwm.conn, msg.window)?;\n                        trace!(?xwin, class = xwin.app_id, \"class changed\");\n                    }\n                    v if v == xwm.atoms.WM_HINTS => {\n                        let hints = fetch_hints(&xwm.conn, msg.window)?;\n                        trace!(?xwin, ?hints, \"hints changed\");\n                        xwin.hint_input = hints.and_then(|h| h.input).unwrap_or_default();\n                    }\n                    v if v == xwm.atoms.WM_PROTOCOLS => {\n                        let protocols =\n                            
fetch_protocols(&xwm.conn, xwm.atoms.WM_PROTOCOLS, msg.window)?;\n                        trace!(?xwin, ?protocols, \"protocols changed\");\n                    }\n                    _ => (),\n                }\n            }\n        }\n        _ => (),\n    }\n\n    Ok(())\n}\n\nfn fetch_string_property(\n    conn: &X11Connection,\n    window: xproto::Window,\n    atom: impl Into<xproto::Atom>,\n) -> Result<Option<String>, ConnectionError> {\n    let atom = atom.into();\n    let reply = match conn\n        .get_property(false, window, atom, xproto::AtomEnum::ANY, 0, 1024)?\n        .reply_unchecked()\n    {\n        Ok(Some(reply)) => reply,\n        Ok(None) | Err(ConnectionError::ParseError(_)) => return Ok(None),\n        Err(err) => return Err(err),\n    };\n\n    let Some(bytes) = reply.value8() else {\n        return Ok(None);\n    };\n\n    match String::from_utf8(bytes.collect()) {\n        Ok(v) => Ok(Some(v)),\n        Err(_) => {\n            trace!(?atom, \"invalid string property\");\n            Ok(None)\n        }\n    }\n}\n\nfn fetch_class(\n    conn: &X11Connection,\n    window: xproto::Window,\n) -> Result<Option<String>, ConnectionError> {\n    let reply = match x11rb::properties::WmClass::get(conn, window)?.reply_unchecked() {\n        Ok(Some(reply)) => reply,\n        Ok(None) | Err(ConnectionError::ParseError(_)) => return Ok(None),\n        Err(err) => return Err(err),\n    };\n\n    match std::str::from_utf8(reply.class()) {\n        Ok(v) => Ok(Some(v.to_owned())),\n        Err(_) => {\n            trace!(\"WM_CLASS property is invalid string\");\n            Ok(None)\n        }\n    }\n}\n\nfn fetch_hints(\n    conn: &X11Connection,\n    window: xproto::Window,\n) -> Result<Option<x11rb::properties::WmHints>, ConnectionError> {\n    match x11rb::properties::WmHints::get(conn, window)?.reply_unchecked() {\n        Ok(Some(reply)) => Ok(Some(reply)),\n        Ok(None) | Err(ConnectionError::ParseError(_)) => Ok(None),\n        Err(err) 
=> Err(err),\n    }\n}\n\nfn fetch_protocols(\n    conn: &X11Connection,\n    atom: impl Into<xproto::Atom>,\n    window: xproto::Window,\n) -> Result<HashSet<xproto::Atom>, ConnectionError> {\n    let reply = match conn\n        .get_property(false, window, atom, xproto::AtomEnum::ATOM, 0, 1024)?\n        .reply_unchecked()\n    {\n        Ok(Some(reply)) => reply,\n        Ok(None) | Err(ConnectionError::ParseError(_)) => return Ok(HashSet::default()),\n        Err(err) => return Err(err),\n    };\n\n    let Some(vals) = reply.value32() else {\n        return Ok(HashSet::default());\n    };\n\n    Ok(vals.collect())\n}\n\nfn replace_window_list(\n    conn: &X11Connection,\n    win: xproto::Window,\n    a: impl Into<xproto::Atom>,\n    list: impl AsRef<[u32]>,\n) -> Result<VoidCookie<X11Connection>, ConnectionError> {\n    conn.change_property32(\n        xproto::PropMode::REPLACE,\n        win,\n        a,\n        xproto::AtomEnum::WINDOW,\n        list.as_ref(),\n    )\n}\n\nfn get_atom_name(\n    conn: &X11Connection,\n    atom: impl Into<xproto::Atom>,\n) -> Result<String, ConnectionError> {\n    if let Some(reply) = conn.get_atom_name(atom.into())?.reply_unchecked()? {\n        Ok(String::from_utf8(reply.name).unwrap_or(\"<invalid string>\".to_string()))\n    } else {\n        Ok(\"<unknown>\".to_string())\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor/xwayland.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nmod xwm;\nuse std::{\n    io::{self, Read as _},\n    os::fd::{AsFd, AsRawFd as _},\n    path::{Path, PathBuf},\n    sync::Arc,\n};\n\nuse anyhow::{anyhow, bail, Context as _};\nuse pathsearch::find_executable_in_path;\nuse tracing::{debug, trace};\npub use xwm::*;\n\nuse crate::{\n    config::HomeIsolationMode,\n    container::{Container, ContainerHandle},\n    session::compositor::ClientState,\n};\n\npub struct XWayland {\n    pub display_socket: DisplaySocket,\n    pub displayfd_recv: mio::unix::pipe::Receiver,\n    pub child: ContainerHandle,\n\n    extern_socket_dir: PathBuf,\n    xwm_socket: Option<mio::net::UnixStream>,\n}\n\n// Where the socket gets mounted inside the container.\nconst CONONICAL_DISPLAY_PATH: &str = \"/tmp/.X11-unix\";\n\npub struct DisplaySocket(u32);\n\nimpl DisplaySocket {\n    fn pick_unused() -> anyhow::Result<Self> {\n        use rustix::net::*;\n\n        // Because we're using a mount namespace, we don't need to worry about\n        // system sockets in /tmp leaking into our container. However, because we\n        // don't use a network namespace, it is possible for system abstract sockets\n        // to be available. 
We can ensure that isn't the case by attempting to\n        // bind the abstract socket.\n        let mut display = 1;\n        let sock = socket(AddressFamily::UNIX, SocketType::STREAM, None)?;\n\n        loop {\n            let dp = DisplaySocket(display);\n\n            match rustix::net::bind(\n                &sock,\n                // By convention, the name is the same as the path.\n                &SocketAddrUnix::new_abstract_name(dp.inner_path().as_os_str().as_encoded_bytes())?,\n            ) {\n                Ok(()) => return Ok(dp), // Discard the abstract socket.\n                Err(e) if e.kind() == io::ErrorKind::AddrInUse => display += 1,\n                Err(e) => return Err(e.into()),\n            }\n        }\n    }\n\n    pub fn display(&self) -> String {\n        format!(\":{}\", self.0)\n    }\n\n    pub fn inner_path(&self) -> PathBuf {\n        Path::new(CONONICAL_DISPLAY_PATH).join(format!(\"X{}\", self.0))\n    }\n}\n\nimpl XWayland {\n    pub fn spawn(\n        dh: &mut wayland_server::DisplayHandle,\n        xdg_runtime_dir: impl AsRef<Path>,\n        stdio: impl AsFd,\n    ) -> anyhow::Result<Self> {\n        let (xwm_xwayland, xwm_compositor) = mio::net::UnixStream::pair()?;\n        let (wayland_xwayland, wayland_compositor) = mio::net::UnixStream::pair()?;\n\n        // XWayland writes the the display number and a newline to this pipe when\n        // it's ready.\n        let (displayfd_send, displayfd_recv) = mio::unix::pipe::new()?;\n\n        let display_socket = DisplaySocket::pick_unused()?;\n\n        // Put the socket in a folder, so we can bind-mount that to\n        // /tmp/.X11-unix inside the (app) container.\n        let extern_socket_path = xdg_runtime_dir\n            .as_ref()\n            .join(display_socket.inner_path().strip_prefix(\"/\").unwrap());\n        let extern_socket_dir = extern_socket_path.parent().unwrap();\n\n        std::fs::create_dir_all(extern_socket_dir)?;\n        let socket = 
mio::net::UnixListener::bind(&extern_socket_path)?;\n\n        let exe = find_executable_in_path(\"Xwayland\")\n            .ok_or(anyhow!(\"Xwayland not in PATH\"))?\n            .as_os_str()\n            .to_owned();\n        let args = vec![\n            exe,\n            \"-verbose\".into(),\n            \"-rootless\".into(),\n            \"-terminate\".into(),\n            \"-force-xrandr-emulation\".into(),\n            \"-wm\".into(),\n            xwm_xwayland.as_raw_fd().to_string().into(),\n            \"-displayfd\".into(),\n            displayfd_send.as_raw_fd().to_string().into(),\n            \"-listenfd\".into(),\n            socket.as_raw_fd().to_string().into(),\n        ];\n\n        let mut container = Container::new(args, HomeIsolationMode::Tmpfs)?;\n\n        container.set_env(\n            \"WAYLAND_SOCKET\",\n            format!(\"{}\", wayland_xwayland.as_raw_fd()),\n        );\n\n        container.set_stdout(stdio.as_fd())?;\n        container.set_stderr(stdio.as_fd())?;\n\n        unsafe {\n            container.pre_exec(move || {\n                // unset the CLOEXEC flag from the sockets we need to pass\n                // to xwayland.\n                unset_cloexec(&wayland_xwayland)?;\n                unset_cloexec(&xwm_xwayland)?;\n                unset_cloexec(&displayfd_send)?;\n                unset_cloexec(&socket)?;\n\n                Ok(())\n            });\n        }\n\n        let child = container.spawn().context(\"failed to spawn XWayland\")?;\n        debug!(x11_socket = ?extern_socket_path, \"spawned Xwayland instance\");\n\n        // Insert the client into the display handle. 
The order is important\n        // here; XWayland never starts up at all unless it can roundtrip with\n        // wayland.\n        let _client = dh.insert_client(\n            wayland_compositor.into(),\n            Arc::new(ClientState { xwayland: true }),\n        )?;\n\n        Ok(Self {\n            display_socket,\n            displayfd_recv,\n            child,\n\n            extern_socket_dir: extern_socket_dir.to_owned(),\n            xwm_socket: Some(xwm_compositor),\n        })\n    }\n\n    pub fn poll_ready(&mut self) -> anyhow::Result<Option<mio::net::UnixStream>> {\n        if self.xwm_socket.is_none() {\n            bail!(\"XWayland already marked as ready\")\n        }\n\n        let mut buf = [0; 64];\n\n        match self.displayfd_recv.read(&mut buf) {\n            Ok(len) => {\n                if (buf[..len]).contains(&b'\\n') {\n                    trace!(\"Xwayland ready\");\n                    return Ok(self.xwm_socket.take());\n                } else {\n                    // Not ready yet.\n                }\n            }\n            Err(err) if err.kind() == std::io::ErrorKind::WouldBlock => (),\n            Err(err) => return Err(err).context(\"reading from xwayland pipe failed\"),\n        }\n\n        Ok(None)\n    }\n\n    pub fn prepare_socket(&self, container: &mut Container) {\n        container.bind_mount(&self.extern_socket_dir, Path::new(CONONICAL_DISPLAY_PATH));\n        container.set_env(\"DISPLAY\", self.display_socket.display());\n    }\n}\n\nfn unset_cloexec(socket_fd: impl AsFd) -> Result<(), rustix::io::Errno> {\n    rustix::io::fcntl_setfd(socket_fd, rustix::io::FdFlags::empty())\n}\n"
  },
  {
    "path": "mm-server/src/session/compositor.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{collections::BTreeMap, sync::Arc};\n\nuse protocols::*;\nuse slotmap::SlotMap;\nuse tracing::{debug, instrument, trace};\nuse wayland_protocols::{\n    wp::{\n        fractional_scale::v1::server::wp_fractional_scale_manager_v1,\n        linux_dmabuf::zv1::server::zwp_linux_dmabuf_v1,\n        linux_drm_syncobj::v1::server::wp_linux_drm_syncobj_manager_v1,\n        pointer_constraints::zv1::server::zwp_pointer_constraints_v1,\n        presentation_time::server::wp_presentation,\n        relative_pointer::zv1::server::zwp_relative_pointer_manager_v1,\n        text_input::zv3::server::zwp_text_input_manager_v3,\n    },\n    xdg::shell::server::xdg_wm_base,\n    xwayland::shell::v1::server::xwayland_shell_v1,\n};\nuse wayland_server::{\n    protocol::{self, wl_output, wl_shm},\n    Resource as _,\n};\n\nuse crate::{\n    session::{\n        control::*,\n        video::{self, TextureSync},\n        SessionHandle,\n    },\n    vulkan::VkContext,\n};\n\npub mod buffers;\nmod dispatch;\nmod oneshot_render;\nmod output;\nmod protocols;\nmod sealed;\nmod seat;\nmod serial;\nmod shm;\nmod stack;\npub mod surface;\npub mod xwayland;\n\npub use seat::{ButtonState, KeyState};\n\nuse super::EPOCH;\n\npub struct Compositor {\n    serial: serial::Serial,\n\n    surfaces: SlotMap<surface::SurfaceKey, surface::Surface>,\n    buffers: SlotMap<buffers::BufferKey, buffers::Buffer>,\n    shm_pools: SlotMap<shm::ShmPoolKey, shm::ShmPool>,\n    cached_dmabuf_feedback: buffers::CachedDmabufFeedback,\n    imported_syncobj_timelines: SlotMap<buffers::SyncobjTimelineKey, buffers::SyncobjTimeline>,\n    in_flight_buffers: Vec<surface::ContentUpdate>,\n    pending_presentation_feedback: Vec<surface::PendingPresentationFeedback>,\n\n    surface_stack: Vec<surface::SurfaceKey>,\n    active_surface: Option<surface::SurfaceKey>,\n\n    output_proxies: Vec<wl_output::WlOutput>,\n\n  
  // TODO: one seat per operator\n    pub default_seat: seat::Seat,\n\n    display_params: DisplayParams,\n    session_handle: SessionHandle,\n\n    xwm: Option<xwayland::Xwm>,\n    xwayland_surface_lookup: BTreeMap<u64, surface::SurfaceKey>,\n\n    // At the bottom for drop order.\n    vk: Arc<VkContext>,\n}\n\nimpl Compositor {\n    pub fn new(\n        vk: Arc<VkContext>,\n        handle: SessionHandle,\n        display_params: DisplayParams,\n    ) -> anyhow::Result<Self> {\n        let cached_dmabuf_feedback = buffers::CachedDmabufFeedback::new(vk.clone())?;\n\n        Ok(Self {\n            serial: serial::Serial::new(),\n\n            surfaces: SlotMap::default(),\n            buffers: SlotMap::default(),\n            shm_pools: SlotMap::default(),\n            cached_dmabuf_feedback,\n            imported_syncobj_timelines: SlotMap::default(),\n            in_flight_buffers: Vec::new(),\n            pending_presentation_feedback: Vec::new(),\n\n            surface_stack: Vec::new(),\n            active_surface: None,\n\n            output_proxies: Vec::new(),\n\n            default_seat: seat::Seat::default(),\n\n            display_params,\n            session_handle: handle.clone(),\n\n            xwm: None,\n            xwayland_surface_lookup: BTreeMap::default(),\n\n            vk,\n        })\n    }\n\n    pub fn update_display_params(\n        &mut self,\n        display_params: DisplayParams,\n        active: bool,\n    ) -> anyhow::Result<()> {\n        let now = EPOCH.elapsed().as_millis() as u32;\n\n        // Reconfigure all surfaces to be the right size.\n        for surface in &self.surface_stack {\n            let surf = &mut self.surfaces[*surface];\n\n            let xwin = surf.role.current.as_ref().and_then(|role| {\n                if let surface::SurfaceRole::XWayland { serial } = role {\n                    self.xwm.as_ref().unwrap().xwindow_for_serial(*serial)\n                } else {\n                    None\n                }\n    
        });\n\n            surf.reconfigure(display_params, xwin);\n\n            if display_params.width != self.display_params.width\n                || display_params.height != self.display_params.height\n                || display_params.ui_scale != self.display_params.ui_scale\n            {\n                // Try to trick the surface into thinking it's moving to a\n                // different monitor. This helps some games adjust to mode\n                // changes.\n                for wl_output in &self.output_proxies {\n                    if wl_output.client() == surf.wl_surface.client() {\n                        surf.wl_surface.leave(wl_output);\n                        surf.wl_surface.enter(wl_output);\n                    }\n                }\n\n                // Discharge any pending frame callbacks, since we won't\n                // render the current content, and some clients get stuck\n                // otherwise.\n                if let Some(cb) = surf.frame_callback.current.take() {\n                    cb.done(now);\n                }\n            }\n        }\n\n        self.update_focus_and_visibility(active)?;\n        self.display_params = display_params;\n        self.emit_output_params();\n\n        Ok(())\n    }\n\n    #[instrument(skip_all)]\n    pub fn composite_frame(\n        &mut self,\n        video_pipeline: &mut video::EncodePipeline,\n    ) -> anyhow::Result<()> {\n        let now = EPOCH.elapsed().as_millis() as u32;\n        let ready = unsafe { video_pipeline.begin()? 
};\n        if !ready {\n            debug!(\"dropped frame because of backpressure\");\n            return Ok(());\n        }\n\n        // Iterate backwards to find the first fullscreen window.\n        let first_visible_idx = self\n            .surface_stack\n            .iter()\n            .rposition(|id| {\n                self.surfaces[*id]\n                    .configuration\n                    .map_or(true, |conf| conf.fullscreen)\n            })\n            .unwrap_or_default();\n\n        let num_surfaces = self.surface_stack.len() - first_visible_idx;\n        let mut presentation_feedback = Vec::with_capacity(num_surfaces);\n\n        for id in self.surface_stack[first_visible_idx..].iter() {\n            let surface = &mut self.surfaces[*id];\n\n            let conf = surface\n                .configuration\n                .expect(\"mapped surface has no configuration\");\n\n            let content = surface\n                .content\n                .as_mut()\n                .expect(\"mapped surface has no content\");\n\n            let buffer = &mut self.buffers[content.buffer];\n\n            let sync = match &mut buffer.backing {\n                buffers::BufferBacking::Dmabuf { .. } => {\n                    if let Some((acquire, _)) = content.explicit_sync.as_ref() {\n                        Some(TextureSync::Explicit(acquire.clone()))\n                    } else {\n                        Some(TextureSync::ImplicitInterop)\n                    }\n                }\n                _ => None,\n            };\n\n            unsafe { content.tp_done = video_pipeline.composite_surface(buffer, sync, conf)? 
};\n            if let Some(callback) = surface.frame_callback.current.take().as_mut() {\n                callback.done(now);\n            }\n\n            if let Some(fb) = content.wp_presentation_feedback.take() {\n                presentation_feedback.push(fb);\n            }\n\n            trace!(?surface, ?conf, \"compositing surface\");\n        }\n\n        let tp_render = unsafe { video_pipeline.end_and_submit()? };\n        for fb in presentation_feedback.drain(..) {\n            self.pending_presentation_feedback\n                .push(surface::PendingPresentationFeedback(fb, tp_render.clone()));\n        }\n\n        Ok(())\n    }\n\n    pub fn idle(&mut self, active: bool) -> anyhow::Result<()> {\n        // Update the window stack, if it changed.\n        self.update_focus_and_visibility(active)?;\n\n        // Send any pending surface configures.\n        self.configure_surfaces()?;\n\n        // Check if the pointer is locked.\n        self.update_pointer_lock();\n\n        // Send pending pointer frames.\n        self.default_seat.pointer_frame();\n\n        // Release any unused buffers.\n        self.release_buffers()?;\n\n        // Send presentation feedback.\n        self.send_presentation_feedback()?;\n\n        Ok(())\n    }\n}\n\n#[derive(Debug, Default)]\npub struct ClientState {\n    xwayland: bool,\n}\n\nimpl wayland_server::backend::ClientData for ClientState {\n    fn initialized(&self, _client_id: wayland_server::backend::ClientId) {}\n    fn disconnected(\n        &self,\n        _client_id: wayland_server::backend::ClientId,\n        _reason: wayland_server::backend::DisconnectReason,\n    ) {\n    }\n}\n\npub fn create_globals(dh: &wayland_server::DisplayHandle) {\n    create_global::<protocol::wl_compositor::WlCompositor>(dh, 6);\n    create_global::<protocol::wl_output::WlOutput>(dh, 4);\n    create_global::<xdg_wm_base::XdgWmBase>(dh, 6);\n    create_global::<wp_fractional_scale_manager_v1::WpFractionalScaleManagerV1>(dh, 1);\n\n 
   create_global::<protocol::wl_seat::WlSeat>(dh, 9);\n    create_global::<protocol::wl_data_device_manager::WlDataDeviceManager>(dh, 3);\n    create_global::<zwp_pointer_constraints_v1::ZwpPointerConstraintsV1>(dh, 1);\n    create_global::<zwp_relative_pointer_manager_v1::ZwpRelativePointerManagerV1>(dh, 1);\n    create_global::<zwp_text_input_manager_v3::ZwpTextInputManagerV3>(dh, 1);\n\n    create_global::<wl_shm::WlShm>(dh, 1);\n    create_global::<zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1>(dh, 5);\n    create_global::<wp_presentation::WpPresentation>(dh, 1);\n    create_global::<wp_linux_drm_syncobj_manager_v1::WpLinuxDrmSyncobjManagerV1>(dh, 1);\n\n    create_global::<xwayland_shell_v1::XwaylandShellV1>(dh, 1);\n    create_global::<wl_drm::WlDrm>(dh, 2);\n}\n\nfn create_global<G: wayland_server::Resource + 'static>(\n    dh: &wayland_server::DisplayHandle,\n    version: u32,\n) where\n    Compositor: wayland_server::GlobalDispatch<G, ()>,\n{\n    let _ = dh.create_global::<Compositor, G, ()>(version, ());\n}\n"
  },
  {
    "path": "mm-server/src/session/control.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse crossbeam_channel::Sender;\n\nuse crate::{\n    codec::{AudioCodec, VideoCodec},\n    color::VideoProfile,\n    pixel_scale::PixelScale,\n    server::stream::StreamWriter,\n    session::compositor::{self, ButtonState},\n};\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub struct DisplayParams {\n    pub width: u32,\n    pub height: u32,\n    pub framerate: u32,\n    pub ui_scale: PixelScale,\n}\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub struct VideoStreamParams {\n    pub width: u32,\n    pub height: u32,\n    pub codec: VideoCodec,\n    pub preset: u32,\n    pub profile: VideoProfile,\n}\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub struct AudioStreamParams {\n    pub sample_rate: u32,\n    pub channels: u32,\n    pub codec: AudioCodec,\n}\n\npub enum ControlMessage {\n    Stop,\n    Attach {\n        id: u64,\n        sender: Sender<SessionEvent>,\n        video_params: VideoStreamParams,\n        audio_params: AudioStreamParams,\n        stream_writer: StreamWriter,\n        ready: oneshot::Sender<()>,\n    },\n    Detach(u64),\n    RefreshVideo,\n    UpdateDisplayParams(DisplayParams),\n    KeyboardInput {\n        key_code: u32,\n        state: compositor::KeyState,\n        char: Option<char>,\n    },\n    PointerEntered,\n    PointerLeft,\n    PointerMotion(f64, f64),\n    RelativePointerMotion(f64, f64),\n    PointerInput {\n        x: f64,\n        y: f64,\n        button_code: u32,\n        state: ButtonState,\n    },\n    PointerAxis(f64, f64),\n    PointerAxisDiscrete(f64, f64),\n    GamepadAvailable(u64),\n    GamepadUnavailable(u64),\n    GamepadAxis {\n        id: u64,\n        axis_code: u32,\n        value: f64,\n    },\n    GamepadTrigger {\n        id: u64,\n        trigger_code: u32,\n        value: f64,\n    },\n    GamepadInput {\n        id: u64,\n        button_code: u32,\n        state: ButtonState,\n    
},\n}\n\n#[derive(Debug, Clone)]\npub enum SessionEvent {\n    DisplayParamsChanged {\n        params: DisplayParams,\n        reattach: bool,\n    },\n    VideoFrame {\n        stream_seq: u64,\n        seq: u64,\n        frame: bytes::Bytes,\n    },\n    AudioFrame {\n        _stream_seq: u64,\n        seq: u64,\n        frame: bytes::Bytes,\n    },\n    CursorUpdate {\n        image: Option<bytes::Bytes>,\n        icon: Option<cursor_icon::CursorIcon>,\n        hotspot_x: u32,\n        hotspot_y: u32,\n    },\n    PointerLocked(f64, f64),\n    PointerReleased,\n    Shutdown,\n}\n"
  },
  {
    "path": "mm-server/src/session/handle.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{collections::BTreeMap, sync::Arc};\n\nuse crossbeam_channel as crossbeam;\nuse parking_lot::Mutex;\n\nuse super::control::SessionEvent;\nuse crate::server::stream::StreamWriter;\n\nstruct Client {\n    events: crossbeam::Sender<SessionEvent>,\n    writer: StreamWriter,\n}\n\nstruct Inner {\n    attachments: BTreeMap<u64, Client>,\n}\n\n#[derive(Clone)]\npub struct SessionHandle(Arc<Mutex<Inner>>, Arc<mio::Waker>);\n\nimpl SessionHandle {\n    pub fn new(waker: Arc<mio::Waker>) -> Self {\n        Self(\n            Arc::new(Mutex::new(Inner {\n                attachments: BTreeMap::new(),\n            })),\n            waker,\n        )\n    }\n\n    pub fn insert_client(\n        &self,\n        id: u64,\n        events: crossbeam::Sender<SessionEvent>,\n        writer: StreamWriter,\n    ) {\n        self.0\n            .lock()\n            .attachments\n            .insert(id, Client { events, writer });\n    }\n\n    pub fn remove_client(&self, id: u64) {\n        self.0.lock().attachments.remove(&id);\n    }\n\n    pub fn remove_all(&self) {\n        self.0.lock().attachments.clear();\n    }\n\n    pub fn dispatch(&self, event: SessionEvent) {\n        let attachments = &self.0.lock().attachments;\n        for (_, client) in attachments.iter() {\n            let _ = client.events.send(event.clone());\n        }\n    }\n\n    pub fn dispatch_audio_frame(&self, pts: u64, frame: bytes::Bytes, stream_restart: bool) {\n        let attachments = &mut self.0.lock().attachments;\n        for (_, client) in attachments.iter_mut() {\n            let (stream_seq, seq) =\n                client\n                    .writer\n                    .write_audio_frame(pts, frame.clone(), stream_restart);\n            let _ = client.events.send(SessionEvent::AudioFrame {\n                _stream_seq: stream_seq,\n                seq,\n                frame: 
frame.clone(),\n            });\n        }\n    }\n\n    pub fn dispatch_video_frame(\n        &self,\n        pts: u64,\n        frame: bytes::Bytes,\n        hierarchical_layer: u32,\n        stream_restart: bool,\n    ) {\n        let attachments = &mut self.0.lock().attachments;\n        for (_, client) in attachments.iter_mut() {\n            let (stream_seq, seq) = client.writer.write_video_frame(\n                pts,\n                frame.clone(),\n                hierarchical_layer,\n                stream_restart,\n            );\n\n            let _ = client.events.send(SessionEvent::VideoFrame {\n                stream_seq,\n                seq,\n                frame: frame.clone(),\n            });\n        }\n    }\n\n    pub fn wake(&self) -> std::io::Result<()> {\n        self.1.wake()\n    }\n\n    pub fn kick_clients(&self) {\n        let attachments = &mut self.0.lock().attachments;\n        for (_, client) in std::mem::take(attachments) {\n            let _ = client.events.send(SessionEvent::Shutdown);\n        }\n    }\n\n    pub fn num_attachments(&self) -> usize {\n        self.0.lock().attachments.len()\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/input/udevfs.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    collections::BTreeMap,\n    ffi::OsStr,\n    path::{Path, PathBuf},\n    sync::Arc,\n    time,\n};\n\nuse fuser as fuse;\nuse libc::EBADF;\nuse parking_lot::Mutex;\nuse tracing::{debug, trace};\n\nuse super::DeviceState;\n\nconst ENOENT: i32 = rustix::io::Errno::NOENT.raw_os_error();\n\nconst UDEV_INPUT_DATA: &[u8] = r#\"E:ID_INPUT=1\nE:ID_INPUT_JOYSTICK=1\nE:ID_BUS=usb\nG:seat\nG:uaccess\nQ:seat\nQ:uaccess\nV:1\n\"#\n.as_bytes();\n\nconst ZERO_TTL: time::Duration = time::Duration::ZERO;\n\n#[derive(Debug, Clone)]\nstruct Entry {\n    path: PathBuf,\n    attr: fuse::FileAttr,\n    /// The associated device ID.\n    dev: Option<u64>,\n}\n\nstruct InodeCache {\n    inodes: BTreeMap<u64, Entry>,\n    next_inode: u64,\n    ctime: time::SystemTime,\n}\n\nimpl InodeCache {\n    fn get_or_insert(\n        &mut self,\n        p: impl AsRef<Path>,\n        mut attr: fuse::FileAttr,\n        dev: Option<u64>,\n    ) -> fuse::FileAttr {\n        for entry in self.inodes.values() {\n            if entry.path == p.as_ref() {\n                return entry.attr;\n            }\n        }\n\n        let ino = self.next_inode;\n        self.next_inode += 1;\n\n        attr.ino = ino;\n        self.inodes.insert(\n            ino,\n            Entry {\n                path: p.as_ref().to_owned(),\n                attr,\n                dev,\n            },\n        );\n\n        attr\n    }\n\n    fn lookup_name(&self, inode: u64) -> Option<(PathBuf, Option<u64>)> {\n        if inode == fuse::FUSE_ROOT_ID {\n            return Some((Path::new(\"/\").to_owned(), None));\n        }\n\n        self.inodes\n            .get(&inode)\n            .map(|entry| (entry.path.clone(), entry.dev))\n    }\n\n    fn reply_add_dirs<P>(\n        &self,\n        mut reply: fuse::ReplyDirectory,\n        names: impl IntoIterator<Item = P>,\n        skip: usize,\n    ) where\n   
     P: AsRef<Path>,\n    {\n        let mut offset = 1_i64;\n        for name in names.into_iter().skip(skip) {\n            for (ino, entry) in &self.inodes {\n                if entry.path == name.as_ref() {\n                    if reply.add(\n                        *ino,\n                        offset,\n                        entry.attr.kind,\n                        entry.path.file_name().unwrap().to_str().unwrap(),\n                    ) {\n                        return reply.ok();\n                    };\n\n                    offset += 1;\n                }\n            }\n        }\n\n        reply.ok()\n    }\n\n    fn cache_dir(&mut self, p: impl AsRef<Path>, dev: Option<u64>) -> fuse::FileAttr {\n        let attr = fuse::FileAttr {\n            ino: 0,\n            size: 0,\n            blocks: 0,\n            atime: self.ctime,\n            mtime: self.ctime,\n            ctime: self.ctime,\n            crtime: time::SystemTime::UNIX_EPOCH,\n            kind: fuse::FileType::Directory,\n            perm: 0o777,\n            nlink: 1,\n            uid: 0,\n            gid: 0,\n            rdev: 0,\n            blksize: 512,\n            flags: 0,\n        };\n\n        self.get_or_insert(p, attr, dev)\n    }\n\n    fn cache_file(&mut self, p: impl AsRef<Path>, dev: Option<u64>, len: usize) -> fuse::FileAttr {\n        let attr = fuse::FileAttr {\n            ino: 0,\n            size: len as u64,\n            blocks: 0,\n            atime: time::UNIX_EPOCH,\n            mtime: time::UNIX_EPOCH,\n            ctime: time::UNIX_EPOCH,\n            crtime: time::UNIX_EPOCH,\n            kind: fuse::FileType::RegularFile,\n            perm: 0o777,\n            nlink: 1,\n            uid: 0,\n            gid: 0,\n            rdev: 0,\n            blksize: 512,\n            flags: 0,\n        };\n\n        self.get_or_insert(p, attr, dev)\n    }\n\n    fn cache_symlink(&mut self, p: impl AsRef<Path>, dev: Option<u64>) -> fuse::FileAttr {\n        let attr 
= fuse::FileAttr {\n            ino: 0,\n            size: 0,\n            blocks: 0,\n            atime: self.ctime,\n            mtime: self.ctime,\n            ctime: self.ctime,\n            crtime: time::SystemTime::UNIX_EPOCH,\n            kind: fuse::FileType::Symlink,\n            perm: 0o777,\n            nlink: 1,\n            uid: 0,\n            gid: 0,\n            rdev: 0,\n            blksize: 512,\n            flags: 0,\n        };\n\n        self.get_or_insert(p, attr, dev)\n    }\n}\n\n/// A FUSE filesystem designed to fool libudev. All incoming paths are intended\n/// to be absolute. The following paths are emulated:\n///   - /sys/devices/virtual/input: contains folders for each virtual input\n///     device. Contains both a top-level folder, inputX, and an eventX folder\n///     for the evdev node.\n///   - /sys/class/input: contains symlinks to the above device entries.\n///   - /sys/class/hidraw: empty, so that no hidraw devices can be found\n///   - /run/udev/control: an empty file that indicates udev is running\n///   - /run/udev/data: contains \"c{major}:{minor}\" files with metadata on each\n///     device.\npub struct UdevFs {\n    state: Arc<Mutex<super::InputManagerState>>,\n    tree: InodeCache,\n}\n\nimpl UdevFs {\n    pub fn new(state: Arc<Mutex<super::InputManagerState>>) -> Self {\n        Self {\n            state,\n            tree: InodeCache {\n                inodes: Default::default(),\n                next_inode: fuse::FUSE_ROOT_ID + 1,\n                ctime: time::SystemTime::now(),\n            },\n        }\n    }\n}\n\nimpl fuse::Filesystem for UdevFs {\n    fn lookup(\n        &mut self,\n        _req: &fuse::Request<'_>,\n        parent: u64,\n        name: &std::ffi::OsStr,\n        reply: fuse::ReplyEntry,\n    ) {\n        let Some(name) = name.to_str() else {\n            debug!(?name, \"invalid lookup name\");\n            return reply.error(ENOENT);\n        };\n\n        let inodes = &mut self.tree;\n        
let Some((parent_path, dev)) = inodes.lookup_name(parent) else {\n            debug!(?parent, ?name, \"lookup failed\");\n            return reply.error(ENOENT);\n        };\n\n        trace!(?parent_path, ?name, dev, \"lookup\");\n        match (parent_path.to_str().unwrap(), name, dev) {\n            (\"/\", \"sys\", _) => reply.entry(&ZERO_TTL, &inodes.cache_dir(\"/sys\", None), 0),\n            (\"/sys\", \"class\", _) => {\n                reply.entry(&ZERO_TTL, &inodes.cache_dir(\"/sys/class\", None), 0)\n            }\n            (\"/sys/class\", \"input\", _) => {\n                reply.entry(&ZERO_TTL, &inodes.cache_dir(\"/sys/class/input\", None), 0)\n            }\n            (\"/sys/class/input\", name, _) => {\n                let Some(dev) = self\n                    .state\n                    .lock()\n                    .device_by_eventname(name)\n                    .map(|dev| dev.id)\n                else {\n                    debug!(name, \"device not found in /sys/class/input\");\n                    return reply.error(ENOENT);\n                };\n\n                reply.entry(\n                    &ZERO_TTL,\n                    &inodes.cache_symlink(parent_path.join(name), Some(dev)),\n                    0,\n                );\n            }\n            (\"/sys/class\", \"hidraw\", _) => {\n                reply.entry(&ZERO_TTL, &inodes.cache_dir(\"/sys/class/hidraw\", None), 0)\n            }\n            (\"/sys\", \"devices\", _) => {\n                reply.entry(&ZERO_TTL, &inodes.cache_dir(\"/sys/devices\", None), 0)\n            }\n            (\"/sys/devices\", \"virtual\", _) => reply.entry(\n                &ZERO_TTL,\n                &inodes.cache_dir(\"/sys/devices/virtual\", None),\n                0,\n            ),\n            (\"/sys/devices/virtual\", \"input\", _) => reply.entry(\n                &ZERO_TTL,\n                &inodes.cache_dir(\"/sys/devices/virtual/input\", None),\n                0,\n            ),\n   
         (\"/sys/devices/virtual/input\", name, _) => {\n                let Some(dev) = self.state.lock().device_by_devname(name).map(|dev| dev.id) else {\n                    debug!(name, \"device not found in /sys/devices/virtual/input\");\n                    return reply.error(ENOENT);\n                };\n\n                reply.entry(\n                    &ZERO_TTL,\n                    &inodes.cache_dir(parent_path.join(name), Some(dev)),\n                    0,\n                );\n            }\n            (p, \"uevent\", Some(dev)) if p.starts_with(\"/sys/devices/virtual/input\") => {\n                let guard = self.state.lock();\n                let Some(dev) = guard.device_by_id(dev) else {\n                    debug!(?p, dev, \"device not found in /sys/devices/virtual/input\");\n                    return reply.error(ENOENT);\n                };\n\n                // Inside the device directory, there are two levels of subdirectories.\n                let path = parent_path\n                    .strip_prefix(Path::new(\"/sys/devices/virtual/input\"))\n                    .unwrap();\n                if path.as_os_str().is_empty() {\n                    unreachable!() // Handled by the case above this one.\n                }\n\n                // Distinguish between the inputX uevent and the eventX uevent.\n                let content = if path.to_str().unwrap() == dev.devname {\n                    make_input_uevent(dev)\n                } else if path\n                    .file_name()\n                    .unwrap()\n                    .to_str()\n                    .unwrap()\n                    .starts_with(\"event\")\n                {\n                    make_evdev_uevent(dev)\n                } else {\n                    debug!(?parent_path, \"unrecognized uevent path\");\n                    return reply.error(ENOENT);\n                };\n\n                reply.entry(\n                    &ZERO_TTL,\n                    &self\n            
            .tree\n                        .cache_file(parent_path.join(\"uevent\"), Some(dev.id), content.len()),\n                    0,\n                );\n            }\n            (p, \"subsystem\", Some(dev)) if p.starts_with(\"/sys/devices/virtual/input\") => {\n                reply.entry(\n                    &ZERO_TTL,\n                    &self\n                        .tree\n                        .cache_symlink(parent_path.join(\"subsystem\"), Some(dev)),\n                    0,\n                );\n            }\n            (p, name, Some(dev))\n                if p.starts_with(\"/sys/devices/virtual/input\") && name.starts_with(\"event\") =>\n            {\n                // This is /sys/devices/virtual/input/inputX/eventX.\n                reply.entry(\n                    &ZERO_TTL,\n                    &inodes.cache_dir(parent_path.join(name), Some(dev)),\n                    0,\n                );\n            }\n            (\"/\", \"run\", _) => reply.entry(&ZERO_TTL, &inodes.cache_dir(\"/run\", None), 0),\n            (\"/run\", \"udev\", _) => reply.entry(&ZERO_TTL, &inodes.cache_dir(\"/run/udev\", None), 0),\n            (\"/run/udev\", \"control\", _) => reply.entry(\n                &ZERO_TTL,\n                &inodes.cache_file(\"/run/udev/control\", None, 0),\n                0,\n            ),\n            (\"/run/udev\", \"data\", _) => {\n                reply.entry(&ZERO_TTL, &inodes.cache_dir(\"/run/udev/data\", None), 0)\n            }\n            (\"/run/udev\", \"udev.conf.d\", _) => reply.error(ENOENT),\n\n            (\"/run/udev/data\", name, _) => {\n                let guard = self.state.lock();\n                for dev in &guard.devices {\n                    if name == format!(\"c13:{}\", dev.counter) {\n                        return reply.entry(\n                            &ZERO_TTL,\n                            &inodes.cache_file(\n                                parent_path.join(name),\n                          
      Some(dev.id),\n                                UDEV_INPUT_DATA.len(),\n                            ),\n                            0,\n                        );\n                    }\n                }\n\n                debug!(?name, \"no device found in /run/udev/data\");\n                reply.error(ENOENT);\n            }\n            (parent_name, name, dev) => {\n                debug!(parent_name, name, dev, \"udevfs lookup failed\");\n                reply.error(ENOENT);\n            }\n        }\n    }\n\n    fn getattr(\n        &mut self,\n        _req: &fuse::Request<'_>,\n        ino: u64,\n        _fh: Option<u64>,\n        reply: fuse::ReplyAttr,\n    ) {\n        let Some(entry) = self.tree.inodes.get(&ino) else {\n            debug!(ino, \"lookup failed\");\n            return reply.error(ENOENT);\n        };\n\n        reply.attr(&ZERO_TTL, &entry.attr);\n    }\n\n    fn readlink(&mut self, _req: &fuse::Request<'_>, ino: u64, reply: fuse::ReplyData) {\n        let Some(entry) = self.tree.inodes.get(&ino) else {\n            debug!(ino, \"lookup failed\");\n            return reply.error(ENOENT);\n        };\n\n        trace!(path = ?entry.path, \"readlink\");\n        if let Some(name) = matches_prefix_with_name(&entry.path, \"/sys/class/input\") {\n            let guard = self.state.lock();\n            let Some(dev) = guard.device_by_eventname(name) else {\n                debug!(eventname = ?name, \"device not found in /sys/devices/virtual/input\");\n                return reply.error(ENOENT);\n            };\n\n            let dst = Path::new(\"/sys/devices/virtual/input\")\n                .join(&dev.devname)\n                .join(name);\n            reply.data(dst.as_os_str().as_encoded_bytes());\n        } else if entry.path.starts_with(\"/sys/devices\")\n            && entry.path.file_name() == Some(Path::new(\"subsystem\").as_os_str())\n        {\n            reply.data(b\"/sys/class/input\");\n        } else {\n            
debug!(path = ?entry.path, dev = ?entry.dev, \"readlink failed\");\n            reply.error(ENOENT);\n        }\n    }\n\n    fn read(\n        &mut self,\n        _req: &fuse::Request<'_>,\n        ino: u64,\n        _fh: u64,\n        _offset: i64,\n        _size: u32,\n        _flags: i32,\n        _lock_owner: Option<u64>,\n        reply: fuse::ReplyData,\n    ) {\n        let Some(entry) = self.tree.inodes.get(&ino) else {\n            debug!(ino, \"lookup failed\");\n            return reply.error(EBADF);\n        };\n\n        trace!(path = ?entry.path, \"read\");\n\n        if entry.path.starts_with(\"/run/udev/data\") {\n            reply.data(UDEV_INPUT_DATA);\n        } else if entry.dev.is_some()\n            && entry.path.starts_with(\"/sys/devices\")\n            && entry.path.file_name() == Some(Path::new(\"uevent\").as_os_str())\n        {\n            let guard = self.state.lock();\n            let Some(dev) = guard.device_by_id(entry.dev.unwrap()) else {\n                debug!(dev = ?entry.dev, \"device lookup failed\");\n                return reply.error(EBADF);\n            };\n\n            let mut parent_path = entry.path.clone();\n            parent_path.pop();\n\n            if parent_path.file_name() == Some(&dev.eventname) {\n                reply.data(&make_evdev_uevent(dev))\n            } else if parent_path.file_name() == Some(&dev.devname) {\n                reply.data(&make_input_uevent(dev))\n            } else {\n                debug!(?entry.path, \"bad uevent path\");\n                reply.error(EBADF);\n            }\n        } else {\n            debug!(path = ?entry.path, dev = entry.dev, \"read failed\");\n            reply.error(EBADF);\n        }\n    }\n\n    fn readdir(\n        &mut self,\n        _req: &fuse::Request<'_>,\n        ino: u64,\n        _fh: u64,\n        skip: i64,\n        mut reply: fuse::ReplyDirectory,\n    ) {\n        let inodes = &mut self.tree;\n        let Some(Entry { path, dev, .. 
}) = inodes.inodes.get(&ino).cloned() else {\n            debug!(ino, \"lookup failed\");\n            return reply.error(EBADF);\n        };\n\n        trace!(?path, ?dev, \"readdir\");\n\n        let skip = skip as usize;\n        match (path.to_str().unwrap(), dev) {\n            (\"/\", _) => inodes.reply_add_dirs(reply, [\"sys\", \"run\"], skip),\n            (\"/sys\", _) => inodes.reply_add_dirs(reply, [\"class\", \"devices\"], skip),\n            (\"/sys/class\", _) => inodes.reply_add_dirs(reply, [\"input\", \"hidraw\"], skip),\n            (\"/sys/class/input\", _) => {\n                let guard = self.state.lock();\n\n                trace!(\"udev is enumerating devices in /sys/class/input\");\n                for (idx, DeviceState { id, eventname, .. }) in\n                    guard.devices.iter().skip(skip).enumerate()\n                {\n                    let attr = inodes.cache_symlink(path.join(eventname), Some(*id));\n\n                    if reply.add(\n                        attr.ino,\n                        (idx as i64) + 1,\n                        fuse::FileType::Symlink,\n                        eventname,\n                    ) {\n                        break;\n                    }\n                }\n\n                reply.ok();\n            }\n            (\"/sys/class/hidraw\", _) => {\n                reply.ok() // Empty.\n            }\n            (\"/sys/devices\", _) => inodes.reply_add_dirs(reply, [\"virtual\"], skip),\n            (\"/sys/devices/virtual\", _) => inodes.reply_add_dirs(reply, [\"input\"], skip),\n            (\"/sys/devices/virtual/input\", _) => {\n                let guard = self.state.lock();\n\n                trace!(\"udev is enumerating devices in /sys/devices/virtual/input\");\n                for (idx, DeviceState { id, devname, .. 
}) in\n                    guard.devices.iter().skip(skip).enumerate()\n                {\n                    let attr = inodes.cache_dir(path.join(devname), Some(*id));\n\n                    if reply.add(\n                        attr.ino,\n                        (idx as i64) + 1,\n                        fuse::FileType::Directory,\n                        devname,\n                    ) {\n                        break;\n                    }\n                }\n\n                reply.ok();\n            }\n            (_p, Some(_))\n                if matches_prefix_with_name(&path, \"/sys/devices/virtual/input\").is_some() =>\n            {\n                // Note: this seems not to happen.\n                // inodes.reply_add_dirs(reply, [\"subsystem\", \"capabilities\",\n                // \"uevent\"], skip)\n            }\n            (\"/run\", _) => inodes.reply_add_dirs(reply, [\"udev\"], skip),\n            (\"/run/udev\", _) => inodes.reply_add_dirs(reply, [\"control\", \"data\"], skip),\n            (\"/run/udev/data\", _) => {\n                // Note: this seems not to happen.\n            }\n            _ => {\n                debug!(?path, ?dev, \"readdir failed\");\n                reply.error(ENOENT);\n            }\n        }\n    }\n\n    fn access(&mut self, _req: &fuse::Request<'_>, _ino: u64, _mask: i32, reply: fuse::ReplyEmpty) {\n        reply.ok()\n    }\n\n    fn release(\n        &mut self,\n        _req: &fuse::Request<'_>,\n        _ino: u64,\n        _fh: u64,\n        _flags: i32,\n        _lock_owner: Option<u64>,\n        _flush: bool,\n        reply: fuse::ReplyEmpty,\n    ) {\n        reply.ok()\n    }\n}\n\nfn make_input_uevent(_dev: &DeviceState) -> Vec<u8> {\n    // TODO hack\n    br#\"PRODUCT=3/45e/2ea/408\nNAME=\"Magic Mirror Emulated Controller\"\nEV=20000b\nKEY=7fdb000000000000 0 0 0 0\nABS=3003f\nUNIQ=\"d0:bc:c1:db:1d:2f\"\n\"#\n    .to_vec()\n}\n\nfn make_evdev_uevent(dev: &DeviceState) -> Vec<u8> {\n    format!(\n  
      \"MAJOR=13\\nMINOR={}\\nDEVNAME=input/{}\\n\",\n        dev.counter,\n        dev.eventname.to_str().unwrap()\n    )\n    .as_bytes()\n    .to_vec()\n}\n\nfn matches_prefix_with_name(p: &Path, prefix: impl AsRef<Path>) -> Option<&OsStr> {\n    match p.strip_prefix(prefix).ok()?.components().next() {\n        Some(std::path::Component::Normal(devname)) => Some(devname),\n        _ => None,\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/input.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    ffi::{OsStr, OsString},\n    path::Path,\n    sync::Arc,\n};\n\nuse fuser as fuse;\nuse parking_lot::Mutex;\nuse southpaw::{\n    sys::{EV_ABS, EV_KEY},\n    AbsAxis, AbsInfo, InputEvent, KeyCode,\n};\nuse tracing::{debug, error};\n\nuse crate::container::Container;\n\nmod udevfs;\nuse udevfs::*;\n\nuse super::compositor::ButtonState;\n\n/// A simulated gamepad layout.\n#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]\npub enum GamepadLayout {\n    #[default]\n    GenericDualStick,\n}\n\n/// Manages input devices (mostly gamepads) in a container using a variety of\n/// well-intentioned but horrible hacks.\npub struct InputDeviceManager {\n    southpaw: southpaw::DeviceTree,\n    state: Arc<Mutex<InputManagerState>>,\n}\n\nstruct DeviceState {\n    id: u64,\n    counter: u16,\n    devname: OsString,   // inputX\n    eventname: OsString, // eventX\n}\n\n#[derive(Default)]\nstruct InputManagerState {\n    counter: u16,\n    devices: Vec<DeviceState>,\n}\n\nimpl InputManagerState {\n    fn device_by_id(&self, id: u64) -> Option<&DeviceState> {\n        self.devices.iter().find(|dev| dev.id == id)\n    }\n\n    fn device_by_devname(&self, name: impl AsRef<OsStr>) -> Option<&DeviceState> {\n        self.devices.iter().find(|dev| dev.devname == name.as_ref())\n    }\n\n    fn device_by_eventname(&self, name: impl AsRef<OsStr>) -> Option<&DeviceState> {\n        self.devices\n            .iter()\n            .find(|dev| dev.eventname == name.as_ref())\n    }\n}\n\n/// A handle for a plugged gamepad.\npub struct GamepadHandle {\n    device: southpaw::Device,\n    ev_buffer: Vec<southpaw::InputEvent>,\n    pub permanent: bool,\n}\nimpl GamepadHandle {\n    pub(crate) fn axis(&mut self, axis_code: u32, value: f64) {\n        let value = value.clamp(-1.0, 1.0) * 128.0 + 128.0;\n        self.ev_buffer.push(InputEvent::new(\n            EV_ABS,\n       
     axis_code as u16,\n            value.floor() as i32,\n        ));\n    }\n\n    pub(crate) fn trigger(&mut self, trigger_code: u32, value: f64) {\n        let value = value.clamp(0.0, 1.0) * 256.0;\n        self.ev_buffer.push(InputEvent::new(\n            EV_ABS,\n            trigger_code as u16,\n            value.floor() as i32,\n        ))\n    }\n\n    pub(crate) fn input(&mut self, button_code: u32, state: ButtonState) {\n        let value = match state {\n            super::compositor::ButtonState::Pressed => 1,\n            super::compositor::ButtonState::Released => 0,\n        };\n\n        // The DualSense sends D-pad buttons as ABS_HAT0{X,Y}.\n        let key_code = southpaw::KeyCode::try_from(button_code as u16);\n        if let Some((axis, direction)) = match key_code {\n            Ok(KeyCode::BtnDpadUp) => Some((AbsAxis::HAT0Y, -1)),\n            Ok(KeyCode::BtnDpadDown) => Some((AbsAxis::HAT0Y, 1)),\n            Ok(KeyCode::BtnDpadLeft) => Some((AbsAxis::HAT0X, -1)),\n            Ok(KeyCode::BtnDpadRight) => Some((AbsAxis::HAT0X, 1)),\n            _ => None,\n        } {\n            // Simulate a press and release, each in a frame.\n            self.ev_buffer\n                .push(InputEvent::new(EV_ABS, axis, value * direction));\n            return;\n        }\n\n        self.ev_buffer\n            .push(InputEvent::new(EV_KEY, button_code as u16, value));\n    }\n\n    pub(crate) fn frame(&mut self) {\n        if let Err(err) = self.device.publish_packet(&self.ev_buffer) {\n            error!(?err, \"failed to publish event packet to device\");\n        }\n\n        self.ev_buffer.clear();\n    }\n}\n\nimpl InputDeviceManager {\n    pub fn new(container: &mut Container) -> anyhow::Result<Self> {\n        let state = Arc::new(Mutex::new(InputManagerState::default()));\n\n        let udevfs_path = container.intern_run_path().join(\".udevfs\");\n        let southpaw_path = container.intern_run_path().join(\".southpaw\");\n\n        let 
udevfs = UdevFs::new(state.clone());\n        let udevfs_path_clone = udevfs_path.clone();\n\n        let southpaw = southpaw::DeviceTree::new();\n        let southpaw_clone = southpaw.clone();\n        let southpaw_path_clone = southpaw_path.clone();\n\n        container.setup_hook(move |c| {\n            let mode = 0o755 | rustix::fs::FileType::Directory.as_raw_mode();\n\n            let device_fd = c.fuse_mount(udevfs_path_clone, \"udevfs\", mode)?;\n            let mut session = fuse::Session::from_fd(udevfs, device_fd, fuse::SessionACL::Owner);\n            std::thread::spawn(move || session.run());\n\n            let device_fd = c.fuse_mount(southpaw_path_clone, \"southpaw\", mode)?;\n            southpaw_clone.wrap_fd(device_fd);\n\n            // Headless servers won't have /sys/devices/virtual/input, and we\n            // can't mkdir the mount point, because it's sysfs.\n            if !Path::new(\"/sys/devices/virtual/input\").exists() {\n                c.fs_mount(\n                    \"/sys/devices/virtual\",\n                    \"tmpfs\",\n                    rustix::mount::MountAttrFlags::empty(),\n                    [(c\"mode\", c\"0777\")],\n                )?;\n            }\n\n            Ok(())\n        });\n\n        container.internal_bind_mount(\n            udevfs_path.join(\"sys/devices/virtual/input\"),\n            \"/sys/devices/virtual/input\",\n        );\n        container.internal_bind_mount(udevfs_path.join(\"sys/class/input\"), \"/sys/class/input\");\n        container.internal_bind_mount(udevfs_path.join(\"run/udev\"), \"/run/udev\");\n        container.internal_bind_mount(southpaw_path, \"/dev/input\");\n\n        // Shadow /sys/class/hidraw.\n        if Path::new(\"/sys/class/hidraw\").exists() {\n            container\n                .internal_bind_mount(udevfs_path.join(\"sys/class/hidraw\"), \"/sys/class/hidraw\");\n        }\n\n        // Without this, udev refuses to accept our FUSE filesystem.\n        
container.set_env(\"SYSTEMD_DEVICE_VERIFY_SYSFS\", \"false\");\n\n        Ok(Self { state, southpaw })\n    }\n\n    pub fn plug_gamepad(\n        &mut self,\n        id: u64,\n        _layout: GamepadLayout,\n        permanent: bool,\n    ) -> anyhow::Result<GamepadHandle> {\n        debug!(id, ?_layout, \"gamepad plugged\");\n\n        let mut guard = self.state.lock();\n\n        guard.counter += 1;\n        let counter = guard.counter;\n        let devname = OsStr::new(&format!(\"input{counter}\")).to_owned();\n        let eventname = OsStr::new(&format!(\"event{counter}\")).to_owned();\n\n        let xy_absinfo = AbsInfo {\n            value: 128,\n            minimum: 0,\n            maximum: 255,\n            ..Default::default()\n        };\n\n        let trigger_absinfo = AbsInfo {\n            value: 0,\n            minimum: 0,\n            maximum: 255,\n            ..Default::default()\n        };\n\n        let dpad_absinfo = AbsInfo {\n            value: 0,\n            minimum: -1,\n            maximum: 1,\n            ..Default::default()\n        };\n\n        let device = southpaw::Device::builder()\n            .name(\"Magic Mirror Emulated Controller\")\n            .id(southpaw::BusType::Usb, 1234, 4567, 111)\n            .supported_key_codes([\n                KeyCode::BtnSouth,\n                KeyCode::BtnNorth,\n                KeyCode::BtnEast,\n                KeyCode::BtnWest,\n                KeyCode::BtnTl,\n                KeyCode::BtnTr,\n                KeyCode::BtnTl2,\n                KeyCode::BtnTr2,\n                KeyCode::BtnSelect,\n                KeyCode::BtnStart,\n                KeyCode::BtnMode,\n                KeyCode::BtnThumbl,\n                KeyCode::BtnThumbr,\n            ])\n            .supported_absolute_axis(AbsAxis::X, xy_absinfo)\n            .supported_absolute_axis(AbsAxis::Y, xy_absinfo)\n            .supported_absolute_axis(AbsAxis::RX, xy_absinfo)\n            .supported_absolute_axis(AbsAxis::RY, 
xy_absinfo)\n            .supported_absolute_axis(AbsAxis::Z, trigger_absinfo)\n            .supported_absolute_axis(AbsAxis::RZ, trigger_absinfo)\n            .supported_absolute_axis(AbsAxis::HAT0X, dpad_absinfo)\n            .supported_absolute_axis(AbsAxis::HAT0Y, dpad_absinfo)\n            .add_to_tree(&mut self.southpaw, &eventname)?;\n\n        guard.devices.push(DeviceState {\n            id,\n            counter,\n            devname,\n            eventname,\n        });\n\n        Ok(GamepadHandle {\n            device,\n            ev_buffer: Vec::new(),\n            permanent,\n        })\n    }\n}\n\n#[cfg(test)]\nmod test {\n    use std::{fs::File, io::Read as _};\n\n    use rustix::pipe::{pipe_with, PipeFlags};\n\n    use super::{GamepadLayout, InputDeviceManager};\n    use crate::{config::HomeIsolationMode, container::Container};\n\n    fn run_in_container_with_gamepads<T>(cmd: impl AsRef<[T]>) -> anyhow::Result<String>\n    where\n        T: AsRef<str>,\n    {\n        let command = cmd\n            .as_ref()\n            .iter()\n            .map(|s| s.as_ref().to_owned().into())\n            .collect();\n\n        let mut container = Container::new(command, HomeIsolationMode::Tmpfs)?;\n        let (pipe_rx, pipe_tx) = pipe_with(PipeFlags::CLOEXEC)?;\n        container.set_stdout(pipe_tx)?;\n\n        container.set_env(\"SYSTEMD_LOG_LEVEL\", \"debug\");\n        let mut input_manager = InputDeviceManager::new(&mut container)?;\n\n        let mut child = container.spawn()?;\n\n        let _ = input_manager.plug_gamepad(1234, GamepadLayout::GenericDualStick, false)?;\n        let _ = input_manager.plug_gamepad(5678, GamepadLayout::GenericDualStick, false)?;\n        let _ = child.wait();\n\n        let mut buf = String::new();\n        File::from(pipe_rx).read_to_string(&mut buf)?;\n\n        Ok(buf)\n    }\n\n    #[test_log::test]\n    fn list_devices_subsystem() -> anyhow::Result<()> {\n        let output = run_in_container_with_gamepads([\n       
     \"udevadm\",\n            \"--debug\",\n            \"trigger\",\n            \"--dry-run\",\n            \"--verbose\",\n            \"--subsystem-match\",\n            \"input\",\n        ])?;\n\n        let mut expected = String::new();\n        for path in [\n            \"/sys/devices/virtual/input/input1\",\n            \"/sys/devices/virtual/input/input1/event1\",\n            \"/sys/devices/virtual/input/input2\",\n            \"/sys/devices/virtual/input/input2/event2\",\n        ] {\n            expected.push_str(path);\n            expected.push('\\n');\n        }\n\n        pretty_assertions::assert_eq!(output, expected);\n        Ok(())\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/reactor.rs",
    "content": "use std::{\n    collections::BTreeMap,\n    ffi::{OsStr, OsString},\n    fs::File,\n    io::{BufRead, BufReader},\n    os::fd::AsRawFd,\n    path::{Path, PathBuf},\n    sync::Arc,\n    time,\n};\n\nuse anyhow::{bail, Context as _};\nuse crossbeam_channel as crossbeam;\nuse lazy_static::lazy_static;\nuse tracing::{debug, trace, trace_span};\n\nuse super::{\n    audio,\n    compositor::{self, xwayland, Compositor},\n    control::{AudioStreamParams, ControlMessage, DisplayParams, SessionEvent, VideoStreamParams},\n    input, video, GamepadLayout, SessionHandle,\n};\nuse crate::{\n    config::AppConfig,\n    container::{Container, ContainerHandle},\n    pixel_scale::PixelScale,\n    server::stream::StreamWriter,\n    vulkan::VkContext,\n    waking_sender::WakingSender,\n};\n\nlazy_static! {\n    pub static ref EPOCH: std::time::Instant = std::time::Instant::now();\n}\n\nconst READY_TIMEOUT: std::time::Duration = time::Duration::from_secs(30);\n\nconst DISPLAY: mio::Token = mio::Token(0);\nconst ACCEPT: mio::Token = mio::Token(1);\nconst CHILD: mio::Token = mio::Token(2);\nconst WAKER: mio::Token = mio::Token(3);\nconst TIMER: mio::Token = mio::Token(4);\n\nconst XDISPLAY: mio::Token = mio::Token(10);\nconst XWAYLAND: mio::Token = mio::Token(11);\nconst XWAYLAND_READY: mio::Token = mio::Token(12);\n\npub struct Reactor {\n    poll: mio::Poll,\n    waker: Arc<mio::Waker>,\n\n    compositor: Compositor,\n    session_handle: SessionHandle,\n\n    listening_socket: wayland_server::ListeningSocket,\n    wayland_display: wayland_server::Display<Compositor>,\n\n    app_config: AppConfig,\n    child: ContainerHandle,\n    child_debug_log: Option<File>,\n\n    display_params: DisplayParams,\n    new_display_params: Option<DisplayParams>,\n\n    audio_pipeline: audio::EncodePipeline,\n    video_pipeline: Option<video::EncodePipeline>,\n    new_video_stream_params: Option<VideoStreamParams>,\n\n    input_manager: input::InputDeviceManager,\n    gamepads: 
BTreeMap<u64, input::GamepadHandle>,\n\n    xwayland: Option<xwayland::XWayland>,\n    xwayland_debug_log: Option<File>,\n\n    pending_attachments: Vec<ControlMessage>,\n\n    ready_once: Option<oneshot::Sender<WakingSender<ControlMessage>>>,\n    timer: mio_timerfd::TimerFd,\n    sleeping: bool,\n    shutting_down: bool,\n\n    vk: Arc<VkContext>,\n}\n\nimpl Reactor {\n    pub fn run(\n        vk: Arc<VkContext>,\n        app_config: AppConfig,\n        display_params: DisplayParams,\n        permanent_gamepads: Vec<(u64, GamepadLayout)>,\n        bug_report_dir: Option<PathBuf>,\n        ready_send: oneshot::Sender<WakingSender<ControlMessage>>,\n    ) -> anyhow::Result<()> {\n        let mut display = wayland_server::Display::new().context(\"failed to create display\")?;\n\n        let ui_scale = if app_config.force_1x_scale {\n            PixelScale::ONE\n        } else {\n            display_params.ui_scale\n        };\n\n        trace!(\n            %ui_scale,\n            width = display_params.width,\n            height = display_params.height,\n            \"configuring virtual display\"\n        );\n\n        // Create wayland globals.\n        let dh = display.handle();\n        compositor::create_globals(&dh);\n\n        let mut container = Container::new(\n            app_config.command.clone(),\n            app_config.home_isolation_mode.clone(),\n        )\n        .context(\"initializing container\")?;\n\n        for (k, v) in &app_config.env {\n            container.set_env(k, v);\n        }\n\n        let poll = mio::Poll::new()?;\n        let waker = Arc::new(mio::Waker::new(poll.registry(), WAKER)?);\n        let handle = SessionHandle::new(waker.clone());\n\n        let display_fd = display.backend().poll_fd().as_raw_fd();\n        poll.registry().register(\n            &mut mio::unix::SourceFd(&display_fd),\n            DISPLAY,\n            mio::Interest::READABLE,\n        )?;\n\n        // Bind the listening socket.\n        let 
socket_name = gen_socket_name();\n        let socket_path = container.extern_run_path().join(&socket_name);\n        let listening_socket = wayland_server::ListeningSocket::bind_absolute(socket_path.clone())?;\n        trace!(?socket_path, \"bound wayland socket\");\n\n        let listener_fd = listening_socket.as_raw_fd();\n        poll.registry().register(\n            &mut mio::unix::SourceFd(&listener_fd),\n            ACCEPT,\n            mio::Interest::READABLE,\n        )?;\n\n        // Set up the pulse audio server.\n        let audio_pipeline =\n            audio::EncodePipeline::new(handle.clone(), container.extern_run_path())?;\n\n        // Set up compositor state.\n        let compositor = compositor::Compositor::new(\n            vk.clone(),\n            handle.clone(),\n            DisplayParams {\n                ui_scale, // Overridden by force_1x_scale.\n                ..display_params\n            },\n        )?;\n\n        // Set up input emulation (this is just for gamepads).\n        let mut input_manager = input::InputDeviceManager::new(&mut container)?;\n        let mut gamepads = BTreeMap::new();\n\n        for (pad_id, layout) in permanent_gamepads {\n            let dev = input_manager.plug_gamepad(pad_id, layout, true)?;\n            gamepads.insert(pad_id, dev);\n        }\n\n        // Spawn Xwayland, if we're using it.\n        let (xwayland, xwayland_recv, xwayland_debug_log) = if app_config.xwayland {\n            let mut xwayland_debug_log = if let Some(bug_report_dir) = bug_report_dir.as_ref() {\n                let path = bug_report_dir.join(\"xwayland.log\");\n                Some(std::fs::File::create(path).context(\"failed to create xwayland logfile\")?)\n            } else {\n                None\n            };\n\n            let (output_send, mut output_recv) = mio::unix::pipe::new()?;\n            let mut xwayland = match xwayland::XWayland::spawn(\n                &mut display.handle(),\n                
container.extern_run_path(),\n                output_send,\n            ) {\n                Ok(xw) => xw,\n                Err(e) => {\n                    // Make sure we save any errors.\n                    dump_child_output(\n                        &mut BufReader::new(&mut output_recv),\n                        &mut xwayland_debug_log,\n                    );\n\n                    return Err(e).context(\"spawning Xwayland\");\n                }\n            };\n\n            // Xwayland writes to this pipe when it's ready.\n            poll.registry().register(\n                &mut xwayland.displayfd_recv,\n                XWAYLAND_READY,\n                mio::Interest::READABLE,\n            )?;\n\n            // Stderr/stdout of the xwayland process.\n            poll.registry()\n                .register(&mut output_recv, XWAYLAND, mio::Interest::READABLE)?;\n\n            (Some(xwayland), Some(output_recv), xwayland_debug_log)\n        } else {\n            (None, None, None)\n        };\n\n        // Spawn the client with a pipe as stdout/stderr.\n        let (pipe_send, mut pipe_recv) = mio::unix::pipe::new()?;\n        container.set_stdout(&pipe_send)?;\n        container.set_stderr(&pipe_send)?;\n        drop(pipe_send);\n\n        // Set the wayland socket and X11 sockets. The wayland socket is a\n        // relative path inside XDG_RUNTIME_DIR. 
The X11 socket is special\n        // and has to be in a specific location for XCB to work on all systems.\n        container.set_env(\"WAYLAND_DISPLAY\", &socket_name);\n        if let Some(xwayland) = &xwayland {\n            xwayland.prepare_socket(&mut container);\n        }\n\n        // Shadow pipewire, just in case.\n        container.set_env(\"PIPEWIRE_REMOTE\", \"(null)\");\n\n        let child = match container.spawn() {\n            Ok(ch) => ch,\n            Err(e) => {\n                // Make sure we pump the child stdio and catch any container-related\n                // error.\n                let mut debug_log = bug_report_dir\n                    .as_ref()\n                    .and_then(|dir| std::fs::File::create(dir.join(\"child.log\")).ok());\n                let mut child_output = BufReader::new(&mut pipe_recv);\n                dump_child_output(&mut child_output, &mut debug_log);\n                return Err(e).context(\"starting application container\");\n            }\n        };\n\n        poll.registry().register(\n            &mut mio::unix::SourceFd(&child.pidfd().as_raw_fd()),\n            CHILD,\n            mio::Interest::READABLE,\n        )?;\n\n        poll.registry()\n            .register(&mut pipe_recv, CHILD, mio::Interest::READABLE)?;\n\n        // Use `glxinfo` and `eglinfo` to generate more debugging help.\n        if let Some(bug_report_dir) = bug_report_dir.as_ref() {\n            let p = bug_report_dir.to_owned();\n            let wayland_socket = socket_name.clone();\n            let x11_socket = xwayland\n                .as_ref()\n                .map(|x| x.display_socket.inner_path().clone());\n\n            std::thread::spawn(move || {\n                save_glxinfo_eglinfo(\n                    &p,\n                    &wayland_socket,\n                    x11_socket.as_ref().map(|p| p.as_os_str()),\n                );\n            });\n        }\n\n        // If bug report mode is enabled, save the stdout/stderr of 
the child to\n        // a logfile.\n        let child_debug_log = if let Some(bug_report_dir) = bug_report_dir.as_ref() {\n            let path = bug_report_dir.join(format!(\"child-{}.log\", child.pid().as_raw_nonzero()));\n            Some(std::fs::File::create(path).context(\"failed to create child logfile\")?)\n        } else {\n            None\n        };\n\n        // Framerate timer (simulates vblank).\n        let mut timer = mio_timerfd::TimerFd::new(mio_timerfd::ClockId::Monotonic)?;\n\n        poll.registry()\n            .register(&mut timer, TIMER, mio::Interest::READABLE)?;\n\n        let mut reactor = Self {\n            poll,\n            waker,\n\n            wayland_display: display,\n            compositor,\n\n            session_handle: handle,\n            listening_socket,\n\n            app_config,\n            child,\n            child_debug_log,\n\n            display_params,\n            new_display_params: None,\n\n            audio_pipeline,\n            video_pipeline: None,\n            new_video_stream_params: None,\n\n            input_manager,\n            gamepads,\n\n            pending_attachments: Vec::new(),\n\n            xwayland,\n            xwayland_debug_log,\n\n            ready_once: Some(ready_send),\n            timer,\n            sleeping: false,\n            shutting_down: false,\n\n            vk,\n        };\n\n        reactor.main_loop(pipe_recv, xwayland_recv)\n    }\n\n    fn main_loop(\n        &mut self,\n        mut child_pipe: mio::unix::pipe::Receiver,\n        mut xwayland_pipe: Option<mio::unix::pipe::Receiver>,\n    ) -> Result<(), anyhow::Error> {\n        let mut events = mio::Events::with_capacity(64);\n\n        let (control_send, control_recv) = crossbeam::unbounded();\n        let control_send = WakingSender::new(self.waker.clone(), control_send);\n\n        let start = time::Instant::now();\n        let mut child_output = BufReader::new(&mut child_pipe);\n        let mut xwayland_output = 
xwayland_pipe.as_mut().map(BufReader::new);\n\n        loop {\n            trace_span!(\"poll\").in_scope(|| self.poll.poll(&mut events, None))?;\n\n            for event in events.iter() {\n                match event.token() {\n                    ACCEPT => {\n                        if let Some(client_stream) = self.listening_socket.accept()? {\n                            let _client = self.wayland_display.handle().insert_client(\n                                client_stream,\n                                Arc::new(compositor::ClientState::default()),\n                            )?;\n\n                            debug!(\"client app connected\");\n                        }\n                    }\n                    CHILD if event.is_read_closed() => {\n                        self.child.wait()?;\n                        self.session_handle.kick_clients();\n\n                        if self.ready_once.is_some() {\n                            // The client exited immediately, which is an error.\n                            bail!(\"client exited without doing anything\");\n                        } else {\n                            return Ok(());\n                        }\n                    }\n                    CHILD if event.is_readable() => {\n                        dump_child_output(&mut child_output, &mut self.child_debug_log)\n                    }\n                    WAKER => loop {\n                        match control_recv.try_recv() {\n                            Ok(ControlMessage::Stop) => {\n                                self.session_handle.kick_clients();\n                                self.shutting_down = true;\n                                trace!(\"shutting down\");\n\n                                // Usually, TERM doesn't work, because the\n                                // process is PID 1 in the container.\n                                self.child.signal(rustix::process::Signal::KILL)?;\n                            }\n   
                         Ok(msg) => self.handle_control_message(msg)?,\n                            Err(crossbeam::TryRecvError::Empty) => break,\n                            Err(crossbeam::TryRecvError::Disconnected) => {\n                                panic!(\"control channel disconnected\")\n                            }\n                        }\n                    },\n                    DISPLAY => {\n                        trace!(\"dispatching display\");\n                        self.wayland_display\n                            .dispatch_clients(&mut self.compositor)\n                            .context(\"failed to dispatch the wayland display\")?;\n                    }\n                    XDISPLAY => {\n                        trace!(\"dispatching xwm\");\n                        self.compositor\n                            .dispatch_xwm()\n                            .context(\"failed to dispatch the xwm\")?;\n                    }\n                    XWAYLAND_READY => {\n                        let xwayland = self.xwayland.as_mut().unwrap();\n                        if let Some(socket) = xwayland.poll_ready()? 
{\n                            self.poll\n                                .registry()\n                                .deregister(&mut xwayland.displayfd_recv)?;\n\n                            // Setup the XWM connection to the Xwayland server.\n                            let fd = self.compositor.insert_xwayland(socket)?;\n                            self.poll.registry().register(\n                                &mut mio::unix::SourceFd(&fd.as_raw_fd()),\n                                XDISPLAY,\n                                mio::Interest::READABLE,\n                            )?;\n                        }\n                    }\n                    XWAYLAND if event.is_read_closed() => {\n                        self.xwayland.as_mut().unwrap().child.wait()?;\n                    }\n                    XWAYLAND if event.is_readable() => {\n                        dump_child_output(\n                            xwayland_output.as_mut().unwrap(),\n                            &mut self.xwayland_debug_log,\n                        );\n                    }\n                    TIMER => {\n                        self.timer.read()?;\n\n                        // Check if we need to resize the virtual display.\n                        if let Some(new_params) = self.new_display_params.take() {\n                            self.update_display_params(new_params)?;\n\n                            // Update the render timer to match the new framerate.\n                            self.timer\n                                .set_timeout_interval(&time::Duration::from_secs_f64(\n                                    1.0 / self.display_params.framerate as f64,\n                                ))?;\n                        }\n\n                        self.frame()?;\n                    }\n                    _ => unreachable!(),\n                }\n            }\n\n            if !self.shutting_down {\n                self.idle()?;\n            }\n\n            // Check 
that we haven't timed out waiting for the client to start up.\n            if self.ready_once.is_some() && self.compositor.surfaces_ready() {\n                self.ready_once.take().unwrap().send(control_send.clone())?;\n            } else if self.ready_once.is_some() && start.elapsed() > READY_TIMEOUT {\n                self.child.signal(rustix::process::Signal::KILL)?;\n                bail!(\"timed out waiting for client\");\n            }\n\n            // Sleep if we're not active.\n            if !self.sleeping && !self.active() {\n                self.sleeping = true;\n                self.timer\n                    .set_timeout_interval(&time::Duration::from_secs(1))?;\n            } else if self.sleeping && self.active() {\n                self.sleeping = false;\n                self.timer\n                    .set_timeout_interval(&time::Duration::from_secs_f64(\n                        1.0 / self.display_params.framerate as f64,\n                    ))?;\n            }\n        }\n    }\n\n    fn idle(&mut self) -> anyhow::Result<()> {\n        // Accept any waiting clients, but only if we're not mid-resize.\n        if !self.pending_attachments.is_empty()\n            && self.new_display_params.is_none()\n            && self.compositor.surfaces_ready()\n        {\n            let pending_attachments = self.pending_attachments.drain(..).collect::<Vec<_>>();\n            for attach_msg in pending_attachments {\n                if let ControlMessage::Attach {\n                    id,\n                    sender,\n                    video_params,\n                    audio_params,\n                    stream_writer,\n                    ready,\n                } = attach_msg\n                {\n                    // Check if the caller is still waiting.\n                    if ready.send(()).is_ok() {\n                        self.attach(id, sender, video_params, audio_params, stream_writer)?;\n                    }\n                } else {\n             
       unreachable!()\n                }\n            }\n        }\n\n        // Perform compositor upkeep.\n        self.compositor.idle(self.active())?;\n\n        // Send pending controller SYN_REPORT events.\n        for (_, dev) in self.gamepads.iter_mut() {\n            dev.frame()\n        }\n\n        // Flush events to the app.\n        self.wayland_display.flush_clients()?;\n\n        Ok(())\n    }\n\n    fn active(&self) -> bool {\n        self.session_handle.num_attachments() > 0 || !self.pending_attachments.is_empty()\n    }\n\n    fn update_display_params(&mut self, params: DisplayParams) -> anyhow::Result<()> {\n        let old = self.display_params;\n\n        let old_ui_scale = self.display_params.ui_scale;\n        let new_ui_scale = if self.app_config.force_1x_scale {\n            PixelScale::ONE\n        } else {\n            params.ui_scale\n        };\n\n        let size_changed = old.width != params.width || old.height != params.height;\n        let scale_changed = old_ui_scale != new_ui_scale;\n        let framerate_changed = old.framerate != params.framerate;\n\n        if size_changed || scale_changed || framerate_changed {\n            debug!(\n                old_width = old.width,\n                new_width = params.width,\n                old_height = old.height,\n                new_height = params.height,\n                old_framerate = old.framerate,\n                new_framerate = params.framerate,\n                old_ui_scale = %old_ui_scale,\n                new_ui_scale = %new_ui_scale,\n                \"resizing output\",\n            );\n\n            // If the size or framerate is different, force the client to reattach.\n            // TODO: if we support multiple attachments, or attachments that\n            // differ in resolution from the render res, we need to check for\n            // that here. 
For now, it's safe to just kill the attachment streams.\n            let force_reattach = size_changed || framerate_changed;\n\n            self.compositor.update_display_params(\n                DisplayParams {\n                    ui_scale: new_ui_scale,\n                    ..params\n                },\n                // If we're forcing clients to reattach, the attachment is about\n                // end, so configure the surfaces as inactive.\n                !force_reattach,\n            )?;\n\n            self.session_handle\n                .dispatch(SessionEvent::DisplayParamsChanged {\n                    params,\n                    reattach: force_reattach,\n                });\n\n            if force_reattach {\n                // Clear any pending attachments which don't match the new output.\n                self.pending_attachments.retain(|pending| {\n                    let ControlMessage::Attach {\n                        video_params: VideoStreamParams { width, height, .. 
},\n                        ..\n                    } = pending\n                    else {\n                        unreachable!()\n                    };\n\n                    *width == params.width && *height == params.height\n                });\n\n                // Clear any current attachments.\n                self.session_handle.remove_all();\n                self.audio_pipeline.stop_stream();\n\n                self.video_pipeline = None;\n                self.new_video_stream_params = None;\n            }\n        } else if params.ui_scale != old.ui_scale {\n            // Synthesize a param change if we are forcing 1x scale.\n            self.session_handle\n                .dispatch(SessionEvent::DisplayParamsChanged {\n                    params,\n                    reattach: false,\n                });\n        }\n\n        self.display_params = DisplayParams {\n            ui_scale: new_ui_scale,\n            ..params\n        };\n\n        Ok(())\n    }\n\n    fn frame(&mut self) -> anyhow::Result<()> {\n        #[cfg(feature = \"tracy\")]\n        tracy_client::frame_mark();\n\n        if self.session_handle.num_attachments() == 0 {\n            return Ok(());\n        }\n\n        if !self.compositor.surfaces_ready() {\n            return Ok(());\n        }\n\n        if let Some(params) = self.new_video_stream_params.take() {\n            self.video_pipeline = Some(video::EncodePipeline::new(\n                self.vk.clone(),\n                self.session_handle.clone(),\n                self.display_params,\n                params,\n            )?);\n        }\n\n        let Some(video_pipeline) = &mut self.video_pipeline else {\n            return Ok(());\n        };\n\n        // Composite visible surfaces.\n        self.compositor.composite_frame(video_pipeline)?;\n\n        // Render the cursor, if needed.\n        self.compositor.render_cursor()?;\n\n        Ok(())\n    }\n\n    fn attach(\n        &mut self,\n        id: u64,\n        
sender: crossbeam::Sender<SessionEvent>,\n        video_params: VideoStreamParams,\n        audio_params: AudioStreamParams,\n        stream_writer: StreamWriter,\n    ) -> anyhow::Result<()> {\n        if self.session_handle.num_attachments() > 0 {\n            unimplemented!();\n        }\n\n        self.session_handle.insert_client(id, sender, stream_writer);\n        self.new_video_stream_params = Some(video_params);\n        self.audio_pipeline.restart_stream(audio_params)?;\n        self.compositor.update_focus_and_visibility(true)?;\n\n        self.compositor.dispatch_cursor();\n        if let Some(coords) = self.compositor.default_seat.pointer_locked() {\n            let (x, y) = coords.into();\n            self.session_handle\n                .dispatch(SessionEvent::PointerLocked(x, y));\n        }\n\n        Ok(())\n    }\n\n    fn handle_control_message(&mut self, msg: ControlMessage) -> anyhow::Result<()> {\n        if self.shutting_down {\n            // We're about to shut down, so ignore all messages.\n            return Ok(());\n        }\n\n        // Attachments get handled asynchronously.\n        if matches!(msg, ControlMessage::Attach { .. }) {\n            self.pending_attachments.push(msg);\n            return Ok(());\n        }\n\n        match msg {\n            ControlMessage::Detach(id) => {\n                self.session_handle.remove_client(id);\n                self.pending_attachments.retain(|msg| {\n                    let ControlMessage::Attach { id: pending_id, .. 
} = msg else {\n                        unreachable!();\n                    };\n\n                    *pending_id != id\n                });\n\n                if !self.active() {\n                    self.audio_pipeline.stop_stream();\n                    self.video_pipeline = None;\n                    self.compositor.update_focus_and_visibility(false)?;\n                }\n            }\n            ControlMessage::RefreshVideo => {\n                if let Some(video) = &mut self.video_pipeline {\n                    video.request_refresh();\n                }\n            }\n            ControlMessage::UpdateDisplayParams(params) => {\n                // Updates once per render.\n                self.new_display_params = Some(params);\n            }\n            ControlMessage::KeyboardInput { .. }\n            | ControlMessage::PointerInput { .. }\n            | ControlMessage::PointerMotion { .. }\n            | ControlMessage::RelativePointerMotion { .. }\n            | ControlMessage::PointerAxis(_, _)\n            | ControlMessage::PointerAxisDiscrete(_, _)\n            | ControlMessage::PointerEntered\n            | ControlMessage::PointerLeft => self.compositor.handle_input_event(msg),\n            ControlMessage::GamepadAvailable(id) => {\n                use std::collections::btree_map::Entry;\n                if let Entry::Vacant(e) = self.gamepads.entry(id) {\n                    e.insert(self.input_manager.plug_gamepad(\n                        id,\n                        input::GamepadLayout::GenericDualStick,\n                        false,\n                    )?);\n                }\n            }\n            ControlMessage::GamepadUnavailable(id) => {\n                use std::collections::btree_map::Entry;\n                match self.gamepads.entry(id) {\n                    Entry::Occupied(v) if !v.get().permanent => {\n                        v.remove();\n                    }\n                    _ => (),\n                }\n            
}\n            ControlMessage::GamepadAxis {\n                id,\n                axis_code,\n                value,\n            } => {\n                if let Some(gamepad) = self.gamepads.get_mut(&id) {\n                    gamepad.axis(axis_code, value);\n                }\n            }\n            ControlMessage::GamepadTrigger {\n                id,\n                trigger_code,\n                value,\n            } => {\n                if let Some(gamepad) = self.gamepads.get_mut(&id) {\n                    gamepad.trigger(trigger_code, value);\n                }\n            }\n            ControlMessage::GamepadInput {\n                id,\n                button_code,\n                state,\n            } => {\n                if let Some(gamepad) = self.gamepads.get_mut(&id) {\n                    gamepad.input(button_code, state);\n                }\n            }\n            // Handled above.\n            ControlMessage::Stop | ControlMessage::Attach { .. } => unreachable!(),\n        }\n\n        Ok(())\n    }\n}\n\nfn gen_socket_name() -> OsString {\n    use rand::Rng;\n    let id: u64 = rand::thread_rng().gen();\n    format!(\"magic-mirror-{}\", id).into()\n}\n\nfn dump_child_output(pipe: &mut impl BufRead, debug_log: &mut Option<std::fs::File>) {\n    let mut buf = String::new();\n\n    loop {\n        buf.clear();\n        match pipe.read_line(&mut buf) {\n            Ok(1..) 
=> {\n                if let Some(debug_log) = debug_log {\n                    let _ = std::io::Write::write_all(debug_log, buf.as_bytes());\n                }\n\n                let buf = buf.trim();\n                if !buf.is_empty() {\n                    trace!(target: \"mmserver::session::child\", \"{}\", buf);\n                }\n            }\n            Ok(0) => break,\n            Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => break,\n            Err(e) => {\n                debug!(\"child error: {:?}\", e);\n                break;\n            }\n        }\n    }\n}\n\nfn save_glxinfo_eglinfo(\n    bug_report_dir: impl AsRef<Path>,\n    socket_name: &OsStr,\n    x11_display: Option<&OsStr>,\n) {\n    use std::process::Command;\n\n    if let Some(x11_display) = x11_display {\n        match Command::new(\"glxinfo\")\n            .env_clear()\n            .env(\"DISPLAY\", x11_display)\n            .output()\n        {\n            Ok(output) => {\n                let _ = std::fs::write(bug_report_dir.as_ref().join(\"glxinfo.log\"), output.stdout);\n            }\n            Err(e) => debug!(\"failed to run glxinfo: {:#}\", e),\n        }\n    }\n\n    match Command::new(\"eglinfo\")\n        .env_clear()\n        .env(\"WAYLAND_DISPLAY\", socket_name)\n        .output()\n    {\n        Ok(output) => {\n            let _ = std::fs::write(bug_report_dir.as_ref().join(\"eglinfo.log\"), output.stdout);\n        }\n        Err(e) => debug!(\"failed to run eglinfo: {:#}\", e),\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/video/composite.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::sync::Arc;\n\nuse anyhow::Context;\nuse ash::vk;\nuse cstr::cstr;\n\nuse crate::{color::ColorSpace, vulkan::*};\n\npub const BLEND_FORMAT: vk::Format = vk::Format::R16G16B16A16_SFLOAT;\n\n// Also defined in composite.slang.\n#[repr(u32)]\n#[derive(Copy, Clone, Debug)]\nenum SurfaceColorSpace {\n    Srgb = 0,\n    LinearExtSrgb = 1,\n    Hdr10 = 2,\n}\n\nimpl From<ColorSpace> for SurfaceColorSpace {\n    fn from(cs: ColorSpace) -> Self {\n        match cs {\n            ColorSpace::Srgb => SurfaceColorSpace::Srgb,\n            ColorSpace::LinearExtSrgb => SurfaceColorSpace::LinearExtSrgb,\n            ColorSpace::Hdr10 => SurfaceColorSpace::Hdr10,\n        }\n    }\n}\n\n#[derive(Copy, Clone, Debug)]\n#[repr(C)]\n#[allow(dead_code)]\nstruct SurfacePC {\n    // Should be in texture coords: [0, 1].\n    src_pos: glam::Vec2,\n    src_size: glam::Vec2,\n    // Should be in clip coords: [-1, 1].\n    // TODO: suck it up and use a matrix transform (mat3) to support rotations.\n    dst_pos: glam::Vec2,\n    dst_size: glam::Vec2,\n    color_space: SurfaceColorSpace,\n}\n\n/// Composites surfaces into a blend image.\npub struct CompositePipeline {\n    descriptor_set_layout: vk::DescriptorSetLayout,\n    pipeline_layout: vk::PipelineLayout,\n    pipeline: vk::Pipeline,\n    sampler: vk::Sampler,\n    vk: Arc<VkContext>,\n}\n\nimpl CompositePipeline {\n    pub fn new(vk: Arc<VkContext>) -> anyhow::Result<Self> {\n        let sampler = {\n            let create_info = vk::SamplerCreateInfo::default()\n                .mag_filter(vk::Filter::LINEAR)\n                .min_filter(vk::Filter::LINEAR)\n                .address_mode_u(vk::SamplerAddressMode::REPEAT)\n                .address_mode_v(vk::SamplerAddressMode::REPEAT)\n                .address_mode_w(vk::SamplerAddressMode::REPEAT);\n\n            unsafe { vk.device.create_sampler(&create_info, None)? 
}\n        };\n\n        let descriptor_set_layout = {\n            let samplers = [sampler];\n            let binding = vk::DescriptorSetLayoutBinding::default()\n                .binding(0)\n                .descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n                .descriptor_count(1)\n                .stage_flags(vk::ShaderStageFlags::FRAGMENT)\n                .immutable_samplers(&samplers);\n\n            let bindings = [binding];\n            let create_info = vk::DescriptorSetLayoutCreateInfo::default()\n                .bindings(&bindings)\n                .flags(vk::DescriptorSetLayoutCreateFlags::PUSH_DESCRIPTOR_KHR);\n\n            unsafe { vk.device.create_descriptor_set_layout(&create_info, None)? }\n        };\n\n        let pipeline_layout = {\n            let ranges = [vk::PushConstantRange::default()\n                .stage_flags(vk::ShaderStageFlags::VERTEX | vk::ShaderStageFlags::FRAGMENT)\n                .offset(0)\n                .size(std::mem::size_of::<SurfacePC>() as u32)];\n            let set_layouts = [descriptor_set_layout];\n            let create_info = vk::PipelineLayoutCreateInfo::default()\n                .push_constant_ranges(&ranges)\n                .set_layouts(&set_layouts);\n\n            unsafe { vk.device.create_pipeline_layout(&create_info, None)? 
}\n        };\n\n        let pipeline = {\n            let vert_bytes =\n                include_bytes!(concat!(env!(\"OUT_DIR\"), \"/shaders/composite_vert.spv\"));\n            let frag_bytes =\n                include_bytes!(concat!(env!(\"OUT_DIR\"), \"/shaders/composite_frag.spv\"));\n\n            let vert_shader = load_shader(&vk.device, vert_bytes).context(\"loading vert.spv\")?;\n            let frag_shader = load_shader(&vk.device, frag_bytes).context(\"loading frag.spv\")?;\n\n            let vert_stage = vk::PipelineShaderStageCreateInfo::default()\n                .stage(vk::ShaderStageFlags::VERTEX)\n                .module(vert_shader)\n                .name(cstr!(\"main\"));\n\n            let frag_stage = vk::PipelineShaderStageCreateInfo::default()\n                .stage(vk::ShaderStageFlags::FRAGMENT)\n                .module(frag_shader)\n                .name(cstr!(\"main\"));\n\n            let vertex_input_state = vk::PipelineVertexInputStateCreateInfo::default();\n\n            let input_assembly_state = vk::PipelineInputAssemblyStateCreateInfo::default()\n                .topology(vk::PrimitiveTopology::TRIANGLE_STRIP)\n                .primitive_restart_enable(false);\n\n            let dynamic_state = vk::PipelineDynamicStateCreateInfo::default()\n                .dynamic_states(&[vk::DynamicState::VIEWPORT, vk::DynamicState::SCISSOR]);\n\n            let viewport_state = vk::PipelineViewportStateCreateInfo::default()\n                .viewport_count(1)\n                .scissor_count(1);\n\n            let rasterization_state = vk::PipelineRasterizationStateCreateInfo::default()\n                .depth_clamp_enable(false)\n                .rasterizer_discard_enable(false)\n                .polygon_mode(vk::PolygonMode::FILL)\n                .line_width(1.0)\n                .cull_mode(vk::CullModeFlags::NONE)\n                .front_face(vk::FrontFace::CLOCKWISE)\n                .depth_bias_enable(false);\n\n            let 
multisample_state = vk::PipelineMultisampleStateCreateInfo::default()\n                .sample_shading_enable(false)\n                .rasterization_samples(vk::SampleCountFlags::TYPE_1);\n\n            let attachment = vk::PipelineColorBlendAttachmentState::default()\n                .color_write_mask(vk::ColorComponentFlags::RGBA)\n                .blend_enable(true)\n                .src_color_blend_factor(vk::BlendFactor::SRC_ALPHA)\n                .dst_color_blend_factor(vk::BlendFactor::ONE_MINUS_SRC_ALPHA)\n                .color_blend_op(vk::BlendOp::ADD)\n                .src_alpha_blend_factor(vk::BlendFactor::ONE)\n                .dst_alpha_blend_factor(vk::BlendFactor::ZERO)\n                .alpha_blend_op(vk::BlendOp::ADD);\n\n            let attachments = [attachment];\n            let color_blend_state = vk::PipelineColorBlendStateCreateInfo::default()\n                .logic_op_enable(false)\n                .attachments(&attachments);\n\n            let formats = [BLEND_FORMAT];\n            let mut pipeline_rendering =\n                vk::PipelineRenderingCreateInfo::default().color_attachment_formats(&formats);\n\n            let stages = [vert_stage, frag_stage];\n            let create_info = vk::GraphicsPipelineCreateInfo::default()\n                .stages(&stages)\n                .vertex_input_state(&vertex_input_state)\n                .input_assembly_state(&input_assembly_state)\n                .dynamic_state(&dynamic_state)\n                .viewport_state(&viewport_state)\n                .rasterization_state(&rasterization_state)\n                .multisample_state(&multisample_state)\n                .color_blend_state(&color_blend_state)\n                .layout(pipeline_layout)\n                .push_next(&mut pipeline_rendering);\n\n            unsafe {\n                let pipeline = match vk.device.create_graphics_pipelines(\n                    vk::PipelineCache::null(),\n                    &[create_info],\n                
    None,\n                ) {\n                    Ok(pipelines) => Ok(pipelines[0]),\n                    Err((_, e)) => Err(e),\n                }?;\n\n                vk.device.destroy_shader_module(vert_shader, None);\n                vk.device.destroy_shader_module(frag_shader, None);\n                pipeline\n            }\n        };\n\n        Ok(Self {\n            descriptor_set_layout,\n            pipeline_layout,\n            pipeline,\n            sampler,\n            vk,\n        })\n    }\n\n    pub unsafe fn begin_compositing(&self, cb: vk::CommandBuffer, render_target: &VkImage) {\n        let device = &self.vk.device;\n\n        // Set the viewport and scissor.\n        let rect = render_target.rect();\n        {\n            let viewport = vk::Viewport::default()\n                .x(0.0)\n                .y(0.0)\n                .width(render_target.width as f32)\n                .height(render_target.height as f32)\n                .min_depth(0.0)\n                .max_depth(1.0);\n\n            device.cmd_set_viewport(cb, 0, &[viewport]);\n            device.cmd_set_scissor(cb, 0, &[rect]);\n        }\n\n        // Attach the render target.\n        let clear_value = vk::ClearValue {\n            color: vk::ClearColorValue {\n                #[cfg(debug_assertions)]\n                float32: [0.0, 0.3, 1.0, 1.0], // Blue for debug.\n                #[cfg(not(debug_assertions))]\n                float32: [0.0, 0.0, 0.0, 1.0],\n            },\n        };\n\n        let color_attachment = vk::RenderingAttachmentInfo::default()\n            .image_view(render_target.view)\n            .image_layout(vk::ImageLayout::ATTACHMENT_OPTIMAL)\n            .load_op(vk::AttachmentLoadOp::CLEAR)\n            .store_op(vk::AttachmentStoreOp::STORE)\n            .clear_value(clear_value);\n\n        let color_attachments = [color_attachment];\n        let rendering_info = vk::RenderingInfo::default()\n            .render_area(rect)\n            
.color_attachments(&color_attachments)\n            .layer_count(1);\n\n        device.cmd_begin_rendering(cb, &rendering_info);\n        device.cmd_bind_pipeline(cb, vk::PipelineBindPoint::GRAPHICS, self.pipeline);\n    }\n\n    /// Draws the surface texture to the output. The texture should already\n    /// be in the correct layout.\n    pub unsafe fn composite_surface(\n        &self,\n        cb: vk::CommandBuffer,\n        view: vk::ImageView,\n        // In clip coordinates.\n        // TODO: mat3 transform\n        dst_pos: glam::Vec2,\n        dst_size: glam::Vec2,\n    ) -> anyhow::Result<()> {\n        let device = &self.vk.device;\n\n        let color_space = ColorSpace::Srgb; // TODO\n        let pc = SurfacePC {\n            src_pos: glam::Vec2::ZERO,\n            src_size: glam::Vec2::ONE,\n            dst_pos,\n            dst_size,\n            color_space: color_space.into(),\n        };\n\n        // Push the texture.\n        {\n            let image_info = vk::DescriptorImageInfo::default()\n                .image_layout(vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL)\n                .image_view(view);\n\n            let image_infos = [image_info];\n            let write = vk::WriteDescriptorSet::default()\n                .dst_set(vk::DescriptorSet::null())\n                .dst_binding(0)\n                .dst_array_element(0)\n                .descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n                .image_info(&image_infos);\n\n            let writes = [write];\n            unsafe {\n                self.vk.push_ds_api.cmd_push_descriptor_set(\n                    cb,\n                    vk::PipelineBindPoint::GRAPHICS,\n                    self.pipeline_layout,\n                    0,\n                    &writes,\n                );\n            }\n        }\n\n        device.cmd_push_constants(\n            cb,\n            self.pipeline_layout,\n            vk::ShaderStageFlags::VERTEX | vk::ShaderStageFlags::FRAGMENT,\n 
           0,\n            std::slice::from_raw_parts(\n                &pc as *const _ as *const u8,\n                std::mem::size_of::<SurfacePC>(),\n            ),\n        );\n\n        device.cmd_draw(cb, 4, 1, 0, 0);\n\n        Ok(())\n    }\n\n    pub unsafe fn end_compositing(&self, cb: vk::CommandBuffer) {\n        self.vk.device.cmd_end_rendering(cb);\n    }\n}\n\nimpl Drop for CompositePipeline {\n    fn drop(&mut self) {\n        let device = &self.vk.device;\n\n        unsafe {\n            device\n                .queue_wait_idle(self.vk.graphics_queue.queue)\n                .unwrap();\n\n            device.destroy_pipeline(self.pipeline, None);\n            device.destroy_descriptor_set_layout(self.descriptor_set_layout, None);\n            device.destroy_pipeline_layout(self.pipeline_layout, None);\n            device.destroy_sampler(self.sampler, None);\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/video/composite.slang",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nimport color;\n\nconst Sampler2D texture;\n\nstruct VertOutput\n{\n    float2 uv : TextureCoord;\n    float4 position : SV_Position;\n};\n\n// This must match the enum in composite.rs.\nenum InputTextureColorSpace\n{\n    SRGB = 0,\n    LINEAR_EXTENDED_SRGB = 1,\n    HDR10 = 2,\n}\n\nstruct PushConstants\n{\n    float2 src_pos;\n    float2 src_size;\n    float2 dst_pos;\n    float2 dst_size;\n\n    InputTextureColorSpace color_space;\n};\n\n[[vk::push_constant]]\nPushConstants pc;\n\n[shader(\"vertex\")]\nVertOutput vert(uint vid: SV_VertexID)\n{\n    float2 corner;\n    switch (vid % 4)\n    {\n    case 0:\n        corner = float2(0.0, 0.0);\n        break;\n    case 1:\n        corner = float2(1.0, 0.0);\n        break;\n    case 2:\n        corner = float2(0.0, 1.0);\n        break;\n    case 3:\n        corner = float2(1.0, 1.0);\n        break;\n    }\n\n    VertOutput output;\n    output.position = float4(pc.dst_pos + pc.dst_size * corner, 0.0, 1.0);\n    output.uv = pc.src_pos + pc.src_size * corner;\n    return output;\n}\n\nfloat3 linearize(float3 color, InputTextureColorSpace color_space)\n{\n    switch (color_space)\n    {\n    case InputTextureColorSpace::SRGB:\n        return srgb_eotf(color);\n    case InputTextureColorSpace::LINEAR_EXTENDED_SRGB:\n        return color;\n    case InputTextureColorSpace::HDR10:\n        float3 linear = pq_eotf(color);\n\n        // The resulting values have the range 0-1, where 1.0 corresponds 10,000\n        // nits. In order to effectively blend with SDR textures, we need to\n        // scale based on our virtual display brightness, producing values where\n        // 1.0 matches the maximum brightness that SDR content would produce. We\n        // use the Rec. 2408 value of 203 nits for this. 
 On this scale, a value\n        // of 300 nits would result in a scaled value of about 1.47, and 1.0\n        // would result in about 49.26. Either value would be clipped unless we\n        // use a floating-point blend format (which we do).\n        // TODO: allow HDR metadata to override the scaling factor. This is called\n        // \"nominal diffuse white level\" or NDWL.\n        linear *= PQ_MAX_WHITE / SDR_REFERENCE_WHITE;\n\n        return transform(linear, Primaries::BT2020, Primaries::BT709);\n    default:\n        return srgb_eotf(color);\n    }\n}\n\n[shader(\"fragment\")]\nfloat4 frag(float2 uv: TextureCoord)\n    : SV_Target\n{\n    float4 color = texture.Sample(uv);\n\n    // Wayland specifies that textures have premultiplied alpha. If we just\n    // import a dmabuf as an _SRGB format, the colors are wrong, since vulkan\n    // expects sRGB textures to have not-premultiplied alpha.\n    //\n    // Vulkan normally expects to do the sRGB -> linear conversion when sampling\n    // in the shader. However, we're bypassing that operation here, by importing\n    // the texture as UNORM (even though it's stored as sRGB) and then doing the\n    // conversion manually.\n    if (color.a == 0)\n        return float4(0);\n    else if (pc.color_space == InputTextureColorSpace::LINEAR_EXTENDED_SRGB)\n        // We're already in the right space for blending.\n        return color;\n\n    color.rgb /= color.a;\n    color.rgb = linearize(color.rgb, pc.color_space);\n    color.rgb *= color.a;\n\n    return color;\n}\n\n"
  },
  {
    "path": "mm-server/src/session/video/convert.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::sync::Arc;\n\nuse ash::vk;\nuse tracing::instrument;\n\nuse crate::{\n    color::{ColorSpace, VideoProfile},\n    vulkan::*,\n};\n\n// Also defined in convert.slang.\n#[repr(u32)]\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\nenum InputTextureColorSpace {\n    Srgb = 0,\n    LinearExtSrgb = 1,\n    Hdr10 = 2,\n}\n\nimpl From<ColorSpace> for InputTextureColorSpace {\n    fn from(cs: ColorSpace) -> Self {\n        match cs {\n            ColorSpace::Srgb => InputTextureColorSpace::Srgb,\n            ColorSpace::LinearExtSrgb => InputTextureColorSpace::LinearExtSrgb,\n            ColorSpace::Hdr10 => InputTextureColorSpace::Hdr10,\n        }\n    }\n}\n\n// Also defined in convert.slang.\n#[repr(u32)]\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\nenum OutputProfile {\n    Hd = 0,\n    Hdr10 = 1,\n}\n\nimpl From<VideoProfile> for OutputProfile {\n    fn from(profile: VideoProfile) -> Self {\n        match profile {\n            VideoProfile::Hd => OutputProfile::Hd,\n            VideoProfile::Hdr10 => OutputProfile::Hdr10,\n        }\n    }\n}\n\n#[repr(C)]\n#[derive(Debug, Copy, Clone)]\nstruct ConvertPushConstants {\n    input_color_space: InputTextureColorSpace,\n    output_profile: OutputProfile,\n}\n\npub struct ConvertPipeline {\n    semiplanar: bool,\n    descriptor_set_layout: vk::DescriptorSetLayout,\n    sampler: vk::Sampler,\n    pipeline_layout: vk::PipelineLayout,\n    pipeline: vk::Pipeline,\n    vk: Arc<VkContext>,\n}\n\nimpl ConvertPipeline {\n    #[instrument(level = \"trace\", name = \"ConvertPipeline::new\", skip_all)]\n    pub fn new(vk: Arc<VkContext>, semiplanar: bool) -> anyhow::Result<Self> {\n        let shader = if semiplanar {\n            load_shader(\n                &vk.device,\n                include_bytes!(concat!(env!(\"OUT_DIR\"), \"/shaders/convert_semiplanar.spv\")),\n            )?\n        } else {\n            
load_shader(\n                &vk.device,\n                include_bytes!(concat!(env!(\"OUT_DIR\"), \"/shaders/convert_multiplanar.spv\")),\n            )?\n        };\n\n        let sampler = {\n            let create_info = vk::SamplerCreateInfo::default()\n                .mag_filter(vk::Filter::LINEAR)\n                .min_filter(vk::Filter::LINEAR)\n                .address_mode_u(vk::SamplerAddressMode::REPEAT)\n                .address_mode_v(vk::SamplerAddressMode::REPEAT)\n                .address_mode_w(vk::SamplerAddressMode::REPEAT);\n\n            unsafe { vk.device.create_sampler(&create_info, None)? }\n        };\n\n        let descriptor_set_layout = unsafe {\n            let samplers = [sampler];\n            let mut bindings = vec![\n                vk::DescriptorSetLayoutBinding::default()\n                    .binding(0)\n                    .descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n                    .descriptor_count(1)\n                    .stage_flags(vk::ShaderStageFlags::COMPUTE)\n                    .immutable_samplers(&samplers),\n                vk::DescriptorSetLayoutBinding::default()\n                    .binding(1)\n                    .descriptor_type(vk::DescriptorType::STORAGE_IMAGE)\n                    .descriptor_count(1)\n                    .stage_flags(vk::ShaderStageFlags::COMPUTE),\n                vk::DescriptorSetLayoutBinding::default()\n                    .binding(2)\n                    .descriptor_type(vk::DescriptorType::STORAGE_IMAGE)\n                    .descriptor_count(1)\n                    .stage_flags(vk::ShaderStageFlags::COMPUTE),\n            ];\n\n            if !semiplanar {\n                bindings.push(\n                    vk::DescriptorSetLayoutBinding::default()\n                        .binding(3)\n                        .descriptor_type(vk::DescriptorType::STORAGE_IMAGE)\n                        .descriptor_count(1)\n                        
.stage_flags(vk::ShaderStageFlags::COMPUTE),\n                );\n            }\n\n            vk.device.create_descriptor_set_layout(\n                &vk::DescriptorSetLayoutCreateInfo::default().bindings(&bindings),\n                None,\n            )?\n        };\n\n        let pipeline_layout = {\n            let ranges = [vk::PushConstantRange::default()\n                .stage_flags(vk::ShaderStageFlags::COMPUTE)\n                .offset(0)\n                .size(std::mem::size_of::<ConvertPushConstants>() as u32)];\n\n            let set_layouts = [descriptor_set_layout];\n            let create_info = vk::PipelineLayoutCreateInfo::default()\n                .set_layouts(&set_layouts)\n                .push_constant_ranges(&ranges);\n\n            unsafe { vk.device.create_pipeline_layout(&create_info, None)? }\n        };\n\n        let pipeline = unsafe {\n            let entry_point = std::ffi::CString::new(\"main\")?;\n            let stage = vk::PipelineShaderStageCreateInfo::default()\n                .stage(vk::ShaderStageFlags::COMPUTE)\n                .module(shader)\n                .name(&entry_point);\n\n            let create_info = vk::ComputePipelineCreateInfo::default()\n                .stage(stage)\n                .layout(pipeline_layout);\n\n            let pipeline = match vk.device.create_compute_pipelines(\n                vk::PipelineCache::null(),\n                &[create_info],\n                None,\n            ) {\n                Ok(pipelines) => pipelines[0],\n                Err((_, e)) => return Err(e.into()),\n            };\n\n            vk.device.destroy_shader_module(shader, None);\n            pipeline\n        };\n\n        Ok(Self {\n            semiplanar,\n            descriptor_set_layout,\n            sampler,\n            pipeline_layout,\n            pipeline,\n            vk,\n        })\n    }\n\n    pub unsafe fn cmd_convert(\n        &self,\n        cb: vk::CommandBuffer,\n        width: u32,\n        
height: u32,\n        descriptor_set: vk::DescriptorSet,\n        input_color_space: ColorSpace,\n        video_profile: VideoProfile,\n    ) {\n        self.vk\n            .device\n            .cmd_bind_pipeline(cb, vk::PipelineBindPoint::COMPUTE, self.pipeline);\n\n        self.vk.device.cmd_bind_descriptor_sets(\n            cb,\n            vk::PipelineBindPoint::COMPUTE,\n            self.pipeline_layout,\n            0,\n            &[descriptor_set],\n            &[],\n        );\n\n        let pc = ConvertPushConstants {\n            input_color_space: input_color_space.into(),\n            output_profile: video_profile.into(),\n        };\n\n        self.vk.device.cmd_push_constants(\n            cb,\n            self.pipeline_layout,\n            vk::ShaderStageFlags::COMPUTE,\n            0,\n            std::slice::from_raw_parts(\n                &pc as *const _ as *const u8,\n                std::mem::size_of::<ConvertPushConstants>(),\n            ),\n        );\n\n        // Each workgroup has 16x16 invocations, covering a 32x32 area.\n        let group_count_x = (width + 31) / 32;\n        let group_count_y = (height + 31) / 32;\n\n        self.vk\n            .device\n            .cmd_dispatch(cb, group_count_x, group_count_y, 1);\n    }\n\n    pub fn ds_for_conversion(\n        &self,\n        blend_image: &VkImage,\n        planes: &[vk::ImageView],\n    ) -> anyhow::Result<vk::DescriptorSet> {\n        let set_layouts = [self.descriptor_set_layout];\n        let allocate_info = vk::DescriptorSetAllocateInfo::default()\n            .descriptor_pool(self.vk.descriptor_pool)\n            .set_layouts(&set_layouts);\n\n        let ds = unsafe {\n            self.vk\n                .device\n                .allocate_descriptor_sets(&allocate_info)?\n                .pop()\n                .unwrap()\n        };\n\n        let blend_image_infos = [vk::DescriptorImageInfo::default()\n            .image_layout(vk::ImageLayout::GENERAL)\n            
.image_view(blend_image.view)];\n        let blend_write = vk::WriteDescriptorSet::default()\n            .dst_set(ds)\n            .dst_binding(0)\n            .dst_array_element(0)\n            .descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n            .image_info(&blend_image_infos);\n\n        let y_image_infos = [vk::DescriptorImageInfo::default()\n            .image_layout(vk::ImageLayout::GENERAL)\n            .image_view(planes[0])];\n        let y_write = vk::WriteDescriptorSet::default()\n            .dst_set(ds)\n            .dst_binding(1)\n            .dst_array_element(0)\n            .descriptor_type(vk::DescriptorType::STORAGE_IMAGE)\n            .image_info(&y_image_infos);\n\n        if self.semiplanar {\n            let uv_image_infos = [vk::DescriptorImageInfo::default()\n                .image_layout(vk::ImageLayout::GENERAL)\n                .image_view(planes[1])];\n            let uv_write = vk::WriteDescriptorSet::default()\n                .dst_set(ds)\n                .dst_binding(2)\n                .dst_array_element(0)\n                .descriptor_type(vk::DescriptorType::STORAGE_IMAGE)\n                .image_info(&uv_image_infos);\n\n            let writes = [blend_write, y_write, uv_write];\n            unsafe {\n                self.vk.device.update_descriptor_sets(&writes, &[]);\n            }\n        } else {\n            let u_image_infos = [vk::DescriptorImageInfo::default()\n                .image_layout(vk::ImageLayout::GENERAL)\n                .image_view(planes[1])];\n            let u_write = vk::WriteDescriptorSet::default()\n                .dst_set(ds)\n                .dst_binding(2)\n                .dst_array_element(0)\n                .descriptor_type(vk::DescriptorType::STORAGE_IMAGE)\n                .image_info(&u_image_infos);\n\n            let v_image_infos = [vk::DescriptorImageInfo::default()\n                .image_layout(vk::ImageLayout::GENERAL)\n                
.image_view(planes[2])];\n            let v_write = vk::WriteDescriptorSet::default()\n                .dst_set(ds)\n                .dst_binding(3)\n                .dst_array_element(0)\n                .descriptor_type(vk::DescriptorType::STORAGE_IMAGE)\n                .image_info(&v_image_infos);\n\n            let writes = [blend_write, y_write, u_write, v_write];\n            unsafe {\n                self.vk.device.update_descriptor_sets(&writes, &[]);\n            }\n        }\n\n        Ok(ds)\n    }\n}\n\nimpl Drop for ConvertPipeline {\n    fn drop(&mut self) {\n        let device = &self.vk.device;\n\n        unsafe {\n            device\n                .queue_wait_idle(self.vk.graphics_queue.queue)\n                .unwrap();\n\n            device.destroy_sampler(self.sampler, None);\n            device.destroy_pipeline(self.pipeline, None);\n            device.destroy_pipeline_layout(self.pipeline_layout, None);\n            device.destroy_descriptor_set_layout(self.descriptor_set_layout, None);\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session/video/convert.slang",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nimport color;\n\nconst Sampler2D blend_image;\n\nconst RWTexture2D<float> luminance;\n\n#ifdef SEMIPLANAR\nconst RWTexture2D<float2> chroma_uv;\n#else\nconst RWTexture2D<float> chroma_u;\nconst RWTexture2D<float> chroma_v;\n#endif\n\n// This must match the enum in convert.rs.\nenum InputTextureColorSpace\n{\n    SRGB = 0,\n    LINEAR_EXTENDED_SRGB = 1,\n    HDR10 = 2,\n}\n\n/// This must match the enum in convert.rs.\nenum OutputProfile\n{\n    HD = 0,\n    HDR10 = 1,\n}\n\nstruct PushConstants\n{\n    InputTextureColorSpace input_color_space;\n    OutputProfile output_profile;\n}\n\n[[vk::push_constant]]\nPushConstants pc;\n\nfloat3 to_bt709(float3 rgb, InputTextureColorSpace color_space)\n{\n    float3 linear;\n    switch (color_space)\n    {\n    case InputTextureColorSpace::SRGB:\n        linear = srgb_eotf(rgb);\n        break;\n    case InputTextureColorSpace::HDR10:\n    {\n        // Treat 203 nits as 1.0, and clip everything above that.\n        linear = pq_eotf(rgb);\n        linear = clamp(linear * (PQ_MAX_WHITE / SDR_REFERENCE_WHITE), 0.0, 1.0);\n        break;\n    }\n    case InputTextureColorSpace::LINEAR_EXTENDED_SRGB:\n        linear = clamp(rgb, 0.0, 1.0);\n        break;\n    }\n\n    return bt709_inverse_eotf(linear);\n}\n\nfloat3 to_bt2020_pq(float3 rgb, InputTextureColorSpace color_space)\n{\n    float3 bt2020_linear;\n    switch (color_space)\n    {\n    case InputTextureColorSpace::SRGB:\n        bt2020_linear = transform(srgb_eotf(rgb), Primaries::BT709, Primaries::BT2020);\n        break;\n    case InputTextureColorSpace::LINEAR_EXTENDED_SRGB:\n        bt2020_linear = transform(rgb, Primaries::BT709, Primaries::BT2020);\n        break;\n    case InputTextureColorSpace::HDR10:\n        // Happy identity path.\n        return rgb;\n    }\n\n    // Tone-map 1.0 to 203 nits, then delinearize.\n    return 
clamp(pq_inverse_eotf(bt2020_linear * (SDR_REFERENCE_WHITE / PQ_MAX_WHITE)), 0.0, 1.0);\n}\n\n[shader(\"compute\")]\n[numthreads(16, 16)]\nvoid main(uint2 self_id: SV_DispatchThreadID)\n{\n    let coords = uint2(self_id.x * 2, self_id.y * 2);\n    let chroma_coords = coords / 2;\n\n    int j, k;\n    float us[4];\n    float vs[4];\n    for (k = 0; k < 2; k += 1)\n    {\n        for (j = 0; j < 2; j += 1)\n        {\n            let texel_coords = coords + uint2(j, k);\n            float4 texel = blend_image.Load(uint3(texel_coords, 0));\n\n            float3 yuv;\n            switch (pc.output_profile)\n            {\n            case OutputProfile::HD:\n                yuv = encode_ycbcr(to_bt709(texel.rgb, pc.input_color_space),\n                                   YCbCrModel::BT709, false);\n                break;\n            case OutputProfile::HDR10:\n                yuv = encode_ycbcr(to_bt2020_pq(texel.rgb, pc.input_color_space),\n                                   YCbCrModel::BT2020, false);\n                break;\n            }\n\n            luminance[texel_coords] = yuv.x;\n\n            int i = k * 2 + j;\n            us[i] = yuv.y;\n            vs[i] = yuv.z;\n        }\n    }\n\n    let u = lerp(lerp(us[0], us[1], 0.5), lerp(us[2], us[3], 0.5), 0.5);\n    let v = lerp(lerp(vs[0], vs[1], 0.5), lerp(vs[2], vs[3], 0.5), 0.5);\n\n#ifdef SEMIPLANAR\n    chroma_uv[chroma_coords] = float2(u, v);\n#else\n    chroma_u[chroma_coords] = u;\n    chroma_v[chroma_coords] = v;\n#endif\n}\n"
  },
  {
    "path": "mm-server/src/session/video.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{mem::ManuallyDrop, sync::Arc};\n\nuse anyhow::anyhow;\nuse ash::vk;\n\nmod composite;\nmod convert;\n\nuse tracing::{instrument, trace, trace_span, warn};\n\nuse super::{\n    compositor::{self, buffers::SyncobjTimelinePoint},\n    DisplayParams, SessionHandle, VideoStreamParams,\n};\nuse crate::{\n    color::ColorSpace,\n    encoder::{self},\n    session::EPOCH,\n    vulkan::*,\n};\n\nstruct Sink(SessionHandle);\n\nimpl encoder::Sink for Sink {\n    fn write_frame(\n        &mut self,\n        ts: std::time::Instant,\n        frame: bytes::Bytes,\n        hierarchical_layer: u32,\n        is_keyframe: bool,\n    ) {\n        let pts = (ts - *EPOCH).as_millis() as u64;\n        self.0\n            .dispatch_video_frame(pts, frame, hierarchical_layer, is_keyframe);\n\n        // Wake the compositor, so it can release buffers and send presentation\n        // feedback.\n        let _ = self.0.wake();\n    }\n}\n\npub struct SwapFrame {\n    convert_ds: vk::DescriptorSet, // Should be dropped first.\n    draws: Vec<(vk::ImageView, glam::Vec2, glam::Vec2)>,\n    texture_semas: Vec<vk::Semaphore>, // Reused each frame.\n    texture_semas_used: usize,\n\n    /// An RGBA image to composite to.\n    blend_image: VkImage,\n    /// A YUV image we copy to before passing on to the encoder.\n    encode_image: VkImage,\n    plane_views: Vec<vk::ImageView>,\n\n    staging_cb: vk::CommandBuffer,\n    render_cb: vk::CommandBuffer,\n    use_staging: bool,\n\n    timeline: VkTimelineSemaphore,\n    tp_staging_done: VkTimelinePoint,\n    tp_render_done: VkTimelinePoint,\n    tp_clear: VkTimelinePoint,\n\n    // For tracing.\n    staging_ts_pool: VkTimestampQueryPool,\n    staging_span: Option<tracy_client::GpuSpan>,\n    render_ts_pool: VkTimestampQueryPool,\n    render_span: Option<tracy_client::GpuSpan>,\n}\n\npub enum TextureSync {\n    
Explicit(SyncobjTimelinePoint),\n    ImplicitInterop,\n}\n\npub struct EncodePipeline {\n    display_params: DisplayParams,\n    streaming_params: VideoStreamParams,\n\n    composite_pipeline: composite::CompositePipeline,\n    convert_pipeline: convert::ConvertPipeline,\n    encoder: ManuallyDrop<encoder::Encoder>,\n\n    swap: [SwapFrame; 2],\n    swap_idx: usize,\n\n    vk: Arc<VkContext>,\n}\n\nimpl EncodePipeline {\n    #[instrument(level = \"trace\", skip_all)]\n    pub fn new(\n        vk: Arc<VkContext>,\n        compositor_handle: SessionHandle,\n        display_params: DisplayParams,\n        streaming_params: VideoStreamParams,\n    ) -> anyhow::Result<Self> {\n        if streaming_params.width != display_params.width\n            || streaming_params.height != display_params.height\n        {\n            trace!(\n                ?streaming_params,\n                ?display_params,\n                \"stream and display params differ\"\n            );\n\n            // Superres is not implemented yet.\n            unimplemented!()\n        }\n\n        let sink = Sink(compositor_handle);\n        let mut encoder =\n            encoder::Encoder::new(vk.clone(), streaming_params, display_params.framerate, sink)?;\n\n        let encode_format = encoder.input_format();\n\n        let composite_pipeline = composite::CompositePipeline::new(vk.clone())?;\n        let convert_pipeline =\n            convert::ConvertPipeline::new(vk.clone(), format_is_semiplanar(encode_format))?;\n\n        let swap = [\n            new_swapframe(vk.clone(), encoder.create_input_image()?, &convert_pipeline)?,\n            new_swapframe(vk.clone(), encoder.create_input_image()?, &convert_pipeline)?,\n        ];\n\n        Ok(Self {\n            display_params,\n            streaming_params,\n\n            composite_pipeline,\n            convert_pipeline,\n            encoder: ManuallyDrop::new(encoder),\n\n            swap,\n            swap_idx: 0,\n\n            vk,\n        
 })\n    }\n\n    // pub fn encode_single_surface(&mut self, surface: wl_surface::WlSurface) {\n    //     todo!()\n    // }\n\n    #[instrument(level = \"trace\", skip_all)]\n    pub unsafe fn begin(&mut self) -> anyhow::Result<bool> {\n        let device = &self.vk.device;\n        let frame = &mut self.swap[self.swap_idx];\n\n        let ready = frame.tp_clear.poll()?;\n\n        // If the previous frame isn't ready, drop this one to let the app\n        // catch up.\n        if !ready {\n            return Ok(false);\n        }\n\n        // Trace on the GPU side.\n        if let Some(ref ctx) = self.vk.graphics_queue.tracy_context {\n            if let Some(span) = frame.staging_span.take() {\n                let timestamps = frame.staging_ts_pool.fetch_results(device)?;\n                span.upload_timestamp(timestamps[0], timestamps[1]);\n            }\n\n            if let Some(span) = frame.render_span.take() {\n                let timestamps = frame.render_ts_pool.fetch_results(device)?;\n                span.upload_timestamp(timestamps[0], timestamps[1]);\n            }\n\n            // We conditionally create the staging span, below. 
Rendering always happens.\n            frame.render_span = Some(ctx.span(tracy_client::span_location!(\"render\"))?);\n        }\n\n        frame.texture_semas_used = 0;\n        frame.tp_staging_done += 10;\n        frame.tp_render_done = &frame.tp_staging_done + 1;\n        frame.tp_clear = &frame.tp_render_done + 1;\n\n        frame.use_staging = false;\n\n        begin_command_buffer(device, frame.staging_cb)?;\n        begin_command_buffer(device, frame.render_cb)?;\n\n        // Record the start timestamp.\n        frame.render_ts_pool.cmd_reset(device, frame.render_cb);\n        device.cmd_write_timestamp(\n            frame.render_cb,\n            vk::PipelineStageFlags::TOP_OF_PIPE,\n            frame.render_ts_pool.pool,\n            0,\n        );\n\n        // Transition the blend image to be writable.\n        insert_image_barrier(\n            device,\n            frame.render_cb,\n            frame.blend_image.image,\n            None,\n            vk::ImageLayout::UNDEFINED,\n            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,\n            vk::PipelineStageFlags2::NONE,\n            vk::AccessFlags2::NONE,\n            vk::PipelineStageFlags2::COLOR_ATTACHMENT_OUTPUT,\n            vk::AccessFlags2::COLOR_ATTACHMENT_WRITE,\n        );\n\n        Ok(true)\n    }\n\n    /// Adds a surface to be drawn. Returns the timeline point when the texture\n    /// will no longer be in use. 
A return value of None indicates the texture\n    /// is already safe to reuse.\n    #[instrument(level = \"trace\", skip_all)]\n    pub unsafe fn composite_surface(\n        &mut self,\n        texture: &compositor::buffers::Buffer,\n        sync: Option<TextureSync>,\n        dest: compositor::surface::SurfaceConfiguration,\n    ) -> anyhow::Result<Option<VkTimelinePoint>> {\n        let device = &self.vk.device;\n        let frame = &mut self.swap[self.swap_idx];\n\n        let (view, release) = match &texture.backing {\n            compositor::buffers::BufferBacking::Shm {\n                dirty,\n                staging_buffer,\n                image,\n                format,\n                ..\n            } => {\n                if *dirty {\n                    // We only set up tracing for the staging command buffer if\n                    // we're actually going to use it.\n                    if !frame.use_staging {\n                        if let Some(ref ctx) = self.vk.graphics_queue.tracy_context {\n                            frame.staging_span = Some(ctx.span(tracy_client::span_location!())?);\n                        }\n\n                        // Record the start timestamp.\n                        frame.staging_ts_pool.cmd_reset(device, frame.staging_cb);\n                        device.cmd_write_timestamp(\n                            frame.staging_cb,\n                            vk::PipelineStageFlags::TOP_OF_PIPE,\n                            frame.staging_ts_pool.pool,\n                            0,\n                        );\n                    }\n\n                    frame.use_staging = true;\n\n                    // Transfer the image to be writable. 
The upload happens\n                    // in the staging command buffer.\n                    insert_image_barrier(\n                        device,\n                        frame.staging_cb,\n                        image.image,\n                        None,\n                        vk::ImageLayout::UNDEFINED,\n                        vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n                        vk::PipelineStageFlags2::NONE,\n                        vk::AccessFlags2::NONE,\n                        vk::PipelineStageFlags2::TRANSFER,\n                        vk::AccessFlags2::TRANSFER_WRITE,\n                    );\n\n                    // Upload from the staging buffer to the texture.\n                    cmd_upload_shm(\n                        device,\n                        frame.staging_cb,\n                        staging_buffer,\n                        image,\n                        format.stride / format.bpp as u32,\n                        format.height,\n                    );\n                }\n\n                // Transition the image to be readable (in the second command buffer).\n                insert_image_barrier(\n                    device,\n                    frame.render_cb,\n                    image.image,\n                    None,\n                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n                    vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n                    vk::PipelineStageFlags2::TRANSFER,\n                    vk::AccessFlags2::TRANSFER_WRITE,\n                    vk::PipelineStageFlags2::FRAGMENT_SHADER,\n                    vk::AccessFlags2::SHADER_READ,\n                );\n\n                assert!(sync.is_none());\n\n                (image.view, None)\n            }\n            compositor::buffers::BufferBacking::Dmabuf { image, fd, .. } => {\n                // Transition the image to be readable. 
A special queue,\n                // EXTERNAL, is used in a queue transfer to indicate\n                // acquiring the texture from the wayland client.\n                insert_image_barrier(\n                    device,\n                    frame.render_cb,\n                    image.image,\n                    Some((vk::QUEUE_FAMILY_FOREIGN_EXT, self.vk.graphics_queue.family)),\n                    vk::ImageLayout::GENERAL,\n                    vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n                    vk::PipelineStageFlags2::NONE,\n                    vk::AccessFlags2::NONE,\n                    vk::PipelineStageFlags2::FRAGMENT_SHADER,\n                    vk::AccessFlags2::SHADER_READ,\n                );\n\n                // Release the image at the end.\n                insert_image_barrier(\n                    device,\n                    frame.render_cb,\n                    image.image,\n                    Some((self.vk.graphics_queue.family, vk::QUEUE_FAMILY_FOREIGN_EXT)),\n                    vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n                    vk::ImageLayout::GENERAL,\n                    vk::PipelineStageFlags2::ALL_GRAPHICS,\n                    vk::AccessFlags2::SHADER_READ,\n                    vk::PipelineStageFlags2::NONE,\n                    vk::AccessFlags2::NONE,\n                );\n\n                if let Some(sync) = sync {\n                    let sema = allocate_texture_semaphore(self.vk.clone(), frame)?;\n\n                    match sync {\n                        TextureSync::Explicit(syncobj) => {\n                            syncobj.import_as_semaphore(sema)?;\n                        }\n                        TextureSync::ImplicitInterop => {\n                            compositor::buffers::import_dmabuf_fence_as_semaphore(\n                                self.vk.clone(),\n                                sema,\n                                fd,\n                            )?;\n                        }\n 
                   }\n                }\n\n                (image.view, Some(frame.tp_render_done.clone()))\n            }\n        };\n\n        // Convert the destination rect into clip coordinates.\n        let display_size: glam::UVec2 =\n            (self.display_params.width, self.display_params.height).into();\n        let dst_pos = (dest.topleft.as_vec2() / display_size.as_vec2() * 2.0) - 1.0;\n        let dst_size = dest.size.as_vec2() / display_size.as_vec2() * 2.0;\n\n        // Draw.\n        frame.draws.push((view, dst_pos, dst_size));\n\n        Ok(release)\n    }\n\n    /// End the current frame and submit it to the GPU. Returns the timeline\n    /// point indicating when rendering and encoding have both completed.\n    #[instrument(skip_all)]\n    pub unsafe fn end_and_submit(&mut self) -> anyhow::Result<VkTimelinePoint> {\n        let device = &self.vk.device;\n        let frame = &mut self.swap[self.swap_idx];\n\n        // Collate draw calls. We don't do this as we go because we need to do\n        // all the sync outside of a dynamic rendering pass.\n        self.composite_pipeline\n            .begin_compositing(frame.render_cb, &frame.blend_image);\n\n        for (view, dst_pos, dst_size) in frame.draws.drain(..) 
{\n            self.composite_pipeline\n                .composite_surface(frame.render_cb, view, dst_pos, dst_size)?;\n        }\n\n        self.composite_pipeline.end_compositing(frame.render_cb);\n\n        // Transition the blend image to be readable.\n        insert_image_barrier(\n            device,\n            frame.render_cb,\n            frame.blend_image.image,\n            None,\n            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,\n            vk::ImageLayout::GENERAL,\n            vk::PipelineStageFlags2::COLOR_ATTACHMENT_OUTPUT,\n            vk::AccessFlags2::COLOR_ATTACHMENT_WRITE,\n            vk::PipelineStageFlags2::COMPUTE_SHADER,\n            vk::AccessFlags2::SHADER_STORAGE_READ,\n        );\n\n        // Acquire the encode image from the encode queue, but not for the\n        // first frame.\n        if frame.tp_clear.value() > 20 {\n            let src_queue_family = self.vk.encode_queue.as_ref().unwrap().family;\n\n            insert_image_barrier(\n                device,\n                frame.render_cb,\n                frame.encode_image.image,\n                Some((src_queue_family, self.vk.graphics_queue.family)),\n                vk::ImageLayout::VIDEO_ENCODE_SRC_KHR,\n                vk::ImageLayout::GENERAL,\n                vk::PipelineStageFlags2::NONE,\n                vk::AccessFlags2::NONE,\n                vk::PipelineStageFlags2::COMPUTE_SHADER,\n                vk::AccessFlags2::SHADER_STORAGE_WRITE,\n            );\n        } else {\n            // Otherwise, just transition the image to be writable.\n            insert_image_barrier(\n                device,\n                frame.render_cb,\n                frame.encode_image.image,\n                None,\n                vk::ImageLayout::UNDEFINED,\n                vk::ImageLayout::GENERAL,\n                vk::PipelineStageFlags2::NONE,\n                vk::AccessFlags2::NONE,\n                vk::PipelineStageFlags2::COMPUTE_SHADER,\n                
vk::AccessFlags2::SHADER_STORAGE_WRITE,\n            );\n        }\n\n        // We're converting the blend image, which is scRGB.\n        let input_color_space = ColorSpace::LinearExtSrgb;\n\n        self.convert_pipeline.cmd_convert(\n            frame.render_cb,\n            frame.blend_image.width,\n            frame.blend_image.height,\n            frame.convert_ds,\n            input_color_space,\n            self.streaming_params.profile,\n        );\n\n        // Transfer to the encode queue.\n        let dst_queue_family = self.vk.encode_queue.as_ref().unwrap().family;\n        insert_image_barrier(\n            device,\n            frame.render_cb,\n            frame.encode_image.image,\n            Some((self.vk.graphics_queue.family, dst_queue_family)),\n            vk::ImageLayout::GENERAL,\n            vk::ImageLayout::VIDEO_ENCODE_SRC_KHR,\n            vk::PipelineStageFlags2::COMPUTE_SHADER,\n            vk::AccessFlags2::SHADER_STORAGE_WRITE,\n            vk::PipelineStageFlags2::empty(),\n            vk::AccessFlags2::empty(),\n        );\n\n        let mut submits = Vec::new();\n\n        let staging_cb_infos =\n            [vk::CommandBufferSubmitInfoKHR::default().command_buffer(frame.staging_cb)];\n\n        let staging_signal_infos = [vk::SemaphoreSubmitInfo::default()\n            .semaphore(frame.timeline.as_semaphore())\n            .stage_mask(vk::PipelineStageFlags2::ALL_COMMANDS)\n            .value(frame.tp_staging_done.value())];\n\n        let staging_submit_info = vk::SubmitInfo2::default()\n            .command_buffer_infos(&staging_cb_infos)\n            .signal_semaphore_infos(&staging_signal_infos);\n\n        // Only submit the staging cb if we actually recorded commands to it.\n        if frame.use_staging {\n            // Record the end timestamp.\n            device.cmd_write_timestamp(\n                frame.staging_cb,\n                vk::PipelineStageFlags::ALL_COMMANDS,\n                frame.staging_ts_pool.pool,\n   
             1,\n            );\n\n            if let Some(span) = &mut frame.staging_span {\n                span.end_zone();\n            }\n\n            device.end_command_buffer(frame.staging_cb)?;\n            submits.push(staging_submit_info);\n        } else {\n            frame.tp_staging_done.signal()?;\n        }\n\n        // Record the end timestamp.\n        device.cmd_write_timestamp(\n            frame.render_cb,\n            vk::PipelineStageFlags::ALL_COMMANDS,\n            frame.render_ts_pool.pool,\n            1,\n        );\n\n        if let Some(span) = &mut frame.render_span {\n            span.end_zone();\n        }\n\n        device.end_command_buffer(frame.render_cb)?;\n\n        let render_cb_infos =\n            [vk::CommandBufferSubmitInfoKHR::default().command_buffer(frame.render_cb)];\n        let mut render_wait_infos = vec![vk::SemaphoreSubmitInfo::default()\n            .semaphore(frame.timeline.as_semaphore())\n            .stage_mask(vk::PipelineStageFlags2::FRAGMENT_SHADER)\n            .value(frame.tp_staging_done.value())];\n        let render_signal_infos = [vk::SemaphoreSubmitInfo::default()\n            .semaphore(frame.timeline.as_semaphore())\n            .stage_mask(vk::PipelineStageFlags2::ALL_COMMANDS)\n            .value(frame.tp_render_done.value())];\n\n        for sema in &frame.texture_semas[0..frame.texture_semas_used] {\n            render_wait_infos.push(\n                vk::SemaphoreSubmitInfo::default()\n                    .semaphore(*sema)\n                    .stage_mask(vk::PipelineStageFlags2::FRAGMENT_SHADER),\n            );\n        }\n\n        let render_submit_info = vk::SubmitInfo2::default()\n            .command_buffer_infos(&render_cb_infos)\n            .wait_semaphore_infos(&render_wait_infos)\n            .signal_semaphore_infos(&render_signal_infos);\n\n        submits.push(render_submit_info);\n\n        trace_span!(\"queue_submit2\").in_scope(|| {\n            
device.queue_submit2(self.vk.graphics_queue.queue, &submits, vk::Fence::null())\n        })?;\n\n        // Trigger encode.\n        self.encoder.submit_encode(\n            &frame.encode_image,\n            frame.tp_render_done.clone(),\n            frame.tp_clear.clone(),\n        )?;\n\n        // Wait for uploads to finish before returning, so that writes to the\n        // staging buffers are synchronized.\n        trace_span!(\"tp_staging_done.wait\").in_scope(|| frame.tp_staging_done.wait())?;\n        let tp_clear = frame.tp_clear.clone();\n\n        let swap_len = self.swap.len();\n        self.swap_idx = (self.swap_idx + 1) % swap_len;\n\n        Ok(tp_clear)\n    }\n\n    pub fn request_refresh(&mut self) {\n        self.encoder.request_refresh()\n    }\n}\n\nimpl Drop for EncodePipeline {\n    fn drop(&mut self) {\n        let device = &self.vk.device;\n\n        // Drop the encoder, since it consumes some of the shared resources below.\n        unsafe {\n            ManuallyDrop::drop(&mut self.encoder);\n        }\n\n        unsafe {\n            device.device_wait_idle().unwrap();\n\n            for frame in self.swap.iter() {\n                device.free_command_buffers(\n                    self.vk.graphics_queue.command_pool,\n                    &[frame.staging_cb, frame.render_cb],\n                );\n\n                for view in &frame.plane_views {\n                    device.destroy_image_view(*view, None);\n                }\n\n                for sema in &frame.texture_semas {\n                    device.destroy_semaphore(*sema, None);\n                }\n\n                device.destroy_query_pool(frame.render_ts_pool.pool, None);\n                device.destroy_query_pool(frame.staging_ts_pool.pool, None);\n            }\n        }\n    }\n}\n\nfn new_swapframe(\n    vk: Arc<VkContext>,\n    encode_image: VkImage,\n    convert_pipeline: &convert::ConvertPipeline,\n) -> anyhow::Result<SwapFrame> {\n    let blend_image = VkImage::new(\n   
     vk.clone(),\n        composite::BLEND_FORMAT,\n        false,\n        encode_image.width,\n        encode_image.height,\n        vk::ImageUsageFlags::COLOR_ATTACHMENT | vk::ImageUsageFlags::SAMPLED,\n        vk::SharingMode::EXCLUSIVE,\n        vk::ImageCreateFlags::empty(),\n    )?;\n\n    let mut plane_views = Vec::new();\n    let (single_plane_format, double_plane_format) = disjoint_plane_formats(encode_image.format)\n        .ok_or(anyhow!(\n            \"couldn't find a disjoint plane formats for {:?}\",\n            encode_image.format\n        ))?;\n\n    let disjoint_formats = if format_is_semiplanar(encode_image.format) {\n        vec![\n            single_plane_format, // Y\n            double_plane_format, // UV\n        ]\n    } else {\n        vec![\n            single_plane_format, // Y\n            single_plane_format, // U\n            single_plane_format, // V\n        ]\n    };\n\n    let aspects = [\n        vk::ImageAspectFlags::PLANE_0,\n        vk::ImageAspectFlags::PLANE_1,\n        vk::ImageAspectFlags::PLANE_2,\n    ];\n\n    for (idx, format) in disjoint_formats.into_iter().enumerate() {\n        let mut usage_info =\n            vk::ImageViewUsageCreateInfo::default().usage(vk::ImageUsageFlags::STORAGE);\n        let create_info = vk::ImageViewCreateInfo::default()\n            .image(encode_image.image)\n            .view_type(vk::ImageViewType::TYPE_2D)\n            .format(format)\n            .components(vk::ComponentMapping {\n                r: vk::ComponentSwizzle::IDENTITY,\n                g: vk::ComponentSwizzle::IDENTITY,\n                b: vk::ComponentSwizzle::IDENTITY,\n                a: vk::ComponentSwizzle::IDENTITY,\n            })\n            .subresource_range(vk::ImageSubresourceRange {\n                aspect_mask: aspects[idx],\n                base_mip_level: 0,\n                level_count: 1,\n                base_array_layer: 0,\n                layer_count: 1,\n            })\n            
.push_next(&mut usage_info);\n\n        let view = unsafe { vk.device.create_image_view(&create_info, None)? };\n        plane_views.push(view);\n    }\n\n    let convert_ds = convert_pipeline.ds_for_conversion(&blend_image, &plane_views)?;\n\n    let staging_ts_pool = create_timestamp_query_pool(&vk.device, 2)?;\n    let render_ts_pool = create_timestamp_query_pool(&vk.device, 2)?;\n\n    let timeline = VkTimelineSemaphore::new(vk.clone(), 0)?;\n\n    Ok(SwapFrame {\n        convert_ds,\n        texture_semas: Vec::new(),\n        texture_semas_used: 0,\n        draws: Vec::new(),\n        blend_image,\n        encode_image,\n        plane_views,\n        staging_cb: allocate_command_buffer(&vk.device, vk.graphics_queue.command_pool)?,\n        render_cb: allocate_command_buffer(&vk.device, vk.graphics_queue.command_pool)?,\n        use_staging: false,\n        timeline: timeline.clone(),\n        tp_staging_done: timeline.new_point(0),\n        tp_render_done: timeline.new_point(0),\n        tp_clear: timeline.new_point(0),\n\n        staging_ts_pool,\n        staging_span: None,\n        render_ts_pool,\n        render_span: None,\n    })\n}\n\nfn allocate_texture_semaphore(\n    vk: Arc<VkContext>,\n    frame: &mut SwapFrame,\n) -> anyhow::Result<vk::Semaphore> {\n    let idx = frame.texture_semas_used;\n    frame.texture_semas_used += 1;\n\n    if frame.texture_semas_used <= frame.texture_semas.len() {\n        return Ok(frame.texture_semas[idx]);\n    }\n\n    let sema = unsafe {\n        vk.device\n            .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)?\n    };\n\n    frame.texture_semas.push(sema);\n    Ok(sema)\n}\n\nfn format_is_semiplanar(format: vk::Format) -> bool {\n    // grep for 2PLANE in the vulkan spec.\n    matches!(\n        format,\n        vk::Format::G8_B8R8_2PLANE_420_UNORM\n            | vk::Format::G8_B8R8_2PLANE_422_UNORM\n            | vk::Format::G8_B8R8_2PLANE_444_UNORM\n            | 
vk::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16\n            | vk::Format::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16\n            | vk::Format::G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16\n            | vk::Format::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16\n            | vk::Format::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16\n            | vk::Format::G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16\n            | vk::Format::G16_B16R16_2PLANE_420_UNORM\n            | vk::Format::G16_B16R16_2PLANE_422_UNORM\n            | vk::Format::G16_B16R16_2PLANE_444_UNORM\n    )\n}\n\npub unsafe fn cmd_upload_shm(\n    device: &ash::Device,\n    cb: vk::CommandBuffer,\n    buffer: &VkHostBuffer,\n    image: &VkImage,\n    stride: u32, // In texels.\n    height: u32, // In texels.\n) {\n    let region = vk::BufferImageCopy::default()\n        .buffer_row_length(stride)\n        .buffer_image_height(height)\n        .image_subresource(vk::ImageSubresourceLayers {\n            aspect_mask: vk::ImageAspectFlags::COLOR,\n            mip_level: 0,\n            base_array_layer: 0,\n            layer_count: 1,\n        })\n        .image_extent(vk::Extent3D {\n            width: image.width,\n            height: image.height,\n            depth: 1,\n        });\n\n    let regions = [region];\n    device.cmd_copy_buffer_to_image(\n        cb,\n        buffer.buffer,\n        image.image,\n        vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n        &regions,\n    );\n}\n\nfn disjoint_plane_formats(format: vk::Format) -> Option<(vk::Format, vk::Format)> {\n    match format {\n        vk::Format::G8_B8R8_2PLANE_420_UNORM\n        | vk::Format::G8_B8R8_2PLANE_422_UNORM\n        | vk::Format::G8_B8R8_2PLANE_444_UNORM\n        | vk::Format::G8_B8_R8_3PLANE_420_UNORM\n        | vk::Format::G8_B8_R8_3PLANE_422_UNORM\n        | vk::Format::G8_B8_R8_3PLANE_444_UNORM => {\n            Some((vk::Format::R8_UNORM, vk::Format::R8G8_UNORM))\n        }\n        
vk::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16\n        | vk::Format::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16\n        | vk::Format::G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16\n        | vk::Format::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16\n        | vk::Format::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16\n        | vk::Format::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 => Some((\n            vk::Format::R10X6_UNORM_PACK16,\n            vk::Format::R10X6G10X6_UNORM_2PACK16,\n        )),\n        vk::Format::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16\n        | vk::Format::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16\n        | vk::Format::G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16\n        | vk::Format::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16\n        | vk::Format::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16\n        | vk::Format::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 => Some((\n            vk::Format::R12X4_UNORM_PACK16,\n            vk::Format::R12X4G12X4_UNORM_2PACK16,\n        )),\n        vk::Format::G16_B16R16_2PLANE_420_UNORM\n        | vk::Format::G16_B16R16_2PLANE_422_UNORM\n        | vk::Format::G16_B16R16_2PLANE_444_UNORM\n        | vk::Format::G16_B16_R16_3PLANE_420_UNORM\n        | vk::Format::G16_B16_R16_3PLANE_422_UNORM\n        | vk::Format::G16_B16_R16_3PLANE_444_UNORM => {\n            Some((vk::Format::R16_UNORM, vk::Format::R16G16_UNORM))\n        }\n        _ => None,\n    }\n}\n"
  },
  {
    "path": "mm-server/src/session.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{path::PathBuf, sync::Arc, time};\n\nuse anyhow::{anyhow, bail};\nuse crossbeam_channel as crossbeam;\nuse mm_protocol as protocol;\nuse pathsearch::find_executable_in_path;\nuse tracing::{debug_span, info};\n\nuse crate::{\n    codec::probe_codec, server::stream::StreamWriter, vulkan::VkContext,\n    waking_sender::WakingSender,\n};\n\nmod audio;\npub mod compositor;\npub mod control;\nmod handle;\nmod input;\nmod reactor;\nmod video;\n\nuse control::{AudioStreamParams, ControlMessage, DisplayParams, SessionEvent, VideoStreamParams};\npub use handle::SessionHandle;\npub use input::GamepadLayout;\nuse reactor::Reactor;\npub use reactor::EPOCH;\n\n/// How long to wait for the compositor to accept a new attachment.\nconst ATTACH_TIMEOUT: time::Duration = time::Duration::from_secs(10);\n\npub struct Session {\n    pub id: u64,\n    pub display_params: DisplayParams,\n    pub application_id: String,\n    pub started: time::SystemTime,\n    pub detached_since: Option<time::Instant>,\n    pub permanent_gamepads: Vec<protocol::Gamepad>,\n    pub defunct: bool,\n\n    comp_thread_handle: std::thread::JoinHandle<anyhow::Result<()>>,\n    control_sender: WakingSender<ControlMessage>,\n    operator_attachment_id: Option<u64>,\n\n    pub bug_report_dir: Option<PathBuf>,\n\n    vk: Arc<VkContext>,\n}\n\npub struct Attachment {\n    pub session_id: u64,\n    pub attachment_id: u64,\n    pub events: crossbeam::Receiver<SessionEvent>,\n    pub control: WakingSender<ControlMessage>,\n}\n\nimpl Session {\n    /// Launches a standalone compositor and the application process. 
Blocks\n    /// until both have started up and connected over a unix socket.\n    pub fn launch(\n        vk: Arc<VkContext>,\n        id: u64,\n        application_id: &str,\n        application_config: &super::config::AppConfig,\n        display_params: DisplayParams,\n        permanent_gamepads: Vec<protocol::Gamepad>,\n        bug_report_dir: Option<PathBuf>,\n    ) -> anyhow::Result<Self> {\n        // Do an early check that the executable exists.\n        let exe = application_config.command.first().unwrap();\n        find_executable_in_path(exe).ok_or(anyhow!(\"command {:?} not in PATH\", exe))?;\n\n        // Launch the compositor, which in turn launches the app.\n        let (ready_send, ready_recv) = oneshot::channel();\n        let vk_clone = vk.clone();\n        let app_name = application_id.to_owned();\n        let app_cfg = application_config.clone();\n        let gamepads = permanent_gamepads\n            .iter()\n            .map(|pad| (pad.id, GamepadLayout::GenericDualStick)) // TODO layout.\n            .collect();\n\n        let bug_report_dir_clone = bug_report_dir.clone();\n        let comp_thread_handle = std::thread::spawn(move || {\n            tracy_client::set_thread_name!(\"compositor\");\n\n            let span = debug_span!(\"session\", session_id = id, app = app_name);\n            let _guard = span.enter();\n\n            Reactor::run(\n                vk_clone,\n                app_cfg,\n                display_params,\n                gamepads,\n                bug_report_dir_clone,\n                ready_send,\n            )\n        });\n\n        info!(session_id = id, application = ?application_id, \"launching session\");\n\n        // Wait until the compositor is ready.\n        let control_sender = match ready_recv.recv() {\n            Ok(s) => s,\n            Err(_) => {\n                return match comp_thread_handle.join() {\n                    Ok(Ok(())) => Err(anyhow!(\"compositor thread exited unexpectedly\")),\n     
               Ok(Err(e)) => Err(e),\n                    Err(_) => Err(anyhow!(\"compositor thread panicked\")),\n                }\n            }\n        };\n\n        Ok(Self {\n            id,\n            application_id: application_id.to_string(),\n            display_params,\n            permanent_gamepads,\n            started: time::SystemTime::now(),\n            defunct: false,\n            detached_since: None,\n            operator_attachment_id: None,\n            comp_thread_handle,\n            control_sender,\n            bug_report_dir,\n            vk,\n        })\n    }\n\n    pub fn update_display_params(&mut self, display_params: DisplayParams) -> anyhow::Result<()> {\n        if self.defunct {\n            return Err(anyhow!(\"session defunct\"));\n        }\n\n        match self\n            .control_sender\n            .send(ControlMessage::UpdateDisplayParams(display_params))\n        {\n            Ok(_) => {\n                self.display_params = display_params;\n                Ok(())\n            }\n            Err(crossbeam::SendError(_)) => {\n                self.defunct = true;\n                Err(anyhow!(\"compositor died\"))\n            }\n        }\n    }\n\n    pub fn attach(\n        &mut self,\n        id: u64,\n        operator: bool,\n        video_params: VideoStreamParams,\n        audio_params: AudioStreamParams,\n        stream_writer: StreamWriter,\n    ) -> anyhow::Result<Attachment> {\n        if self.defunct {\n            return Err(anyhow!(\"session defunct\"));\n        } else if !operator {\n            unimplemented!()\n        } else if self.operator_attachment_id.is_some() {\n            return Err(anyhow!(\"session already has an operator\"));\n        }\n\n        info!(\n            session_id = self.id,\n            attachment_id = id,\n            operator,\n            \"new attachment\"\n        );\n\n        let (events_send, events_recv) = crossbeam_channel::unbounded();\n        let (ready_send, 
ready_recv) = oneshot::channel();\n        if self\n            .control_sender\n            .send(ControlMessage::Attach {\n                id,\n                sender: events_send,\n                video_params,\n                audio_params,\n                stream_writer,\n                ready: ready_send,\n            })\n            .is_err()\n        {\n            self.defunct = true;\n            bail!(\"compositor died\");\n        }\n\n        if ready_recv.recv_timeout(ATTACH_TIMEOUT).is_err() {\n            let _ = self.control_sender.send(ControlMessage::Detach(id));\n            bail!(\"attachment rejected\");\n        }\n\n        self.operator_attachment_id = Some(id);\n        self.detached_since = None;\n\n        Ok(Attachment {\n            session_id: self.id,\n            attachment_id: id,\n            events: events_recv,\n            control: self.control_sender.clone(),\n        })\n    }\n\n    pub fn detach(&mut self, attachment: Attachment) -> anyhow::Result<()> {\n        if self.defunct {\n            return Err(anyhow!(\"session defunct\"));\n        }\n\n        self.operator_attachment_id = None;\n        self.detached_since = Some(time::Instant::now());\n        match self\n            .control_sender\n            .send(ControlMessage::Detach(attachment.attachment_id))\n        {\n            Ok(_) => Ok(()),\n            Err(crossbeam::SendError(_)) => {\n                self.defunct = true;\n                Err(anyhow!(\"compositor died\"))\n            }\n        }\n    }\n\n    pub fn stop(self) -> anyhow::Result<()> {\n        if let Err(crossbeam::TrySendError::Full(_)) =\n            self.control_sender.try_send(ControlMessage::Stop)\n        {\n            bail!(\"compositor channel full\");\n        }\n\n        match self.comp_thread_handle.join() {\n            Ok(Ok(())) => Ok(()),\n            Ok(Err(e)) => Err(e),\n            Err(v) => Err(anyhow!(\"compositor thread panicked: {:?}\", v)),\n        }\n    }\n\n    
pub fn supports_stream(&self, params: VideoStreamParams) -> bool {\n        if params.width != self.display_params.width || params.height != self.display_params.height\n        {\n            return false;\n        }\n\n        probe_codec(self.vk.clone(), params.codec)\n    }\n}\n"
  },
  {
    "path": "mm-server/src/state.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::sync::Arc;\n\nuse hashbrown::HashMap;\nuse parking_lot::Mutex;\nuse tracing::{error, info};\n\nuse crate::config::Config;\nuse crate::{session::Session, vulkan::VkContext};\n\npub type SharedState = Arc<Mutex<ServerState>>;\n\npub struct ServerState {\n    // TODO: we'd rather use a BTreeMap, but we want\n    // hashbrown::HashMap::extract_if.\n    pub sessions: HashMap<u64, Session>,\n    pub session_seq: usize,\n    pub id_generator: tiny_id::ShortCodeGenerator<char>,\n    pub cfg: Config,\n    pub vk: Arc<VkContext>,\n}\n\nimpl ServerState {\n    pub fn new(vk: Arc<VkContext>, cfg: Config) -> Self {\n        Self {\n            vk,\n            cfg,\n            sessions: HashMap::new(),\n            session_seq: 0,\n            id_generator: tiny_id::ShortCodeGenerator::new_numeric(6),\n        }\n    }\n\n    pub fn generate_session_id(&mut self) -> (usize, u64) {\n        let seq = self.session_seq;\n        self.session_seq += 1;\n\n        (seq, self.id_generator.next_int())\n    }\n\n    /// Run periodic cleanup, e.g. 
ending defunct sessions.\n    pub fn tick(&mut self) -> anyhow::Result<()> {\n        self.sessions\n            .extract_if(|_, s| {\n                if s.defunct {\n                    info!(\"cleaning up defunct session {}\", s.id);\n                    return true;\n                }\n\n                let session_timeout = self.cfg.apps[&s.application_id].session_timeout;\n                if s.detached_since\n                    .zip(session_timeout)\n                    .is_some_and(|(t, timeout)| t.elapsed() > timeout)\n                {\n                    info!(\"cleaning up idle session {}\", s.id);\n                    true\n                } else {\n                    false\n                }\n            })\n            .for_each(|(_, s)| match s.stop() {\n                Ok(()) => {}\n                Err(e) => {\n                    error!(\"session ended with error: {:#}\", e);\n                }\n            });\n        Ok(())\n    }\n}\n"
  },
  {
    "path": "mm-server/src/vulkan/chain.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\n/// Used to construct a pinned chain of vulkan structures.\n///\n/// Ash provides the builder pattern for generating temporary input structs on\n/// the stack, but it doesn't work well with structs stored on the heap for\n/// re-use. Part of the reason for that is that the `p_next` pointer mechanism\n/// is out of scope of the borrow checker.\n///\n/// If we want to store a chain of structs in a `Box`, we should also `Pin` it,\n/// since the holding struct is effectively self-referential. This macro handles\n/// the boilerplate for that, by:\n///\n///  - Generating a constructor for the struct that returns `Pin<Box<Self>>`\n///  - Generating `AsRef<T>` and `AsMut<T>` for the first struct in the chain\n///  - Generating `with_foo` methods that allow you to replace one struct in the\n///    chain (presumably using the builder pattern).\n///\n/// Besides letting us reuse allocations for heavy structs, this also achieves a\n/// level of polymorphism, since calling code can take an `impl AsRef<T>` where\n/// `T` is the first struct, and generalize over the remaining chain.\n///\n/// Note that the chain is always created and maintained in declaration order,\n/// with the first field being the \"head\" and the head's `p_next` pointer\n/// pointing to the second field, and so on.\nmacro_rules! vk_chain {\n    (\n        $(#[$meta:meta])*\n        $vis:vis struct $Chain:ident <$lifetime:lifetime> {\n            $(#[$head_meta:meta])*\n            pub $HeadName:ident: $HeadStruct:ty,\n            $(\n                $(#[$field_meta:meta])*\n                pub $Name:ident: $Struct:ty,\n            )+\n        }\n    ) => {\n        paste::paste! 
{\n            $(#[$meta])*\n            $vis struct [<$Chain Inner>] <$lifetime> {\n                $(#[$head_meta])*\n                pub $HeadName: $HeadStruct,\n                $(\n                    $(#[$field_meta])*\n                    pub $Name: $Struct,\n                )*\n            }\n\n            $vis struct $Chain(std::pin::Pin<Box<[<$Chain Inner>] <'static> >>);\n\n            unsafe impl Send for $Chain {}\n\n            #[allow(dead_code)]\n            impl $Chain {\n                pub fn new<$lifetime: 'static>($HeadName: $HeadStruct, $($Name: $Struct,)*) -> Self {\n                    let mut ch = Box::pin([<$Chain Inner>] {\n                        $HeadName,\n                        $($Name,)*\n                    });\n\n                    __set_p_next!(ch, $HeadName, $($Name),*);\n                    Self(ch)\n                }\n\n                $(\n                    #[doc = \"Replaces the `\" $Name \"` field with the new (or modified) struct returned by `f`. 
Maintains the `p_next` chain.\"]\n                    pub fn [<with_ $Name>]<$lifetime: 'static, F>(&mut self, f: F)\n                    where\n                        F: FnOnce($Struct) -> $Struct,\n                    {\n                        let p_next = self.0.$Name.p_next;\n                        self.0.$Name = f(self.$Name);\n                        self.0.$Name.p_next = p_next;\n                    }\n                )*\n            }\n\n            impl Default for $Chain {\n                fn default() -> Self {\n                    Self::new(__replace_expr!(($HeadStruct) Default::default()), $(__replace_expr!(($Struct) Default::default()),)*)\n                }\n            }\n\n            impl std::ops::Deref for $Chain {\n                type Target = [<$Chain Inner>]<'static>;\n\n                fn deref(&self) -> &Self::Target {\n                    std::pin::Pin::deref(&self.0)\n                }\n            }\n        }\n\n        impl<$lifetime: 'static> AsRef<$HeadStruct> for $Chain {\n            fn as_ref(&self) -> &$HeadStruct {\n                &self.0.as_ref().get_ref().$HeadName\n            }\n        }\n\n        impl<$lifetime: 'static> AsMut<$HeadStruct> for $Chain {\n            fn as_mut(&mut self) -> &mut $HeadStruct {\n                &mut self.0.as_mut().get_mut().$HeadName\n            }\n        }\n    };\n}\n\nmacro_rules! __set_p_next(\n    ($target:ident, $head:ident, $next:ident) => {\n        $target.$head.p_next = <*mut _>::cast(&mut $target.$next);\n    };\n    ($target:ident, $head:ident, $next:ident, $($tail:ident),+) => {\n        $target.$head.p_next = <*mut _>::cast(&mut $target.$next);\n        __set_p_next!($target, $next, $($tail),+);\n    };\n);\n\nmacro_rules! 
__replace_expr {\n    ($_t:tt $sub:expr) => {\n        $sub\n    };\n}\n\npub(crate) use __replace_expr;\npub(crate) use __set_p_next;\npub(crate) use vk_chain;\n\n#[cfg(test)]\nmod tests {\n    use ash::vk;\n\n    #[test]\n    fn test_chain() {\n        vk_chain! {\n            pub struct H264EncodeProfile<'a> {\n                pub profile: vk::VideoProfileInfoKHR<'a>,\n                pub encode_usage_info: vk::VideoEncodeUsageInfoKHR<'a>,\n                pub h264_profile: vk::VideoEncodeH264ProfileInfoEXT<'a>,\n            }\n        }\n\n        let mut chain = H264EncodeProfile::new(\n            vk::VideoProfileInfoKHR::default(),\n            vk::VideoEncodeUsageInfoKHR::default(),\n            vk::VideoEncodeH264ProfileInfoEXT::default(),\n        );\n\n        chain.with_encode_usage_info(|info| {\n            info.video_usage_hints(vk::VideoEncodeUsageFlagsKHR::STREAMING)\n        });\n\n        assert_eq!(\n            chain.encode_usage_info.video_usage_hints,\n            vk::VideoEncodeUsageFlagsKHR::STREAMING\n        );\n    }\n}\n"
  },
  {
    "path": "mm-server/src/vulkan/drm.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    fs::{File, OpenOptions},\n    os::fd::{AsFd, BorrowedFd},\n};\n\nuse anyhow::anyhow;\nuse libc::dev_t;\n\npub struct DrmDevice(File);\n\nimpl AsFd for DrmDevice {\n    fn as_fd(&self) -> BorrowedFd<'_> {\n        self.0.as_fd()\n    }\n}\n\nimpl drm::Device for DrmDevice {}\nimpl drm::control::Device for DrmDevice {}\n\nimpl DrmDevice {\n    pub fn new(dev: dev_t) -> anyhow::Result<Self> {\n        let path = drm::node::DrmNode::from_dev_id(dev)?\n            .dev_path()\n            .ok_or(anyhow!(\"no device file found\"))?;\n\n        let mut options = OpenOptions::new();\n        options.read(true);\n        options.write(true);\n\n        Ok(Self(options.open(path)?))\n    }\n}\n"
  },
  {
    "path": "mm-server/src/vulkan/timeline.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::{\n    os::fd::{IntoRawFd as _, OwnedFd},\n    sync::Arc,\n};\n\nuse anyhow::Context as _;\nuse ash::vk;\nuse tracing::instrument;\n\nuse super::VkContext;\n\n#[derive(Clone)]\npub struct VkTimelineSemaphore(Arc<Inner>);\n\nstruct Inner {\n    vk: Arc<VkContext>,\n    sema: vk::Semaphore,\n}\n\n#[derive(Clone)]\npub struct VkTimelinePoint(Arc<Inner>, u64);\n\nimpl From<VkTimelinePoint> for u64 {\n    fn from(value: VkTimelinePoint) -> Self {\n        value.1\n    }\n}\n\nimpl std::ops::Add<u64> for VkTimelinePoint {\n    type Output = Self;\n\n    fn add(self, rhs: u64) -> Self {\n        Self(self.0, self.1 + rhs)\n    }\n}\n\nimpl std::ops::Add<u64> for &VkTimelinePoint {\n    type Output = VkTimelinePoint;\n\n    fn add(self, rhs: u64) -> Self::Output {\n        VkTimelinePoint(self.0.clone(), self.1 + rhs)\n    }\n}\n\nimpl std::ops::AddAssign<u64> for VkTimelinePoint {\n    fn add_assign(&mut self, rhs: u64) {\n        self.1 += rhs\n    }\n}\n\nimpl VkTimelineSemaphore {\n    pub fn new(vk: Arc<VkContext>, initial_value: u64) -> anyhow::Result<Self> {\n        let sema = unsafe {\n            vk.device\n                .create_semaphore(\n                    &vk::SemaphoreCreateInfo::default().push_next(\n                        &mut vk::SemaphoreTypeCreateInfo::default()\n                            .semaphore_type(vk::SemaphoreType::TIMELINE)\n                            .initial_value(initial_value),\n                    ),\n                    None,\n                )\n                .context(\"VkCreateSemaphore\")?\n        };\n\n        Ok(Self(Arc::new(Inner { vk, sema })))\n    }\n\n    pub fn from_syncobj_fd(vk: Arc<VkContext>, fd: OwnedFd) -> anyhow::Result<Self> {\n        let sema = Self::new(vk.clone(), 0)?;\n\n        let import_info = vk::ImportSemaphoreFdInfoKHR::default()\n            .semaphore(sema.as_semaphore())\n         
   .handle_type(vk::ExternalSemaphoreHandleTypeFlags::OPAQUE_FD)\n            .fd(fd.into_raw_fd()); // Vulkan owns the fd now.\n\n        unsafe {\n            vk.external_semaphore_api\n                .import_semaphore_fd(&import_info)\n                .context(\"VkImportSemaphoreFdKHR\")?;\n        }\n\n        Ok(sema)\n    }\n\n    pub fn new_point(&self, value: u64) -> VkTimelinePoint {\n        VkTimelinePoint(self.0.clone(), value)\n    }\n\n    pub fn as_semaphore(&self) -> vk::Semaphore {\n        self.0.sema\n    }\n}\n\nimpl VkTimelinePoint {\n    pub fn value(&self) -> u64 {\n        self.1\n    }\n\n    pub fn timeline(&self) -> VkTimelineSemaphore {\n        VkTimelineSemaphore(self.0.clone())\n    }\n\n    #[instrument(skip_all)]\n    pub unsafe fn wait(&self) -> anyhow::Result<()> {\n        let device = &self.0.vk.device;\n        device.wait_semaphores(\n            &vk::SemaphoreWaitInfo::default()\n                .semaphores(&[self.0.sema])\n                .values(&[self.1]),\n            1_000_000_000, // 1 second\n        )?;\n\n        Ok(())\n    }\n\n    #[instrument(skip_all)]\n    pub unsafe fn signal(&self) -> anyhow::Result<()> {\n        let device = &self.0.vk.device;\n        device.signal_semaphore(\n            &vk::SemaphoreSignalInfo::default()\n                .semaphore(self.0.sema)\n                .value(self.1),\n        )?;\n\n        Ok(())\n    }\n\n    pub unsafe fn poll(&self) -> anyhow::Result<bool> {\n        let device = &self.0.vk.device;\n        let value = device.get_semaphore_counter_value(self.0.sema)?;\n        Ok(value >= self.1)\n    }\n}\n\nimpl Drop for Inner {\n    fn drop(&mut self) {\n        unsafe {\n            self.vk.device.device_wait_idle().unwrap();\n            self.vk.device.destroy_semaphore(self.sema, None)\n        };\n    }\n}\n"
  },
  {
    "path": "mm-server/src/vulkan/video.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse ash::prelude::*;\nuse ash::vk;\nuse ash::RawPtr;\n\npub struct VideoQueueExt {\n    handle: vk::Device,\n    fp: vk::KhrVideoQueueFn,\n}\n\n#[allow(dead_code)]\nimpl VideoQueueExt {\n    pub fn new(entry: &ash::Entry, instance: &ash::Instance, device: &ash::Device) -> Self {\n        let handle = device.handle();\n        let fp = vk::KhrVideoQueueFn::load(|name| unsafe {\n            std::mem::transmute(entry.get_instance_proc_addr(instance.handle(), name.as_ptr()))\n        });\n\n        Self { handle, fp }\n    }\n\n    #[inline]\n    pub fn name() -> &'static std::ffi::CStr {\n        vk::KhrVideoDecodeQueueFn::NAME\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkBindVideoSessionMemoryKHR.html>\n    pub unsafe fn bind_video_session_memory(\n        &self,\n        device: &ash::Device,\n        video_session: vk::VideoSessionKHR,\n        bind_session_memory_infos: &[vk::BindVideoSessionMemoryInfoKHR],\n    ) -> VkResult<()> {\n        (self.fp.bind_video_session_memory_khr)(\n            device.handle(),\n            video_session,\n            bind_session_memory_infos.len() as u32,\n            bind_session_memory_infos.as_ptr(),\n        )\n        .result()\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBeginVideoCodingKHR.html>\n    pub unsafe fn cmd_begin_video_coding(\n        &self,\n        command_buffer: vk::CommandBuffer,\n        begin_info: &vk::VideoBeginCodingInfoKHR,\n    ) {\n        (self.fp.cmd_begin_video_coding_khr)(command_buffer, begin_info);\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdControlVideoCodingKHR.html>\n    pub unsafe fn cmd_control_video_coding(\n        &self,\n        command_buffer: vk::CommandBuffer,\n        coding_control_info: 
&vk::VideoCodingControlInfoKHR,\n    ) {\n        (self.fp.cmd_control_video_coding_khr)(command_buffer, coding_control_info);\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdEndVideoCodingKHR.html>\n    pub unsafe fn cmd_end_video_coding(\n        &self,\n        command_buffer: vk::CommandBuffer,\n        end_coding_info: &vk::VideoEndCodingInfoKHR,\n    ) {\n        (self.fp.cmd_end_video_coding_khr)(command_buffer, end_coding_info);\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateVideoSessionKHR.html>\n    pub unsafe fn create_video_session(\n        &self,\n        create_info: &vk::VideoSessionCreateInfoKHR,\n        allocation_callbacks: Option<&vk::AllocationCallbacks>,\n    ) -> VkResult<vk::VideoSessionKHR> {\n        let mut video_session = std::mem::zeroed();\n        (self.fp.create_video_session_khr)(\n            self.handle,\n            create_info,\n            allocation_callbacks.as_raw_ptr(),\n            &mut video_session,\n        )\n        .result_with_success(video_session)\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateVideoSessionParametersKHR.html>\n    pub unsafe fn create_video_session_parameters(\n        &self,\n        create_info: &vk::VideoSessionParametersCreateInfoKHR,\n        allocation_callbacks: Option<&vk::AllocationCallbacks>,\n    ) -> VkResult<vk::VideoSessionParametersKHR> {\n        let mut video_session_parameters = std::mem::zeroed();\n        (self.fp.create_video_session_parameters_khr)(\n            self.handle,\n            create_info,\n            allocation_callbacks.as_raw_ptr(),\n            &mut video_session_parameters,\n        )\n        .result_with_success(video_session_parameters)\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyVideoSessionKHR.html>\n    pub unsafe 
fn destroy_video_session(\n        &self,\n        video_session: vk::VideoSessionKHR,\n        allocation_callbacks: Option<&vk::AllocationCallbacks>,\n    ) {\n        (self.fp.destroy_video_session_khr)(\n            self.handle,\n            video_session,\n            allocation_callbacks.as_raw_ptr(),\n        );\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyVideoSessionParametersKHR.html>\n    pub unsafe fn destroy_video_session_parameters(\n        &self,\n        video_session_parameters: vk::VideoSessionParametersKHR,\n        allocation_callbacks: Option<&vk::AllocationCallbacks>,\n    ) {\n        (self.fp.destroy_video_session_parameters_khr)(\n            self.handle,\n            video_session_parameters,\n            allocation_callbacks.as_raw_ptr(),\n        );\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html>\n    pub unsafe fn get_physical_device_video_capabilities(\n        &self,\n        physical_device: vk::PhysicalDevice,\n        video_profile: &vk::VideoProfileInfoKHR,\n        capabilities: &mut vk::VideoCapabilitiesKHR,\n    ) -> VkResult<()> {\n        (self.fp.get_physical_device_video_capabilities_khr)(\n            physical_device,\n            video_profile,\n            capabilities,\n        )\n        .result()\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html>\n    pub unsafe fn get_physical_device_video_format_properties(\n        &self,\n        physical_device: vk::PhysicalDevice,\n        video_format_info: &vk::PhysicalDeviceVideoFormatInfoKHR,\n    ) -> VkResult<Vec<vk::VideoFormatPropertiesKHR>> {\n        read_into_defaulted_vector(|count, data| {\n            (self.fp.get_physical_device_video_format_properties_khr)(\n                physical_device,\n               
 video_format_info,\n                count,\n                data,\n            )\n        })\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetVideoSessionMemoryRequirementsKHR.html>\n    pub unsafe fn get_video_session_memory_requirements(\n        &self,\n        video_session: vk::VideoSessionKHR,\n    ) -> VkResult<Vec<vk::VideoSessionMemoryRequirementsKHR>> {\n        read_into_defaulted_vector(|count, data| {\n            (self.fp.get_video_session_memory_requirements_khr)(\n                self.handle,\n                video_session,\n                count,\n                data,\n            )\n        })\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkUpdateVideoSessionParametersKHR.html>\n    pub unsafe fn update_video_session_parameters(\n        &self,\n        video_session_parameters: vk::VideoSessionParametersKHR,\n        update_info: &vk::VideoSessionParametersUpdateInfoKHR,\n    ) -> VkResult<()> {\n        (self.fp.update_video_session_parameters_khr)(\n            self.handle,\n            video_session_parameters,\n            update_info,\n        )\n        .result()\n    }\n}\n\npub struct VideoDecodeQueueExt {\n    fp: vk::KhrVideoDecodeQueueFn,\n}\n\n#[allow(dead_code)]\nimpl VideoDecodeQueueExt {\n    pub fn new(entry: &ash::Entry, instance: &ash::Instance) -> Self {\n        let fp = vk::KhrVideoDecodeQueueFn::load(|name| unsafe {\n            std::mem::transmute(entry.get_instance_proc_addr(instance.handle(), name.as_ptr()))\n        });\n\n        Self { fp }\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDecodeVideoKHR.html>\n    pub unsafe fn cmd_decode_video(\n        &self,\n        command_buffer: vk::CommandBuffer,\n        decode_info: &vk::VideoDecodeInfoKHR,\n    ) {\n        (self.fp.cmd_decode_video_khr)(command_buffer, decode_info);\n    }\n}\n\npub struct 
VideoEncodeQueueExt {\n    handle: vk::Device,\n    fp: vk::KhrVideoEncodeQueueFn,\n}\n\n#[allow(dead_code)]\nimpl VideoEncodeQueueExt {\n    pub fn new(entry: &ash::Entry, instance: &ash::Instance, device: &ash::Device) -> Self {\n        let handle = device.handle();\n        let fp = vk::KhrVideoEncodeQueueFn::load(|name| unsafe {\n            std::mem::transmute(entry.get_instance_proc_addr(instance.handle(), name.as_ptr()))\n        });\n\n        Self { handle, fp }\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html>\n    pub unsafe fn get_physical_device_video_encode_quality_level_properties(\n        &self,\n        physical_device: vk::PhysicalDevice,\n        quality_level_info: &vk::PhysicalDeviceVideoEncodeQualityLevelInfoKHR,\n        quality_level_properties: &mut vk::VideoEncodeQualityLevelPropertiesKHR,\n    ) -> VkResult<()> {\n        (self\n            .fp\n            .get_physical_device_video_encode_quality_level_properties_khr)(\n            physical_device,\n            quality_level_info,\n            quality_level_properties,\n        )\n        .result()\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdEncodeVideoKHR.html>\n    pub unsafe fn cmd_encode_video(\n        &self,\n        command_buffer: vk::CommandBuffer,\n        encode_info: &vk::VideoEncodeInfoKHR,\n    ) {\n        (self.fp.cmd_encode_video_khr)(command_buffer, encode_info);\n    }\n\n    #[inline]\n    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetEncodedVideoSessionParametersKHR.html>\n    pub unsafe fn get_encoded_video_session_parameters(\n        &self,\n        session_parameters_info: &vk::VideoEncodeSessionParametersGetInfoKHR,\n        info: &mut vk::VideoEncodeSessionParametersFeedbackInfoKHR,\n    ) -> VkResult<Vec<u8>> {\n        let ptr = info as *mut _;\n        
read_into_uninitialized_vector(|count, data: *mut u8| {\n            (self.fp.get_encoded_video_session_parameters_khr)(\n                self.handle,\n                session_parameters_info,\n                ptr,\n                count,\n                data.cast(),\n            )\n        })\n    }\n}\n\n// Copied from ash.\n/// Repeatedly calls `f` until it does not return [`vk::Result::INCOMPLETE`]\n/// anymore, ensuring all available data has been read into the vector.\n///\n/// See for example [`vkEnumerateInstanceExtensionProperties`]: the number of\n/// available items may change between calls; [`vk::Result::INCOMPLETE`] is\n/// returned when the count increased (and the vector is not large enough after\n/// querying the initial size), requiring Ash to try again.\n///\n/// [`vkEnumerateInstanceExtensionProperties`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceExtensionProperties.html\npub(crate) unsafe fn read_into_uninitialized_vector<N: Copy + Default + TryInto<usize>, T>(\n    f: impl Fn(&mut N, *mut T) -> vk::Result,\n) -> VkResult<Vec<T>>\nwhere\n    <N as TryInto<usize>>::Error: std::fmt::Debug,\n{\n    loop {\n        let mut count = N::default();\n        f(&mut count, std::ptr::null_mut()).result()?;\n        let mut data =\n            Vec::with_capacity(count.try_into().expect(\"`N` failed to convert to `usize`\"));\n\n        let err_code = f(&mut count, data.as_mut_ptr());\n        if err_code != vk::Result::INCOMPLETE {\n            break err_code.set_vec_len_on_success(\n                data,\n                count.try_into().expect(\"`N` failed to convert to `usize`\"),\n            );\n        }\n    }\n}\n\n/// Repeatedly calls `f` until it does not return [`vk::Result::INCOMPLETE`]\n/// anymore, ensuring all available data has been read into the vector.\n///\n/// Items in the target vector are [`default()`][Default::default()]-initialized\n/// which is required for 
[`vk::BaseOutStructure`]-like structs where\n/// [`vk::BaseOutStructure::s_type`] needs to be a valid type and\n/// [`vk::BaseOutStructure::p_next`] a valid or [`null`][std::ptr::null_mut()]\n/// pointer.\n///\n/// See for example [`vkEnumerateInstanceExtensionProperties`]: the number of\n/// available items may change between calls; [`vk::Result::INCOMPLETE`] is\n/// returned when the count increased (and the vector is not large enough after\n/// querying the initial size), requiring Ash to try again.\n///\n/// [`vkEnumerateInstanceExtensionProperties`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceExtensionProperties.html\npub(crate) unsafe fn read_into_defaulted_vector<\n    N: Copy + Default + TryInto<usize>,\n    T: Default + Clone,\n>(\n    f: impl Fn(&mut N, *mut T) -> vk::Result,\n) -> VkResult<Vec<T>>\nwhere\n    <N as TryInto<usize>>::Error: std::fmt::Debug,\n{\n    loop {\n        let mut count = N::default();\n        f(&mut count, std::ptr::null_mut()).result()?;\n        let mut data =\n            vec![Default::default(); count.try_into().expect(\"`N` failed to convert to `usize`\")];\n\n        let err_code = f(&mut count, data.as_mut_ptr());\n        if err_code != vk::Result::INCOMPLETE {\n            data.set_len(count.try_into().expect(\"`N` failed to convert to `usize`\"));\n            break err_code.result_with_success(data);\n        }\n    }\n}\n"
  },
  {
    "path": "mm-server/src/vulkan.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\n#![allow(clippy::too_many_arguments)]\n\nmod chain;\nmod drm;\nmod timeline;\npub mod video;\n\nuse std::ffi::{c_void, CStr, CString};\nuse std::sync::Arc;\n\nuse anyhow::{bail, Context, Result};\nuse ash::extensions::{ext, khr};\nuse ash::vk;\npub(crate) use chain::*;\nuse cstr::cstr;\npub use timeline::*;\nuse tracing::{debug, error, info, instrument, warn};\n\nuse self::video::{VideoEncodeQueueExt, VideoQueueExt};\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum Vendor {\n    Amd,\n    Nvidia,\n    Other,\n}\n\n#[derive(Debug, Clone, PartialEq, Eq)]\npub enum DriverVersion {\n    MesaRadv { major: u32, minor: u32, patch: u32 },\n    NvidiaProprietary { major: u32, minor: u32 },\n    Other(String),\n}\n\npub struct VkContext {\n    pub entry: ash::Entry,\n    pub push_ds_api: khr::PushDescriptor,\n    pub external_memory_api: khr::ExternalMemoryFd,\n    pub external_semaphore_api: khr::ExternalSemaphoreFd,\n    pub video_apis: Option<(VideoQueueExt, VideoEncodeQueueExt)>,\n\n    pub instance: ash::Instance,\n    pub debug: Option<VkDebugContext>,\n    pub device: ash::Device,\n    pub device_info: VkDeviceInfo,\n    pub drm_device: drm::DrmDevice,\n    pub graphics_queue: VkQueue,\n    pub encode_queue: Option<VkQueue>,\n    pub descriptor_pool: vk::DescriptorPool,\n}\n\npub struct VkDebugContext {\n    debug: ext::DebugUtils,\n    messenger: vk::DebugUtilsMessengerEXT,\n}\n\n#[derive(Clone)]\npub struct VkQueue {\n    pub family: u32,\n    pub queue: vk::Queue,\n    pub command_pool: vk::CommandPool, // TODO: synchronize access\n\n    #[allow(unused)]\n    pub tracy_context: Option<tracy_client::GpuContext>,\n}\n\nimpl VkQueue {\n    pub fn new(\n        device: &ash::Device,\n        _pdevice: &VkDeviceInfo,\n        _props: vk::QueueFamilyProperties,\n        family: u32,\n        _name: &str,\n    ) -> Result<Self> {\n        let queue = 
unsafe { device.get_device_queue(family, 0) };\n\n        let command_pool = unsafe {\n            let create_info = vk::CommandPoolCreateInfo::default()\n                .queue_family_index(family)\n                .flags(vk::CommandPoolCreateFlags::RESET_COMMAND_BUFFER);\n\n            device.create_command_pool(&create_info, None)?\n        };\n\n        #[cfg(feature = \"tracy\")]\n        let tracy_context = tracy_client::Client::running().and_then(|client| {\n            if _props.timestamp_valid_bits == 0 {\n                debug!(\n                    \"queue family {:?} does not support timestamp queries\",\n                    family\n                );\n                return None;\n            }\n\n            match init_tracy_context(device, _pdevice, queue, command_pool, client, _name) {\n                Ok(ctx) => Some(ctx),\n                Err(err) => {\n                    error!(\"failed to initialize tracy GPU context: {err}\");\n                    None\n                }\n            }\n        });\n\n        #[cfg(not(feature = \"tracy\"))]\n        let tracy_context = None;\n\n        Ok(Self {\n            family,\n            queue,\n            command_pool,\n            tracy_context,\n        })\n    }\n}\n\npub struct VkDeviceInfo {\n    pub pdevice: vk::PhysicalDevice,\n    pub device_name: CString,\n    pub device_type: vk::PhysicalDeviceType,\n    pub device_vendor: Vendor,\n    pub driver_version: DriverVersion,\n    pub limits: vk::PhysicalDeviceLimits,\n    pub drm_node: libc::dev_t,\n    pub graphics_family: u32,\n    pub encode_family: Option<u32>,\n    pub supports_h264: bool,\n    pub supports_h265: bool,\n    pub supports_av1: bool,\n    pub memory_props: vk::PhysicalDeviceMemoryProperties,\n    pub host_visible_mem_type_index: u32,\n    pub host_mem_is_cached: bool,\n    pub selected_extensions: Vec<CString>,\n}\n\nimpl VkDeviceInfo {\n    fn query(instance: &ash::Instance, device: vk::PhysicalDevice) -> Result<Self> {\n    
    let mut drm_props = vk::PhysicalDeviceDrmPropertiesEXT::default();\n        let mut host_mem_props = vk::PhysicalDeviceExternalMemoryHostPropertiesEXT::default();\n        let mut driver_props = vk::PhysicalDeviceDriverPropertiesKHR::default();\n        let mut props = vk::PhysicalDeviceProperties2::default()\n            .push_next(&mut drm_props)\n            .push_next(&mut host_mem_props)\n            .push_next(&mut driver_props);\n        unsafe { instance.get_physical_device_properties2(device, &mut props) };\n\n        let limits = props.properties.limits;\n        let device_type = props.properties.device_type;\n        let device_name =\n            unsafe { CStr::from_ptr(props.properties.device_name.as_ptr()).to_owned() };\n        let device_vendor = match props.properties.vendor_id {\n            0x1002 => Vendor::Amd,\n            0x10de => Vendor::Nvidia,\n            _ => Vendor::Other,\n        };\n\n        let version = props.properties.driver_version;\n        let driver_version = match driver_props.driver_id {\n            vk::DriverId::MESA_RADV => DriverVersion::MesaRadv {\n                major: vk::api_version_major(version),\n                minor: vk::api_version_minor(version),\n                patch: vk::api_version_patch(version),\n            },\n            vk::DriverId::NVIDIA_PROPRIETARY => DriverVersion::NvidiaProprietary {\n                major: (version >> 22) & 0x3ff,\n                minor: (version >> 14) & 0x0ff,\n            },\n            _ => DriverVersion::Other(\n                CStr::from_bytes_with_nul(&driver_props.driver_info.map(|x| x as u8)[..])\n                    .unwrap_or(c\"unknown\")\n                    .to_str()\n                    .unwrap_or(\"unknown\")\n                    .to_owned(),\n            ),\n        };\n\n        if drm_props.render_major != 226 || drm_props.render_minor < 128 {\n            bail!(\"device {:?} is not a render node\", device_name);\n        }\n\n        let drm_node 
= libc::makedev(drm_props.render_major as u32, drm_props.render_minor as u32);\n\n        let queue_families = unsafe {\n            instance\n                .get_physical_device_queue_family_properties(device)\n                .into_iter()\n                .collect::<Vec<_>>()\n        };\n\n        let graphics_family = queue_families\n            .iter()\n            .enumerate()\n            .find(|(_, properties)| {\n                properties.queue_flags.contains(vk::QueueFlags::GRAPHICS)\n                    && properties.queue_flags.contains(vk::QueueFlags::COMPUTE)\n            })\n            .map(|(index, _)| index as u32)\n            .to_owned()\n            .ok_or_else(|| anyhow::anyhow!(\"no graphics queue found\"))?;\n\n        let encode_family = queue_families\n            .iter()\n            .enumerate()\n            .find(|(_, properties)| {\n                properties\n                    .queue_flags\n                    .contains(vk::QueueFlags::VIDEO_ENCODE_KHR)\n            })\n            .map(|(index, _)| index as u32);\n\n        let available_extensions = unsafe {\n            instance\n                .enumerate_device_extension_properties(device)\n                .unwrap()\n                .into_iter()\n                .map(|properties| CStr::from_ptr(&properties.extension_name as *const _).to_owned())\n                .collect::<Vec<_>>()\n        };\n\n        let mut selected_extensions = vec![\n            // Push descriptors for compositing.\n            vk::KhrPushDescriptorFn::NAME.to_owned(),\n            // All required for dma-buf import.\n            vk::KhrExternalMemoryFdFn::NAME.to_owned(),\n            vk::KhrExternalSemaphoreFdFn::NAME.to_owned(),\n            vk::ExtExternalMemoryDmaBufFn::NAME.to_owned(),\n            vk::ExtImageDrmFormatModifierFn::NAME.to_owned(),\n            vk::ExtPhysicalDeviceDrmFn::NAME.to_owned(),\n            vk::ExtQueueFamilyForeignFn::NAME.to_owned(),\n        ];\n\n        for ext in 
selected_extensions.iter() {\n            if !contains_extension(&available_extensions, ext) {\n                bail!(\"extension {:?} not available\", ext);\n            }\n        }\n\n        let ext_video_queue = vk::KhrVideoQueueFn::NAME;\n        let ext_video_encode_queue = vk::KhrVideoEncodeQueueFn::NAME;\n\n        // TODO: ash hasn't picked up the promoted names yet.\n        let ext_h264 = cstr!(\"VK_KHR_video_encode_h264\");\n        let ext_h265 = cstr!(\"VK_KHR_video_encode_h265\");\n\n        // This doesn't exist yet.\n        let ext_av1 = cstr!(\"VK_EXT_video_encode_av1\");\n\n        let mut supports_h264 = false;\n        let mut supports_h265 = false;\n        let mut supports_av1 = false;\n        if encode_family.is_some()\n            && contains_extension(&available_extensions, ext_video_queue)\n            && contains_extension(&available_extensions, ext_video_encode_queue)\n        {\n            selected_extensions.push(ext_video_encode_queue.to_owned());\n            selected_extensions.push(ext_video_queue.to_owned());\n\n            if contains_extension(&available_extensions, ext_h264) {\n                supports_h264 = true;\n                selected_extensions.push(ext_h264.to_owned());\n            }\n\n            if contains_extension(&available_extensions, ext_h265) {\n                supports_h265 = true;\n                selected_extensions.push(ext_h265.to_owned());\n            }\n            if contains_extension(&available_extensions, ext_av1) {\n                supports_av1 = true;\n                selected_extensions.push(ext_av1.to_owned());\n            }\n        }\n\n        if !supports_av1 && !supports_h265 && !supports_h264 {\n            bail!(\"hardware encode extensions not available\");\n        }\n\n        // We want HOST_CACHED | HOST_COHERENT, but we can make do with just\n        // HOST_VISIBLE.\n        let memory_props = unsafe { instance.get_physical_device_memory_properties(device) };\n\n        let 
(host_visible_mem_type_index, host_mem_is_cached) = {\n            let mut cached = true;\n            let mut idx = select_memory_type(\n                &memory_props,\n                vk::MemoryPropertyFlags::HOST_VISIBLE\n                    | vk::MemoryPropertyFlags::HOST_CACHED\n                    | vk::MemoryPropertyFlags::HOST_COHERENT,\n                None,\n            );\n\n            if idx.is_none() {\n                idx = select_memory_type(\n                    &memory_props,\n                    vk::MemoryPropertyFlags::HOST_VISIBLE | vk::MemoryPropertyFlags::HOST_COHERENT,\n                    None,\n                );\n\n                if idx.is_none() {\n                    bail!(\"no host visible memory type found\");\n                }\n\n                cached = false;\n            }\n\n            (idx.unwrap(), cached)\n        };\n\n        // Make sure we have the features needed for dmabuf import.\n        let mut semaphore_props = vk::ExternalSemaphoreProperties::default();\n        unsafe {\n            let info = vk::PhysicalDeviceExternalSemaphoreInfo::default()\n                .handle_type(vk::ExternalSemaphoreHandleTypeFlags::SYNC_FD);\n\n            instance.get_physical_device_external_semaphore_properties(\n                device,\n                &info,\n                &mut semaphore_props,\n            );\n        }\n\n        if !semaphore_props\n            .external_semaphore_features\n            .contains(vk::ExternalSemaphoreFeatureFlags::IMPORTABLE)\n        {\n            bail!(\"no support found for importable semaphores\");\n        }\n\n        Ok(Self {\n            pdevice: device,\n            device_name,\n            device_type,\n            device_vendor,\n            driver_version,\n            limits,\n            drm_node,\n            graphics_family,\n            encode_family,\n            supports_h264,\n            supports_h265,\n            supports_av1,\n            memory_props,\n            
host_visible_mem_type_index,\n            host_mem_is_cached,\n            selected_extensions,\n        })\n    }\n}\n\nimpl VkContext {\n    pub fn new(enable_debug: bool) -> Result<Self> {\n        // Try to enable RADV's video support.\n        std::env::set_var(\n            \"RADV_PERFTEST\",\n            std::env::var(\"RADV_PERFTEST\").unwrap_or(\"video_encode,video_decode\".to_string()),\n        );\n\n        let entry = unsafe { ash::Entry::load().context(\"failed to load vulkan libraries!\") }?;\n        debug!(\"creating vulkan instance\");\n\n        let (major, minor) = match unsafe { entry.try_enumerate_instance_version()? } {\n            // Vulkan 1.1+\n            Some(version) => (\n                vk::api_version_major(version),\n                vk::api_version_minor(version),\n            ),\n            // Vulkan 1.0\n            None => (1, 0),\n        };\n\n        if major < 1 || (major == 1 && minor < 3) {\n            return Err(anyhow::anyhow!(\"vulkan 1.3 or higher is required\"));\n        }\n\n        let app_info = vk::ApplicationInfo::default()\n            .application_name(cstr!(\"Magic Mirror\"))\n            .application_version(vk::make_api_version(0, 0, 1, 0))\n            .engine_name(cstr!(\"No Engine\"))\n            .engine_version(vk::make_api_version(0, 0, 1, 0))\n            .api_version(vk::make_api_version(0, major, minor, 0));\n\n        let available_extensions = unsafe {\n            entry\n                .enumerate_instance_extension_properties(None)?\n                .into_iter()\n                .map(|properties| CStr::from_ptr(&properties.extension_name as *const _).to_owned())\n                .collect::<Vec<_>>()\n        };\n\n        let mut extensions = Vec::new();\n        let mut layers = Vec::new();\n\n        if enable_debug {\n            if !available_extensions\n                .iter()\n                .any(|ext| ext.as_c_str() == ext::DebugUtils::NAME)\n            {\n                return 
Err(anyhow::anyhow!(\n                    \"debug utils extension requested, but not available\"\n                ));\n            }\n\n            warn!(\"vulkan validation layers enabled!\");\n            extensions.push(ext::DebugUtils::NAME.as_ptr());\n\n            unsafe {\n                let validation_layer = cstr!(\"VK_LAYER_KHRONOS_validation\");\n                if entry\n                    .enumerate_instance_layer_properties()?\n                    .into_iter()\n                    .map(|properties| CStr::from_ptr(&properties.layer_name as *const _))\n                    .any(|layer| layer == validation_layer)\n                {\n                    layers.push(validation_layer.as_ptr());\n                } else {\n                    warn!(\"validation layers requested, but not available!\")\n                }\n            }\n        }\n\n        let instance_create_info = vk::InstanceCreateInfo::default()\n            .application_info(&app_info)\n            .enabled_layer_names(&layers)\n            .enabled_extension_names(&extensions);\n\n        let instance = unsafe { entry.create_instance(&instance_create_info, None)? 
};\n\n        // Enable validation layers and a debugging callback, if requested.\n        let debug_utils = if enable_debug {\n            let debug_utils = ext::DebugUtils::new(&entry, &instance);\n\n            let create_info = vk::DebugUtilsMessengerCreateInfoEXT::default()\n                .message_severity(\n                    vk::DebugUtilsMessageSeverityFlagsEXT::WARNING\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::INFO\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::ERROR,\n                )\n                .message_type(\n                    vk::DebugUtilsMessageTypeFlagsEXT::GENERAL\n                        | vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE\n                        | vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION,\n                )\n                .pfn_user_callback(Some(vulkan_debug_utils_callback));\n\n            let messenger =\n                unsafe { debug_utils.create_debug_utils_messenger(&create_info, None) }?;\n\n            Some(VkDebugContext {\n                debug: debug_utils,\n                messenger,\n            })\n        } else {\n            None\n        };\n\n        // Select a device based on encoding support.\n        let devices = unsafe { instance.enumerate_physical_devices()? 
};\n        let mut devices = devices\n            .into_iter()\n            .enumerate()\n            .flat_map(|(index, dev)| match VkDeviceInfo::query(&instance, dev) {\n                Ok(device) => Some((index as u32, device)),\n                Err(err) => {\n                    let device_name = unsafe {\n                        CStr::from_ptr(\n                            instance\n                                .get_physical_device_properties(dev)\n                                .device_name\n                                .as_ptr(),\n                        )\n                        .to_owned()\n                    };\n\n                    info!(\"gpu {device_name:?} ineligible: {err}\");\n                    None\n                }\n            })\n            .collect::<Vec<_>>();\n\n        if devices.is_empty() {\n            return Err(anyhow::anyhow!(\"no suitable gpu found\"));\n        }\n\n        devices.sort_by_key(|(_, dev)| {\n            let mut score = match dev.device_type {\n                vk::PhysicalDeviceType::DISCRETE_GPU => 0,\n                vk::PhysicalDeviceType::INTEGRATED_GPU => 10,\n                _ => 20,\n            };\n\n            score += dev.encode_family.is_none() as u32;\n            score += !dev.supports_h264 as u32;\n            score += !dev.supports_h265 as u32;\n            score += !dev.supports_av1 as u32;\n            score\n        });\n\n        let (index, device_info) = devices.remove(0);\n        info!(\"selected gpu: {:?} ({index})\", device_info.device_name);\n\n        let drm_device = drm::DrmDevice::new(device_info.drm_node)?;\n\n        let device = {\n            let queue_priorities = &[1.0];\n            let mut queue_indices = Vec::new();\n            queue_indices.push(device_info.graphics_family);\n            if let Some(idx) = device_info.encode_family {\n                queue_indices.push(idx);\n            }\n\n            queue_indices.dedup();\n            let queue_create_infos 
= queue_indices\n                .iter()\n                .map(|&index| {\n                    vk::DeviceQueueCreateInfo::default()\n                        .queue_family_index(index)\n                        .queue_priorities(queue_priorities)\n                })\n                .collect::<Vec<_>>();\n\n            let mut enabled_1_1_features =\n                vk::PhysicalDeviceVulkan11Features::default().sampler_ycbcr_conversion(true);\n\n            let mut enabled_1_2_features = vk::PhysicalDeviceVulkan12Features::default()\n                .timeline_semaphore(true)\n                .host_query_reset(true);\n\n            let mut enabled_1_3_features = vk::PhysicalDeviceVulkan13Features::default()\n                .dynamic_rendering(true)\n                .synchronization2(true);\n\n            let extension_names = device_info\n                .selected_extensions\n                .iter()\n                .map(|v| v.as_c_str().as_ptr())\n                .collect::<Vec<_>>();\n            let device_create_info = vk::DeviceCreateInfo::default()\n                .queue_create_infos(&queue_create_infos)\n                .enabled_extension_names(&extension_names)\n                .push_next(&mut enabled_1_1_features)\n                .push_next(&mut enabled_1_2_features)\n                .push_next(&mut enabled_1_3_features);\n\n            unsafe { instance.create_device(device_info.pdevice, &device_create_info, None)? 
}\n        };\n\n        let qf_props =\n            unsafe { instance.get_physical_device_queue_family_properties(device_info.pdevice) };\n\n        let graphics_queue = VkQueue::new(\n            &device,\n            &device_info,\n            qf_props[device_info.graphics_family as usize],\n            device_info.graphics_family,\n            \"graphics\",\n        )?;\n\n        let encode_queue = if device_info.encode_family.is_some() {\n            info!(\n                \"hardware encoding support: (h264: {}, h265: {}, av1: {})\",\n                device_info.supports_h264, device_info.supports_h265, device_info.supports_av1\n            );\n\n            Some(VkQueue::new(\n                &device,\n                &device_info,\n                qf_props[device_info.encode_family.unwrap() as usize],\n                device_info.encode_family.unwrap(),\n                \"encode\",\n            )?)\n        } else {\n            warn!(\"no hardware encoding support found!\");\n            None\n        };\n\n        if !device_info.host_mem_is_cached {\n            warn!(\"no cache-coherent memory type found on device!\");\n        }\n\n        let external_memory_api = khr::ExternalMemoryFd::new(&instance, &device);\n        let external_semaphore_api = khr::ExternalSemaphoreFd::new(&instance, &device);\n\n        let video_apis = if device_info.encode_family.is_some() {\n            let video_queue = VideoQueueExt::new(&entry, &instance, &device);\n            let video_encode_queue = VideoEncodeQueueExt::new(&entry, &instance, &device);\n\n            Some((video_queue, video_encode_queue))\n        } else {\n            None\n        };\n\n        let push_ds_api = khr::PushDescriptor::new(&instance, &device);\n\n        let descriptor_pool = {\n            let pool_sizes = [\n                vk::DescriptorPoolSize::default()\n                    .ty(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n                    .descriptor_count(1024),\n             
   vk::DescriptorPoolSize::default()\n                    .ty(vk::DescriptorType::STORAGE_IMAGE)\n                    .descriptor_count(1024),\n            ];\n\n            let create_info = vk::DescriptorPoolCreateInfo::default()\n                .pool_sizes(&pool_sizes)\n                .flags(vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET)\n                .max_sets(1024);\n\n            unsafe { device.create_descriptor_pool(&create_info, None)? }\n        };\n\n        Ok(Self {\n            entry,\n            push_ds_api,\n            external_memory_api,\n            external_semaphore_api,\n            video_apis,\n            instance,\n            device,\n            device_info,\n            drm_device,\n            graphics_queue,\n            encode_queue,\n            descriptor_pool,\n            debug: debug_utils,\n        })\n    }\n}\n\nunsafe extern \"system\" fn vulkan_debug_utils_callback(\n    message_severity: vk::DebugUtilsMessageSeverityFlagsEXT,\n    message_type: vk::DebugUtilsMessageTypeFlagsEXT,\n    p_callback_data: *const vk::DebugUtilsMessengerCallbackDataEXT,\n    _userdata: *mut c_void,\n) -> vk::Bool32 {\n    let _ = std::panic::catch_unwind(|| {\n        let message = unsafe { CStr::from_ptr((*p_callback_data).p_message) }.to_string_lossy();\n        let ty = format!(\"{:?}\", message_type).to_lowercase();\n\n        // TODO: these should all be debug.\n        match message_severity {\n            vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE => {\n                tracing::trace!(ty, \"{}\", message)\n            }\n            vk::DebugUtilsMessageSeverityFlagsEXT::INFO => {\n                debug!(target: \"mmserver::vulkan::driver\", ty, \"{}\", message)\n            }\n            vk::DebugUtilsMessageSeverityFlagsEXT::WARNING => {\n                debug!(target: \"mmserver::vulkan::driver\", ty, \"{}\", message)\n            }\n            vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => {\n                
error!(target: \"mmserver::vulkan::driver\", ty, \"{}\", message)\n            }\n            _ => (),\n        }\n    });\n\n    // Must always return false.\n    vk::FALSE\n}\n\nimpl Drop for VkContext {\n    fn drop(&mut self) {\n        debug!(\"destroying vulkan instance\");\n\n        unsafe {\n            if let Some(debug) = self.debug.as_ref() {\n                debug\n                    .debug\n                    .destroy_debug_utils_messenger(debug.messenger, None);\n            }\n\n            self.device\n                .destroy_command_pool(self.graphics_queue.command_pool, None);\n\n            if let Some(encode_queue) = self.encode_queue.as_ref() {\n                self.device\n                    .destroy_command_pool(encode_queue.command_pool, None);\n            }\n\n            self.device\n                .destroy_descriptor_pool(self.descriptor_pool, None);\n            self.device.destroy_device(None);\n            self.instance.destroy_instance(None);\n        }\n    }\n}\n\n#[cfg(feature = \"tracy\")]\nfn init_tracy_context(\n    device: &ash::Device,\n    pdevice: &VkDeviceInfo,\n    queue: vk::Queue,\n    command_pool: vk::CommandPool,\n    client: tracy_client::Client,\n    name: &str,\n) -> anyhow::Result<tracy_client::GpuContext> {\n    // Query the timestamp once to calibrate the clocks.\n    let cb = allocate_command_buffer(device, command_pool)?;\n\n    unsafe {\n        device.reset_command_buffer(cb, vk::CommandBufferResetFlags::empty())?;\n\n        let query_pool = create_timestamp_query_pool(device, 1)?;\n\n        let fence = device.create_fence(&vk::FenceCreateInfo::default(), None)?;\n\n        // Begin the command buffer.\n        device.begin_command_buffer(\n            cb,\n            &vk::CommandBufferBeginInfo::default()\n                .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT),\n        )?;\n\n        // Write a timestamp.\n        query_pool.cmd_reset(device, cb);\n        
device.cmd_write_timestamp(\n            cb,\n            vk::PipelineStageFlags::BOTTOM_OF_PIPE,\n            query_pool.pool,\n            0,\n        );\n\n        // Submit.\n        device.end_command_buffer(cb)?;\n\n        device.queue_submit(\n            queue,\n            &[vk::SubmitInfo::default().command_buffers(&[cb])],\n            fence,\n        )?;\n\n        // Wait for the fence, fetch the timestamp.\n        device.wait_for_fences(&[fence], true, u64::MAX)?;\n        let ts = query_pool.fetch_results(device)?[0];\n\n        let context = client.new_gpu_context(\n            Some(name),\n            tracy_client::GpuContextType::Vulkan,\n            ts as i64,\n            pdevice.limits.timestamp_period,\n        )?;\n\n        // Cleanup.\n        device.free_command_buffers(command_pool, &[cb]);\n        device.destroy_fence(fence, None);\n        device.destroy_query_pool(query_pool.pool, None);\n\n        Ok(context)\n    }\n}\n\npub fn select_memory_type(\n    props: &vk::PhysicalDeviceMemoryProperties,\n    flags: vk::MemoryPropertyFlags,\n    memory_type_bits: Option<u32>,\n) -> Option<u32> {\n    for i in 0..props.memory_type_count {\n        if let Some(mask) = memory_type_bits {\n            if mask & (1 << i) == 0 {\n                continue;\n            }\n        }\n\n        if flags.is_empty()\n            || props.memory_types[i as usize]\n                .property_flags\n                .contains(flags)\n        {\n            return Some(i);\n        }\n    }\n\n    None\n}\n\npub struct VkImage {\n    pub image: vk::Image,\n    pub view: vk::ImageView,\n    pub memory: vk::DeviceMemory,\n    pub format: vk::Format,\n    pub width: u32,\n    pub height: u32,\n    vk: Arc<VkContext>,\n}\n\nimpl VkImage {\n    pub fn new(\n        vk: Arc<VkContext>,\n        format: vk::Format,\n        ignore_alpha: bool,\n        width: u32,\n        height: u32,\n        usage: vk::ImageUsageFlags,\n        sharing_mode: vk::SharingMode,\n 
       flags: vk::ImageCreateFlags,\n    ) -> anyhow::Result<Self> {\n        let image = {\n            let create_info = vk::ImageCreateInfo::default()\n                .image_type(vk::ImageType::TYPE_2D)\n                .format(format)\n                .extent(vk::Extent3D {\n                    width,\n                    height,\n                    depth: 1,\n                })\n                .mip_levels(1)\n                .array_layers(1)\n                .samples(vk::SampleCountFlags::TYPE_1)\n                .tiling(vk::ImageTiling::OPTIMAL)\n                .usage(usage)\n                .sharing_mode(sharing_mode)\n                .initial_layout(vk::ImageLayout::UNDEFINED)\n                .flags(flags);\n\n            unsafe {\n                vk.device\n                    .create_image(&create_info, None)\n                    .context(\"VkCreateImage\")?\n            }\n        };\n\n        let memory =\n            unsafe { bind_memory_for_image(&vk.device, &vk.device_info.memory_props, image)? };\n\n        let view = unsafe { create_image_view(&vk.device, image, format, ignore_alpha)? 
};\n\n        Ok(Self {\n            image,\n            view,\n            memory,\n            format,\n            width,\n            height,\n            vk,\n        })\n    }\n\n    pub fn wrap(\n        vk: Arc<VkContext>,\n        image: vk::Image,\n        view: vk::ImageView,\n        memory: vk::DeviceMemory,\n        format: vk::Format,\n        width: u32,\n        height: u32,\n    ) -> Self {\n        Self {\n            image,\n            view,\n            memory,\n            format,\n            width,\n            height,\n            vk,\n        }\n    }\n\n    pub fn extent(&self) -> vk::Extent2D {\n        vk::Extent2D {\n            width: self.width,\n            height: self.height,\n        }\n    }\n\n    pub fn rect(&self) -> vk::Rect2D {\n        vk::Rect2D {\n            offset: vk::Offset2D { x: 0, y: 0 },\n            extent: self.extent(),\n        }\n    }\n}\n\nimpl Drop for VkImage {\n    fn drop(&mut self) {\n        unsafe {\n            self.vk.device.destroy_image_view(self.view, None);\n            self.vk.device.destroy_image(self.image, None);\n            self.vk.device.free_memory(self.memory, None);\n        }\n    }\n}\n\npub unsafe fn bind_memory_for_image(\n    device: &ash::Device,\n    props: &vk::PhysicalDeviceMemoryProperties,\n    image: vk::Image,\n) -> anyhow::Result<vk::DeviceMemory> {\n    let image_memory_req = unsafe { device.get_image_memory_requirements(image) };\n\n    let mem_type_index = select_memory_type(\n        props,\n        vk::MemoryPropertyFlags::DEVICE_LOCAL,\n        Some(image_memory_req.memory_type_bits),\n    );\n\n    if mem_type_index.is_none() {\n        bail!(\n            \"no appropriate memory type found for reqs: {:?}\",\n            image_memory_req\n        );\n    }\n\n    let memory = {\n        let image_allocate_info = vk::MemoryAllocateInfo::default()\n            .allocation_size(image_memory_req.size)\n            .memory_type_index(mem_type_index.unwrap());\n\n     
   unsafe {\n            device\n                .allocate_memory(&image_allocate_info, None)\n                .context(\"VkAllocateMemory\")?\n        }\n    };\n\n    unsafe {\n        device\n            .bind_image_memory(image, memory, 0)\n            .context(\"VkBindImageMemory\")?;\n    }\n\n    Ok(memory)\n}\n\npub unsafe fn create_image_view(\n    device: &ash::Device,\n    image: vk::Image,\n    format: vk::Format,\n    ignore_alpha: bool,\n) -> anyhow::Result<vk::ImageView> {\n    let alpha_swizzle = if ignore_alpha {\n        vk::ComponentSwizzle::ONE\n    } else {\n        vk::ComponentSwizzle::IDENTITY\n    };\n\n    let create_info = vk::ImageViewCreateInfo::default()\n        .image(image)\n        .view_type(vk::ImageViewType::TYPE_2D)\n        .format(format)\n        .components(vk::ComponentMapping {\n            r: vk::ComponentSwizzle::IDENTITY,\n            g: vk::ComponentSwizzle::IDENTITY,\n            b: vk::ComponentSwizzle::IDENTITY,\n            a: alpha_swizzle,\n        })\n        .subresource_range(vk::ImageSubresourceRange {\n            aspect_mask: vk::ImageAspectFlags::COLOR,\n            base_mip_level: 0,\n            level_count: vk::REMAINING_MIP_LEVELS,\n            base_array_layer: 0,\n            layer_count: vk::REMAINING_ARRAY_LAYERS,\n        });\n\n    device\n        .create_image_view(&create_info, None)\n        .context(\"VkCreateImageView\")\n}\n\npub struct VkHostBuffer {\n    pub buffer: vk::Buffer,\n    pub memory: vk::DeviceMemory,\n    pub access: *mut c_void,\n    pub len: usize,\n    vk: Arc<VkContext>,\n}\n\nunsafe impl Send for VkHostBuffer {}\n\nimpl VkHostBuffer {\n    pub fn new(\n        vk: Arc<VkContext>,\n        mem_type: u32,\n        usage: vk::BufferUsageFlags,\n        size: usize,\n    ) -> anyhow::Result<Self> {\n        let buffer = {\n            let create_info = vk::BufferCreateInfo::default()\n                .size(size as u64)\n                .usage(usage)\n                
.sharing_mode(vk::SharingMode::EXCLUSIVE);\n\n            unsafe {\n                vk.device\n                    .create_buffer(&create_info, None)\n                    .context(\"VkCreateBuffer\")?\n            }\n        };\n\n        let requirements = unsafe { vk.device.get_buffer_memory_requirements(buffer) };\n\n        let alloc_info = vk::MemoryAllocateInfo::default()\n            .allocation_size(requirements.size)\n            .memory_type_index(mem_type);\n\n        let memory = unsafe {\n            vk.device\n                .allocate_memory(&alloc_info, None)\n                .context(\"VkAllocateMemory\")?\n        };\n\n        unsafe {\n            vk.device\n                .bind_buffer_memory(buffer, memory, 0)\n                .context(\"vkBindBufferMemory\")?\n        };\n\n        let access = {\n            unsafe {\n                vk.device\n                    .map_memory(memory, 0, vk::WHOLE_SIZE, vk::MemoryMapFlags::empty())\n                    .context(\"VkMapMemory\")?\n            }\n        };\n\n        Ok(VkHostBuffer {\n            buffer,\n            memory,\n            access,\n            len: size,\n            vk,\n        })\n    }\n\n    pub(crate) fn wrap(\n        vk: Arc<VkContext>,\n        buf: vk::Buffer,\n        memory: vk::DeviceMemory,\n        buffer_size: usize,\n    ) -> Self {\n        let access = unsafe {\n            vk.device\n                .map_memory(memory, 0, vk::WHOLE_SIZE, vk::MemoryMapFlags::empty())\n                .context(\"failed to map buffer memory\")\n                .unwrap()\n        };\n\n        Self {\n            buffer: buf,\n            memory,\n            access,\n            len: buffer_size,\n            vk,\n        }\n    }\n\n    pub fn copy_from_slice(&mut self, src: &[u8]) {\n        let dst = unsafe { std::slice::from_raw_parts_mut(self.access as *mut u8, self.len) };\n        dst.copy_from_slice(src);\n    }\n}\n\nimpl Drop for VkHostBuffer {\n    fn drop(&mut self) 
{\n        unsafe {\n            self.vk.device.unmap_memory(self.memory);\n            self.vk.device.destroy_buffer(self.buffer, None);\n            self.vk.device.free_memory(self.memory, None);\n        }\n    }\n}\n\npub struct VkTimestampQueryPool {\n    pub pool: vk::QueryPool,\n    num_timestamps: u32,\n}\n\nimpl VkTimestampQueryPool {\n    pub unsafe fn cmd_reset(&self, device: &ash::Device, command_buffer: vk::CommandBuffer) {\n        device.cmd_reset_query_pool(command_buffer, self.pool, 0, self.num_timestamps);\n    }\n\n    pub fn fetch_results(&self, device: &ash::Device) -> anyhow::Result<Vec<i64>> {\n        let mut results = vec![0_i64; self.num_timestamps as usize];\n        unsafe {\n            device\n                .get_query_pool_results(self.pool, 0, &mut results, vk::QueryResultFlags::WAIT)\n                .context(\"vkGetQueryPoolResults\")?;\n        }\n\n        for v in &results {\n            assert!(v > &0_i64, \"invalid query pool results\")\n        }\n\n        Ok(results)\n    }\n}\n\npub fn create_timestamp_query_pool(\n    device: &ash::Device,\n    num_timestamps: u32,\n) -> anyhow::Result<VkTimestampQueryPool> {\n    let create_info = vk::QueryPoolCreateInfo::default()\n        .query_type(vk::QueryType::TIMESTAMP)\n        .query_count(num_timestamps);\n\n    let pool = unsafe {\n        device\n            .create_query_pool(&create_info, None)\n            .context(\"vkCreateQueryPool\")?\n    };\n\n    Ok(VkTimestampQueryPool {\n        pool,\n        num_timestamps,\n    })\n}\n\npub fn load_shader(device: &ash::Device, bytes: &[u8]) -> anyhow::Result<vk::ShaderModule> {\n    let code = ash::util::read_spv(&mut std::io::Cursor::new(bytes))?;\n    let create_info = vk::ShaderModuleCreateInfo::default().code(&code);\n\n    let shader = unsafe { device.create_shader_module(&create_info, None)? 
};\n\n    Ok(shader)\n}\n\npub fn allocate_command_buffer(\n    device: &ash::Device,\n    pool: vk::CommandPool,\n) -> anyhow::Result<vk::CommandBuffer> {\n    let create_info = vk::CommandBufferAllocateInfo::default()\n        .level(vk::CommandBufferLevel::PRIMARY)\n        .command_pool(pool)\n        .command_buffer_count(1);\n\n    let cb = unsafe {\n        device\n            .allocate_command_buffers(&create_info)\n            .context(\"failed to allocate render command buffer\")?\n            .pop()\n            .unwrap()\n    };\n\n    Ok(cb)\n}\n\n#[instrument(level = \"trace\", skip_all)]\npub unsafe fn begin_command_buffer(\n    device: &ash::Device,\n    cb: vk::CommandBuffer,\n) -> anyhow::Result<()> {\n    device.reset_command_buffer(cb, vk::CommandBufferResetFlags::empty())?;\n    device.begin_command_buffer(\n        cb,\n        &vk::CommandBufferBeginInfo::default().flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT),\n    )?;\n\n    Ok(())\n}\n\npub fn insert_image_barrier(\n    device: &ash::Device,\n    cb: vk::CommandBuffer,\n    image: vk::Image,\n    queue_transfer: Option<(u32, u32)>,\n    old_layout: vk::ImageLayout,\n    new_layout: vk::ImageLayout,\n    src_stage: vk::PipelineStageFlags2,\n    src_access: vk::AccessFlags2,\n    dst_stage: vk::PipelineStageFlags2,\n    dst_access: vk::AccessFlags2,\n) {\n    let (src_family, dst_family) =\n        queue_transfer.unwrap_or((vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED));\n\n    let barriers = [vk::ImageMemoryBarrier2::default()\n        .src_stage_mask(src_stage)\n        .src_access_mask(src_access)\n        .dst_stage_mask(dst_stage)\n        .dst_access_mask(dst_access)\n        .old_layout(old_layout)\n        .new_layout(new_layout)\n        .src_queue_family_index(src_family)\n        .dst_queue_family_index(dst_family)\n        .image(image)\n        .subresource_range(vk::ImageSubresourceRange {\n            aspect_mask: vk::ImageAspectFlags::COLOR,\n            
base_mip_level: 0,\n            level_count: vk::REMAINING_MIP_LEVELS,\n            base_array_layer: 0,\n            layer_count: vk::REMAINING_ARRAY_LAYERS,\n        })];\n\n    unsafe {\n        device.cmd_pipeline_barrier2(\n            cb,\n            &vk::DependencyInfo::default().image_memory_barriers(&barriers),\n        )\n    };\n}\n\nfn contains_extension(list: &[CString], str: &CStr) -> bool {\n    list.iter().any(|v| v.as_c_str() == str)\n}\n"
  },
  {
    "path": "mm-server/src/waking_sender.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: BUSL-1.1\n\nuse std::sync::Arc;\n\npub struct WakingSender<T> {\n    waker: Arc<mio::Waker>,\n    sender: crossbeam_channel::Sender<T>,\n}\n\nimpl<T> Clone for WakingSender<T> {\n    fn clone(&self) -> Self {\n        Self {\n            waker: self.waker.clone(),\n            sender: self.sender.clone(),\n        }\n    }\n}\n\nimpl<T> WakingSender<T> {\n    pub fn new(waker: Arc<mio::Waker>, sender: crossbeam_channel::Sender<T>) -> Self {\n        assert!(\n            !sender.is_full(),\n            \"WakingSender must be created with a non-zero capacity channel\"\n        );\n\n        Self { waker, sender }\n    }\n\n    pub fn send(&self, msg: T) -> Result<(), crossbeam_channel::SendError<T>> {\n        self.sender.send(msg)?;\n        self.waker.wake().unwrap();\n        Ok(())\n    }\n\n    pub fn try_send(&self, msg: T) -> Result<(), crossbeam_channel::TrySendError<T>> {\n        self.sender.try_send(msg)?;\n        self.waker.wake().unwrap();\n        Ok(())\n    }\n}\n\npub struct WakingOneshot<T> {\n    waker: Arc<mio::Waker>,\n    sender: oneshot::Sender<T>,\n}\n\nimpl<T> WakingOneshot<T> {\n    pub fn new(waker: Arc<mio::Waker>, sender: oneshot::Sender<T>) -> Self {\n        Self { waker, sender }\n    }\n\n    pub fn send(self, msg: T) -> Result<(), oneshot::SendError<T>> {\n        self.sender.send(msg)?;\n        self.waker.wake().unwrap();\n        Ok(())\n    }\n}\n"
  },
  {
    "path": "mmserver.default.toml",
    "content": "## Copyright 2024 Colin Marc <hi@colinmarc.com>\n##\n## SPDX-License-Identifier: MIT\n##\n## This file specifies the configuration defaults for the magic mirror server. If\n## a line is commented out, the default is to leave the value unset (and the\n## setting is not required, unless stated otherwise).\n##\n## To determine the final config, the server merges the values in this file with\n## the provided configuration file (by default, /etc/magic-mirror/mmserver.toml).\n##\n## All configuration files may be json instead of toml.\n\n## ***-----------------***\n## *** Global Settings ***\n## ***-----------------***\n\n## If set, this special setting instructs the server to load additional application\n## configurations from the given files or directories. Each file (or file in\n## the directories) should be named using the scheme `<NAME>.toml` or\n## `<NAME>.json`, where `<NAME>` is the name of the application, following the\n## rules outlined below. The contents should be identical to the configuration\n## for an individual app.\n##\n# include_apps = [\"/etc/magic-mirror/apps.d\"]\n\n## This determines where the server stores application data, i.e. the $HOME for\n## containerized applications. If not set, then $XDG_DATA_HOME/mmserver is used,\n## or $HOME/.local/share/mmserver if $XDG_DATA_HOME is not set.\n##\n## If you're running magic-mirror as a permanent daemon, you should set this to\n## something like /var/lib/magic-mirror.\n##\n# data_home = \"/var/lib/magic-mirror\"\n\n## ***-----------------***\n## *** Server Settings ***\n## ***-----------------***\n##\n## This section contains configuration options for the network server.\n\n[server]\n\n## Where the server should listen for incoming connections. IPv6 addresses are\n## supported. 
Use `0.0.0.0` or `[::]` to listen on all available interfaces.\nbind = \"localhost:9599\"\n\n## If set, `bind` will be ignored, and the server will instead listen for\n## incoming connections on the socket specified by the LISTEN_FDS environment\n## variable. See the systemd documentation on \"socket activation\", here:\n## <https://www.freedesktop.org/software/systemd/man/latest/systemd.socket.html>\nbind_systemd = false\n\n## Used for TLS. Both are required unless the host portion of the bind address\n## resolves to a private address (as defined by RFCs 1918, 4193, and 6598) or\n## otherwise not routable, for example `127.0.0.1`, `192.168.24.25`, or\n## `fd12:3456:789a:1::1`.\n# tls_key = \"/path/to/tls.key\"\n# tls_cert = \"/path/to/tls.cert\"\n\n## The number of threads to spawn for handling incoming requests.\nworker_threads = 8\n\n## The maximum number of concurrent connections the server will accept. Use `inf`\n## to specify no limit.\nmax_connections = 4\n\n## Whether to use mDNS to allow clients to discover the server.\nmdns = true\n\n## Determines the FEC (forward error correction) ratio to use for each video\n## layer. For example, an array of values like `[0.20, 0.10, 0.05]` would use\n## 0.20 for the base layer, 0.10 for the second layer, etc. If a layer is not\n## covered because the array isn't long enough, the FEC ratio for that layer\n## defaults to 0.0. Use an empty array to disable FEC altogether.\n##\n## If hierarchical coding is not in use, then only the first value applies to\n## all video frames.\nvideo_fec_ratios = [0.15]\n\n## The hostname to advertise over mDNS. Defaults to `\"$(uname -n).local.\"` if left\n## unset, or ignored if `mdns` is `false`.\n# mdns_hostname = \"mycomputer.local.\"\n\n## The instance name to advertise over mDNS. 
Defaults to the unqualified value of\n## `mdns_hostname`, converted to uppercase.\n# mdns_instance_name = \"MYCOMPUTER\"\n\n## ***-------------------------***\n## *** Configured Applications ***\n## ***-------------------------***\n##\n## Each application you want to stream must be configured in advance, with each\n## application as its own section. Applications can, alternatively, be\n## configured as individual files. See the documentation for `include_apps`\n## above for more information. At least one application must always be\n## configured.\n##\n## App names must be unique and only contain characters in the set `[a-z0-9-_]`.\n## The section is structured as a dictionary, with the key as the application\n## name.\n##\n## An example application configuration follows. (Note that unlike the rest of\n## this file, this application is not included in the default configuration.)\n# [apps.steam-big-picture]\n\n## A short name for the app.\n# description = \"Steam\"\n\n## The command to run. Must be in `$PATH` or absolute.\n# command = [\"steam\", \"-gamepadui\"]\n\n## Key/value pairs to set in the environment when running the command.\n# environment = { \"FOO\" = \"bar\" }\n\n## Configure a \"path\" for the application. Clients can use this to group apps\n## into folders. This has nothing to do with the local filesystem. Paths should\n## use unix path separators. They may include characters in the set\n## `[A-Za-z0-9-_ ]` (including spaces).\n# app_path = \"My Games/Puzzle Games\"\n\n## Add a header image to the app, for displaying in clients. The image must be a\n## PNG file and less than 1mb. Any aspect ratio is permitted, but roughly 2:1\n## with a transparent background will work best.\n# header_image = \"/path/to/image.png\"\n\n## Enable XWayland support for this application. 
This is required for any\n## applications that are built for the legacy X11 windowing system, such as Steam.\n##\n## If unset, defaults to `default_app_settings.xwayland`.\n# xwayland = true\n\n## Force the app to run at 1x. This is useful for applications where you know in\n## advance they don't support any UI scaling, for example any application run\n## through XWayland. This setting will ensure that the app always renders at the\n## full session resolution, but may result in small font sizes or other UI\n## elements.\n##\n## If unset, defaults to `default_app_settings.force_1x_scale`.\n# force_1x_scale = false\n\n## How long to leave the session running without any client attached to it, in\n## seconds. Use the value `inf` to specify no timeout.\n# session_timeout = 600\n\n## Isolate the home directory. If set, the application will see a clean,\n## sandboxed `$HOME` (and `/home/$(whoami)`), rather than the system-wide one.\n## This home directory is saved between runs of the app to\n## `<data_home>/homes/<shared_home_name>`.\n##\n## If unset, defaults to `default_app_settings.isolate_home`.\n# isolate_home = true\n\n## If `isolate_home` is set to true, this sets a name for the home directory,\n## which can be shared between apps. For example, multiple apps with this option set\n## to 'myhome' will all see the same $HOME when they run. By default, this is\n## set to the name of the application.\n##\n## If unset, defaults to `default_app_settings.shared_home_name`.\n# shared_home_name = same as application name\n\n## If `isolate_home` is set to true, this mounts a brand new $HOME (using tmpfs)\n## each time the application is run. 
If set, `shared_home_name` is ignored.\n##\n## Note that any data saved while the app is running will be irrevocably\n## destroyed when it exits.\n##\n## If unset, defaults to `default_app_settings.tmp_home`.\n# tmp_home = false\n\n## ***----------------------***\n## *** Default App Settings ***\n## ***----------------------***\n##\n## This section can be used to set global defaults for all apps. Any setting here\n## can be overridden in the configuration for each individual app.\n\n[default_app_settings]\nxwayland = true\nforce_1x_scale = false\nsession_timeout = 3600 # 1h\nisolate_home = true\ntmp_home = false\n"
  },
  {
    "path": "shader-common/color.slang",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nmodule color;\n\n// A set of color primaries, defined in terms of a transformation to/from XYZ\n// space.\npublic struct PrimariesTransform\n{\n    public float3x3 to_xyz;\n    public float3x3 from_xyz;\n}\n\n// Named sets of color primaries.\nnamespace Primaries\n{\npublic static const PrimariesTransform BT709 = {\n    float3x3(\n        0.4124564f, 0.3575761f, 0.1804375f,\n        0.2126729f, 0.7151522f, 0.0721750f,\n        0.0193339f, 0.1191920f, 0.9503041f),\n    float3x3(\n        3.2404542f, -1.5371385f, -0.4985314f,\n        -0.9692660f, 1.8760108f, 0.0415560f,\n        0.0556434f, -0.2040259f, 1.0572252f)\n};\n\npublic static const PrimariesTransform BT2020 = {\n    float3x3(\n        0.636958f, 0.1446169f, 0.1688810f,\n        0.2627002f, 0.6779981f, 0.0593017f,\n        0.0000000f, 0.0280727f, 1.0609851f),\n    float3x3(\n        1.7166512, -0.3556708, -0.2533663,\n        -0.6666844, 1.6164812, 0.0157685,\n        0.0176399, -0.0427706, 0.9421031),\n};\n}\n\n// Applies the sRGB EOTF to a color, producing linear values.\npublic float3 srgb_eotf(float3 color)\n{\n    return float3(\n        srgb_eotf(color.r),\n        srgb_eotf(color.g),\n        srgb_eotf(color.b));\n}\n\n// Applies the sRGB EOTF to one channel of a color, producing a linear value.\npublic float srgb_eotf(float channel)\n{\n    return channel > 0.04045 ? pow((channel + 0.055) / 1.055, 2.4) : channel / 12.92;\n}\n\n// Applies the inverse sRGB EOTF to a color, producing non-linear values. This\n// is sometimes called gamma correction.\npublic float3 srgb_inverse_eotf(float3 color)\n{\n    return float3(\n        srgb_inverse_eotf(color.r),\n        srgb_inverse_eotf(color.g),\n        srgb_inverse_eotf(color.b));\n}\n\n// Applies the inverse sRGB EOTF to one channel of a color, producing non-linear\n// values. 
This is sometimes called gamma correction.\npublic float srgb_inverse_eotf(float channel)\n{\n    return channel > 0.0031308 ? 1.055 * pow(channel, 1.0 / 2.4) - 0.055 : 12.92 * channel;\n}\n\n// Applies the BT.709 EOTF to a color, producing linear values.\npublic float3 bt709_eotf(float3 color)\n{\n    return float3(\n        bt709_eotf(color.r),\n        bt709_eotf(color.g),\n        bt709_eotf(color.b));\n}\n\n// Applies the BT.709 EOTF to one channel of a color, producing a linear value.\npublic float bt709_eotf(float channel)\n{\n    return channel > 0.081 ? pow((channel + 0.099) / 1.099, 1.0 / 0.45) : channel / 4.5;\n}\n\n// Applies the inverse BT.709 EOTF to a color, producing non-linear values. This\n// is sometimes called gamma correction.\npublic float3 bt709_inverse_eotf(float3 color)\n{\n    return float3(\n        bt709_inverse_eotf(color.r),\n        bt709_inverse_eotf(color.g),\n        bt709_inverse_eotf(color.b));\n}\n\n// Applies the inverse BT.709 EOTF to one channel of a color, producing non-linear\n// values. This is sometimes called gamma correction.\npublic float bt709_inverse_eotf(float channel)\n{\n    return channel >= 0.018 ? 1.099 * pow(channel, 1.0 / 2.2) - 0.099 : 4.5 * channel;\n}\n\nstatic const float PQ_M1 = 0.1593017578125;\nstatic const float PQ_M2 = 78.84375;\nstatic const float PQ_C1 = 0.8359375;\nstatic const float PQ_C2 = 18.8515625;\nstatic const float PQ_C3 = 18.6875;\n\npublic static const float SDR_REFERENCE_WHITE = 203.0;\npublic static const float PQ_MAX_WHITE = 10000.0;\n\n// Applies the Perceptual Quantizer EOTF to a color, producing linear values.\n// The input should be in the range [0, 1], where 1 corresponds to the maximum\n// 10,000 nits.\npublic float3 pq_eotf(float3 color)\n{\n    return float3(\n        pq_eotf(color.r),\n        pq_eotf(color.g),\n        pq_eotf(color.b));\n}\n\n// Applies the Perceptual Quantizer EOTF to a color channel, producing linear\n// values. 
The input should be in the range [0, 1], where 1 corresponds to the\n// maximum 10,000 nits.\nfloat pq_eotf(float channel)\n{\n    let c = pow(channel, 1.0 / PQ_M2);\n    return pow(\n        max(c - PQ_C1, 0.0) / (PQ_C2 - PQ_C3 * c),\n        1.0 / PQ_M1);\n}\n\n// Applies the inverse Perceptual Quantizer EOTF to a color, producing non-linear\n// values. The output will be in the range [0, 1], where 1 corresponds to the\n// maximum 10,000 nits.\npublic float3 pq_inverse_eotf(float3 color)\n{\n    return float3(\n        pq_inverse_eotf(color.r),\n        pq_inverse_eotf(color.g),\n        pq_inverse_eotf(color.b));\n}\n\n// Applies the inverse Perceptual Quantizer EOTF to a color channel, producing a\n// non-linear value. The output will be in the range [0, 1], where 1 corresponds\n// to the maximum 10,000 nits.\nfloat pq_inverse_eotf(float channel)\n{\n    let c = pow(channel, PQ_M1);\n    return pow(\n        (PQ_C1 + PQ_C2 * c) / (1.0 + PQ_C3 * c),\n        PQ_M2);\n}\n\n// Transform a color from one set of primaries to another. The colors must be\n// linear, that is, they must have already been linearized using the relevant\n// OETF.\npublic float4 transform(float4 color, PrimariesTransform pa, PrimariesTransform pb)\n{\n    return float4(\n        transform(color.rgb, pa, pb),\n        color.a);\n}\n\n// Transform a color from one set of primaries to another. 
The colors must be\n// linear, that is, they must have already been linearized using the relevant\n// inverse EOTF.\npublic float3 transform(float3 color, PrimariesTransform pa, PrimariesTransform pb)\n{\n    let mat = mul(pb.from_xyz, pa.to_xyz);\n    return mul(mat, color);\n}\n\n// Available conversions to and from YCbCr color space.\npublic enum YCbCrModel\n{\n    BT709,\n    BT2020,\n}\n\nstatic const float3x3 YCBCR_709_MATRIX = float3x3(\n    0.2126, 0.7152, 0.0722,\n    -0.114572, -0.385428, 0.5,\n    0.5, -0.454153, -0.045847);\n\nstatic const float3x3 YCBCR_2020_MATRIX = float3x3(\n    0.2627, 0.6780, 0.0593,\n    -0.139630, -0.360370, 0.5,\n    0.5, -0.459786, -0.040214);\n\n// Encode a color in the YCbCr color system. The color should already be in\n// nonlinear space.\npublic float3 encode_ycbcr(float3 color, YCbCrModel model, bool full_range)\n{\n    float3 ycbcr;\n    switch (model)\n    {\n    case YCbCrModel::BT709:\n        ycbcr = mul(YCBCR_709_MATRIX, color);\n        break;\n    case YCbCrModel::BT2020:\n        ycbcr = mul(YCBCR_2020_MATRIX, color);\n        break;\n    }\n\n    // The matrix multiplication gives us Y in [0, 1] and Cb and Cr in [-0.5, 0.5].\n    ycbcr.y += 0.5;\n    ycbcr.z += 0.5;\n\n    if (!full_range)\n        // This converts to \"MPEG\" or \"Narrow\" in the range [16, 235] and [16, 240].\n        ycbcr = float3(\n            (219.0 * ycbcr.x + 16.0) / 256.0,\n            (224.0 * ycbcr.y + 16.0) / 256.0,\n            (224.0 * ycbcr.z + 16.0) / 256.0);\n\n    return clamp(ycbcr, 0.0, 1.0);\n}\n"
  },
  {
    "path": "test-apps/Cargo.toml",
    "content": "# Copyright 2024 Colin Marc <hi@colinmarc.com>\n#\n# SPDX-License-Identifier: MIT\n\n[package]\nname = \"latency-test\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[[bin]]\nname = \"latency-test\"\npath = \"bin/latency.rs\"\n\n[[bin]]\nname = \"color-test\"\npath = \"bin/color.rs\"\n\n[[bin]]\nname = \"cursorlock-test\"\npath = \"bin/cursorlock.rs\"\n\n[dependencies]\nanyhow = \"1.0.75\"\nash = \"0.37.3\"\nash-window = \"0.12.0\"\nraw-window-handle = \"0.5.2\"\nclap = { version = \"4.4.5\", features = [\"derive\"] }\nglam = \"0.27.0\"\nimgui-rs-vulkan-renderer = { version = \"1.12\", features = [\n    \"dynamic-rendering\",\n] }\nimgui = { version = \"0.11\", features = [\"tables-api\"] }\nimgui-winit-support = \"0.11\"\n\n\n[dependencies.winit]\nversion = \"0.29\"\ndefault-features = false\nfeatures = [\"wayland\", \"rwh_05\"]\n\n[dependencies.bevy]\nversion = \"0.15\"\ndefault-features = false\nfeatures = [\n    #\"animation\",\n    #\"bevy_asset\",\n    #\"bevy_audio\",\n    \"bevy_gilrs\",\n    #\"bevy_scene\",\n    \"bevy_winit\",\n    \"bevy_core_pipeline\",\n    \"bevy_pbr\",\n    #\"bevy_gltf\",\n    \"bevy_render\",\n    \"bevy_sprite\",\n    #\"bevy_text\",\n    #\"bevy_ui\",\n    #\"png\",\n    #\"hdr\",\n    #\"vorbis\",\n    #\"x11\",\n    \"wayland\",\n    #\"bevy_gizmos\",\n    #\"android_shared_stdcxx\",\n    \"tonemapping_luts\",\n    #\"default_font\",\n    #\"webgl2\",\n    #\"bevy_debug_stepping\",\n]\n\n[build-dependencies.slang]\ngit = \"https://github.com/colinmarc/slang-rs\"\nrev = \"075daa4faa8d1ab6d7bfbb5293812b087a527207\"\n# Uses SLANG_DIR if set, otherwise builds slang from source\nfeatures = [\"from-source\"]\n\n[patch.crates-io]\nimgui = { git = \"https://github.com/colinmarc/imgui-rs\" }\nimgui-winit-support = { git = \"https://github.com/colinmarc/imgui-rs\" }\n"
  },
  {
    "path": "test-apps/bin/color.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::{\n    ffi::{c_void, CStr, CString},\n    rc::Rc,\n    time,\n};\n\nuse anyhow::{anyhow, Context};\nuse ash::{\n    extensions::{\n        ext::DebugUtils as DebugUtilsExt, khr::DynamicRendering as DynamicRenderingKhr,\n        khr::Surface as SurfaceKhr, khr::Swapchain as SwapchainKhr,\n    },\n    vk,\n};\nuse imgui_rs_vulkan_renderer as imgui_vulkan;\nuse raw_window_handle::{HasRawDisplayHandle, HasRawWindowHandle};\nuse winit::{\n    event::{ElementState, Event, KeyEvent, MouseButton, WindowEvent},\n    event_loop::EventLoop,\n    keyboard::{KeyCode, PhysicalKey},\n    window::WindowBuilder,\n};\n\nstruct ImguiContext {\n    imgui: imgui::Context,\n    platform: imgui_winit_support::WinitPlatform,\n}\n\n#[derive(Copy, Clone, Debug)]\n#[repr(C)]\nstruct PushConstants {\n    size: glam::Vec2,\n    mouse: glam::Vec2,\n    color_mul: f32,\n    color_space: vk::ColorSpaceKHR,\n}\n\nstruct VkDebugContext {\n    debug: DebugUtilsExt,\n    messenger: vk::DebugUtilsMessengerEXT,\n}\n\nstruct DeviceInfo {\n    device_name: CString,\n    device_type: vk::PhysicalDeviceType,\n    present_family: u32,\n}\n\npub struct VkQueue {\n    pub queue: vk::Queue,\n    pub command_pool: vk::CommandPool,\n}\n\nstruct Renderer {\n    _entry: ash::Entry,\n    instance: ash::Instance,\n    device: ash::Device,\n    swapchain_loader: SwapchainKhr,\n    surface_loader: SurfaceKhr,\n    dynamic_rendering_loader: DynamicRenderingKhr,\n    debug: Option<VkDebugContext>,\n\n    pdevice: vk::PhysicalDevice,\n    _device_info: DeviceInfo,\n\n    surface: vk::SurfaceKHR,\n    surface_formats: Vec<vk::SurfaceFormatKHR>,\n    format: vk::Format,\n    colorspace: vk::ColorSpaceKHR,\n\n    pc: PushConstants,\n    present_queue: VkQueue,\n    width: u32,\n    height: u32,\n\n    imgui: Option<ImguiContext>,\n\n    window: Rc<winit::window::Window>,\n\n    swapchain: Option<Swapchain>,\n    
swapchain_dirty: bool,\n}\n\nstruct Swapchain {\n    swapchain: vk::SwapchainKHR,\n    frames: Vec<InFlightFrame>,\n    present_images: Vec<SwapImage>,\n    current_frame: usize,\n\n    imgui_renderer: Option<imgui_vulkan::Renderer>,\n\n    descriptor_set_layout: vk::DescriptorSetLayout,\n    descriptor_pool: vk::DescriptorPool,\n    pipeline_layout: vk::PipelineLayout,\n    pipeline: vk::Pipeline,\n}\n\nstruct InFlightFrame {\n    render_cb: vk::CommandBuffer,\n    render_fence: vk::Fence,\n    image_acquired_sema: vk::Semaphore,\n    render_complete_sema: vk::Semaphore,\n}\n\nstruct SwapImage {\n    image: vk::Image,\n    view: vk::ImageView,\n}\n\nimpl Renderer {\n    fn new(window: Rc<winit::window::Window>, debug: bool) -> anyhow::Result<Self> {\n        let entry = unsafe { ash::Entry::load().context(\"failed to load vulkan libraries!\") }?;\n        eprintln!(\"creating vulkan instance\");\n\n        let (major, minor) = match entry.try_enumerate_instance_version()? {\n            // Vulkan 1.1+\n            Some(version) => (\n                vk::api_version_major(version),\n                vk::api_version_minor(version),\n            ),\n            // Vulkan 1.0\n            None => (1, 0),\n        };\n\n        if major < 1 || (major == 1 && minor < 2) {\n            return Err(anyhow::anyhow!(\"vulkan 1.2 or higher is required\"));\n        }\n\n        // MoltenVK doesn't actually support 1.3.\n        let (major, minor) = if cfg!(any(target_os = \"macos\", target_os = \"ios\")) {\n            (1, 2)\n        } else {\n            (major, minor)\n        };\n\n        let app_info = vk::ApplicationInfo::builder()\n            .application_name(c\"c\")\n            .application_version(vk::make_api_version(0, 0, 1, 0))\n            .engine_name(c\"No Engine\")\n            .engine_version(vk::make_api_version(0, 0, 1, 0))\n            .api_version(vk::make_api_version(0, major, minor, 0));\n\n        let mut extensions =\n            
ash_window::enumerate_required_extensions(window.raw_display_handle())?.to_vec();\n\n        let mut layers = Vec::new();\n\n        #[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n        {\n            extensions.push(vk::KhrPortabilityEnumerationFn::name().as_ptr());\n            // Enabling this extension is a requirement when using `VK_KHR_portability_subset`\n            extensions.push(vk::KhrGetPhysicalDeviceProperties2Fn::name().as_ptr());\n        }\n\n        if debug {\n            let props = entry.enumerate_instance_extension_properties(None)?;\n            let available_extensions = props\n                .into_iter()\n                .map(|properties| unsafe {\n                    CStr::from_ptr(&properties.extension_name as *const _).to_owned()\n                })\n                .collect::<Vec<_>>();\n\n            if !available_extensions\n                .iter()\n                .any(|ext| ext.as_c_str() == DebugUtilsExt::name())\n            {\n                return Err(anyhow::anyhow!(\n                    \"debug utils extension requested, but not available\"\n                ));\n            }\n\n            extensions.push(DebugUtilsExt::name().as_ptr());\n\n            let validation_layer = c\"VK_LAYER_KHRONOS_validation\";\n            let layer_props = entry.enumerate_instance_layer_properties()?;\n            if layer_props\n                .into_iter()\n                .map(|properties| unsafe { CStr::from_ptr(&properties.layer_name as *const _) })\n                .any(|layer| layer == validation_layer)\n            {\n                layers.push(validation_layer.as_ptr());\n            } else {\n                eprintln!(\"validation layers requested, but not available!\")\n            }\n        }\n\n        let instance = {\n            let flags = if cfg!(any(target_os = \"macos\", target_os = \"ios\")) {\n                vk::InstanceCreateFlags::ENUMERATE_PORTABILITY_KHR\n            } else {\n                
vk::InstanceCreateFlags::default()\n            };\n\n            let instance_create_info = vk::InstanceCreateInfo::builder()\n                .flags(flags)\n                .application_info(&app_info)\n                .enabled_layer_names(&layers)\n                .enabled_extension_names(&extensions);\n\n            unsafe { entry.create_instance(&instance_create_info, None)? }\n        };\n\n        let debug_utils = if debug {\n            let debug_utils = DebugUtilsExt::new(&entry, &instance);\n\n            let create_info = vk::DebugUtilsMessengerCreateInfoEXT::builder()\n                .message_severity(\n                    vk::DebugUtilsMessageSeverityFlagsEXT::WARNING\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::INFO\n                        | vk::DebugUtilsMessageSeverityFlagsEXT::ERROR,\n                )\n                .message_type(\n                    vk::DebugUtilsMessageTypeFlagsEXT::GENERAL\n                        | vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE\n                        | vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION,\n                )\n                .pfn_user_callback(Some(vulkan_debug_utils_callback));\n\n            let messenger =\n                unsafe { debug_utils.create_debug_utils_messenger(&create_info, None) }?;\n\n            Some(VkDebugContext {\n                debug: debug_utils,\n                messenger,\n            })\n        } else {\n            None\n        };\n\n        let surface_loader = SurfaceKhr::new(&entry, &instance);\n        let surface = unsafe {\n            ash_window::create_surface(\n                &entry,\n                &instance,\n                window.raw_display_handle(),\n                window.raw_window_handle(),\n                None,\n            )?\n        };\n\n        let devices = unsafe { instance.enumerate_physical_devices()? 
};\n        let mut devices = devices\n            .into_iter()\n            .enumerate()\n            .flat_map(\n                |(index, dev)| match query_device(&instance, &surface_loader, surface, dev) {\n                    Ok(info) => Some((index as u32, dev, info)),\n                    Err(err) => {\n                        let device_name = unsafe {\n                            CStr::from_ptr(\n                                instance\n                                    .get_physical_device_properties(dev)\n                                    .device_name\n                                    .as_ptr(),\n                            )\n                            .to_owned()\n                        };\n\n                        eprintln!(\"gpu {device_name:?} ineligible: {err}\");\n                        None\n                    }\n                },\n            )\n            .collect::<Vec<_>>();\n\n        devices.sort_by_key(|(_, _, info)| match info.device_type {\n            vk::PhysicalDeviceType::DISCRETE_GPU => 0,\n            vk::PhysicalDeviceType::INTEGRATED_GPU => 1,\n            _ => 2,\n        });\n\n        if devices.is_empty() {\n            return Err(anyhow!(\"no eligible GPU found!\"));\n        }\n\n        let (index, pdevice, device_info) = devices.remove(0);\n        eprintln!(\"selected gpu: {:?} ({index})\", device_info.device_name);\n\n        let device = {\n            let queue_priorities = &[1.0];\n            let mut queue_indices = Vec::new();\n            queue_indices.push(device_info.present_family);\n\n            queue_indices.dedup();\n            let queue_create_infos = queue_indices\n                .iter()\n                .map(|&index| {\n                    vk::DeviceQueueCreateInfo::builder()\n                        .queue_family_index(index)\n                        .queue_priorities(queue_priorities)\n                        .build()\n                })\n                .collect::<Vec<_>>();\n\n        
    let mut enabled_1_1_features =\n                vk::PhysicalDeviceVulkan11Features::builder().sampler_ycbcr_conversion(true);\n\n            let mut dynamic_rendering_features =\n                vk::PhysicalDeviceDynamicRenderingFeatures::builder().dynamic_rendering(true);\n\n            let selected_extensions = [\n                vk::KhrSwapchainFn::name().to_owned(),\n                vk::KhrDynamicRenderingFn::name().to_owned(),\n                #[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n                vk::KhrPortabilitySubsetFn::name().to_owned(),\n            ];\n\n            let extension_names = selected_extensions\n                .iter()\n                .map(|v| v.as_c_str().as_ptr())\n                .collect::<Vec<_>>();\n            let device_create_info = vk::DeviceCreateInfo::builder()\n                .queue_create_infos(&queue_create_infos)\n                .enabled_extension_names(&extension_names)\n                .push_next(&mut enabled_1_1_features)\n                .push_next(&mut dynamic_rendering_features);\n\n            unsafe { instance.create_device(pdevice, &device_create_info, None)? }\n        };\n\n        let present_queue = get_queue_with_command_pool(&device, device_info.present_family)?;\n        let window_size = window.inner_size();\n\n        let surface_formats =\n            unsafe { surface_loader.get_physical_device_surface_formats(pdevice, surface)? 
};\n\n        for surface_format in &surface_formats {\n            eprintln!(\n                \"available surface format: {:?} ({}) -> {:?} ({})\",\n                surface_format.format,\n                surface_format.format.as_raw(),\n                surface_format.color_space,\n                surface_format.color_space.as_raw()\n            );\n        }\n\n        // Disable Vulkan's automatic sRGB conversion.\n        let surface_formats = surface_formats\n            .into_iter()\n            .filter(|sf| !format_is_srgb(sf.format) && colorspace_supported(sf.color_space))\n            .collect::<Vec<_>>();\n\n        let surface_format = surface_formats[0];\n        eprintln!(\n            \"using surface format: {:?} / {:?}\",\n            surface_format.format, surface_format.color_space,\n        );\n\n        let swapchain_loader = SwapchainKhr::new(&instance, &device);\n        let dynamic_rendering_loader = DynamicRenderingKhr::new(&instance, &device);\n\n        let mut imgui = imgui::Context::create();\n        imgui.set_ini_filename(None);\n\n        let mut imgui_platform = imgui_winit_support::WinitPlatform::init(&mut imgui);\n        imgui_platform.attach_window(\n            imgui.io_mut(),\n            &window,\n            imgui_winit_support::HiDpiMode::Default,\n        );\n\n        let mut renderer = Self {\n            _entry: entry,\n            instance,\n            device,\n            swapchain_loader,\n            surface_loader,\n            dynamic_rendering_loader,\n            debug: debug_utils,\n\n            pdevice,\n            _device_info: device_info,\n\n            surface,\n            surface_formats,\n            format: surface_format.format,\n            colorspace: surface_format.color_space,\n\n            pc: PushConstants {\n                size: glam::Vec2::new(window_size.width as f32, window_size.height as f32),\n                mouse: glam::Vec2::ZERO,\n                color_mul: 1.0,\n                
color_space: surface_format.color_space,\n            },\n            present_queue,\n            width: window_size.width,\n            height: window_size.height,\n\n            imgui: Some(ImguiContext {\n                imgui,\n                platform: imgui_platform,\n            }),\n\n            window,\n            swapchain: None,\n\n            swapchain_dirty: false,\n        };\n\n        unsafe { renderer.recreate_swapchain()? };\n\n        Ok(renderer)\n    }\n\n    unsafe fn recreate_swapchain(&mut self) -> anyhow::Result<()> {\n        let start = time::Instant::now();\n        let device = &self.device;\n\n        let surface_format = self\n            .surface_formats\n            .iter()\n            .find(|sf| sf.format == self.format && sf.color_space == self.colorspace)\n            .expect(\"invalid format / colorspace combination\");\n        eprintln!(\n            \"recreating swapchain with format {:?} / {:?}\",\n            surface_format.format, surface_format.color_space\n        );\n\n        self.pc.color_space = surface_format.color_space;\n\n        let surface_capabilities = self\n            .surface_loader\n            .get_physical_device_surface_capabilities(self.pdevice, self.surface)\n            .unwrap();\n        let mut desired_image_count = surface_capabilities.min_image_count + 1;\n        if surface_capabilities.max_image_count > 0\n            && desired_image_count > surface_capabilities.max_image_count\n        {\n            desired_image_count = surface_capabilities.max_image_count;\n        }\n\n        let surface_resolution = match surface_capabilities.current_extent.width {\n            std::u32::MAX => vk::Extent2D {\n                width: self.width,\n                height: self.height,\n            },\n            _ => surface_capabilities.current_extent,\n        };\n\n        self.pc.size = glam::Vec2::new(\n            surface_resolution.width as f32,\n            surface_resolution.height as f32,\n 
       );\n\n        let pre_transform = if surface_capabilities\n            .supported_transforms\n            .contains(vk::SurfaceTransformFlagsKHR::IDENTITY)\n        {\n            vk::SurfaceTransformFlagsKHR::IDENTITY\n        } else {\n            surface_capabilities.current_transform\n        };\n\n        let present_modes = self\n            .surface_loader\n            .get_physical_device_surface_present_modes(self.pdevice, self.surface)\n            .unwrap();\n\n        let mut present_modes = present_modes.clone();\n        present_modes.sort_by_key(|&mode| match mode {\n            vk::PresentModeKHR::MAILBOX => 0,\n            vk::PresentModeKHR::IMMEDIATE => 1,\n            vk::PresentModeKHR::FIFO => 2,\n            _ => 4,\n        });\n\n        let present_mode = present_modes.first().unwrap();\n        if *present_mode != vk::PresentModeKHR::MAILBOX {\n            eprintln!(\n                \"present mode MAILBOX not available, using {:?} (available: {:?})\",\n                present_mode, present_modes\n            );\n        }\n\n        let mut swapchain_create_info = vk::SwapchainCreateInfoKHR::builder()\n            .surface(self.surface)\n            .min_image_count(desired_image_count)\n            .image_color_space(surface_format.color_space)\n            .image_format(surface_format.format)\n            .image_extent(surface_resolution)\n            .image_usage(vk::ImageUsageFlags::COLOR_ATTACHMENT)\n            .image_sharing_mode(vk::SharingMode::EXCLUSIVE)\n            .pre_transform(pre_transform)\n            .composite_alpha(vk::CompositeAlphaFlagsKHR::OPAQUE)\n            .present_mode(*present_mode)\n            .clipped(true)\n            .image_array_layers(1);\n\n        if let Some(old_swapchain) = self.swapchain.as_ref() {\n            swapchain_create_info = swapchain_create_info.old_swapchain(old_swapchain.swapchain);\n        }\n\n        let swapchain = self\n            .swapchain_loader\n            
.create_swapchain(&swapchain_create_info, None)?;\n        let swapchain_images = self.swapchain_loader.get_swapchain_images(swapchain)?;\n\n        let descriptor_set_layout = {\n            let create_info = vk::DescriptorSetLayoutCreateInfo::builder();\n            unsafe { device.create_descriptor_set_layout(&create_info, None)? }\n        };\n\n        let descriptor_pool = {\n            let sampler_size = vk::DescriptorPoolSize::builder()\n                .ty(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n                .descriptor_count(swapchain_images.len() as u32);\n\n            let pool_sizes = &[sampler_size.build()];\n            let info = vk::DescriptorPoolCreateInfo::builder()\n                .pool_sizes(pool_sizes)\n                .max_sets(swapchain_images.len() as u32);\n\n            unsafe { device.create_descriptor_pool(&info, None)? }\n        };\n\n        let pipeline_layout = {\n            let pc_ranges = [vk::PushConstantRange::builder()\n                .stage_flags(vk::ShaderStageFlags::VERTEX | vk::ShaderStageFlags::FRAGMENT)\n                .offset(0)\n                .size(std::mem::size_of::<PushConstants>() as u32)\n                .build()];\n            let set_layouts = [descriptor_set_layout];\n            let create_info = vk::PipelineLayoutCreateInfo::builder()\n                .set_layouts(&set_layouts)\n                .push_constant_ranges(&pc_ranges);\n\n            unsafe { device.create_pipeline_layout(&create_info, None)? 
}\n        };\n\n        let pipeline = {\n            let vert_bytes = include_bytes!(concat!(env!(\"OUT_DIR\"), \"/color-test/vert.spv\"));\n            let frag_bytes = include_bytes!(concat!(env!(\"OUT_DIR\"), \"/color-test/frag.spv\"));\n            let vert_shader = load_shader(device, vert_bytes).context(\"loading vert.spv\")?;\n            let frag_shader = load_shader(device, frag_bytes).context(\"loading frag.spv\")?;\n\n            let vert_stage = vk::PipelineShaderStageCreateInfo::builder()\n                .stage(vk::ShaderStageFlags::VERTEX)\n                .module(vert_shader)\n                .name(c\"main\");\n\n            let frag_stage = vk::PipelineShaderStageCreateInfo::builder()\n                .stage(vk::ShaderStageFlags::FRAGMENT)\n                .module(frag_shader)\n                .name(c\"main\");\n\n            let vertex_input_state = vk::PipelineVertexInputStateCreateInfo::builder();\n\n            let input_assembly_state = vk::PipelineInputAssemblyStateCreateInfo::builder()\n                .topology(vk::PrimitiveTopology::TRIANGLE_STRIP)\n                .primitive_restart_enable(false);\n\n            let viewport = vk::Viewport::builder()\n                .x(0.0)\n                .y(0.0)\n                .width(self.width as f32)\n                .height(self.height as f32)\n                .min_depth(0.0)\n                .max_depth(1.0);\n\n            let scissor = vk::Rect2D::builder().extent(vk::Extent2D {\n                width: self.width,\n                height: self.height,\n            });\n\n            let viewports = [viewport.build()];\n            let scissors = [scissor.build()];\n            let viewport_state = vk::PipelineViewportStateCreateInfo::builder()\n                .viewports(&viewports)\n                .scissors(&scissors);\n\n            let rasterization_state = vk::PipelineRasterizationStateCreateInfo::builder()\n                .depth_clamp_enable(false)\n                
.rasterizer_discard_enable(false)\n                .polygon_mode(vk::PolygonMode::FILL)\n                .line_width(1.0)\n                .depth_bias_enable(false)\n                // Per https://www.saschawillems.de/blog/2016/08/13/vulkan-tutorial-on-rendering-a-fullscreen-quad-without-buffers\n                .cull_mode(vk::CullModeFlags::FRONT)\n                .front_face(vk::FrontFace::COUNTER_CLOCKWISE);\n\n            let multisample_state = vk::PipelineMultisampleStateCreateInfo::builder()\n                .sample_shading_enable(false)\n                .rasterization_samples(vk::SampleCountFlags::TYPE_1);\n\n            let attachment = vk::PipelineColorBlendAttachmentState::builder()\n                .color_write_mask(vk::ColorComponentFlags::RGBA)\n                .blend_enable(true)\n                .src_color_blend_factor(vk::BlendFactor::SRC_ALPHA)\n                .dst_color_blend_factor(vk::BlendFactor::ONE_MINUS_SRC_ALPHA)\n                .color_blend_op(vk::BlendOp::ADD)\n                .src_alpha_blend_factor(vk::BlendFactor::ONE)\n                .dst_alpha_blend_factor(vk::BlendFactor::ZERO)\n                .alpha_blend_op(vk::BlendOp::ADD);\n\n            let attachments = [attachment.build()];\n            let color_blend_state = vk::PipelineColorBlendStateCreateInfo::builder()\n                .logic_op_enable(false)\n                .attachments(&attachments);\n\n            let formats = [surface_format.format];\n            let mut pipeline_rendering = vk::PipelineRenderingCreateInfo::builder()\n                .color_attachment_formats(&formats)\n                .build();\n\n            let stages = [vert_stage.build(), frag_stage.build()];\n            let create_info = vk::GraphicsPipelineCreateInfo::builder()\n                .stages(&stages)\n                .vertex_input_state(&vertex_input_state)\n                .input_assembly_state(&input_assembly_state)\n                .viewport_state(&viewport_state)\n                
.rasterization_state(&rasterization_state)\n                .multisample_state(&multisample_state)\n                .color_blend_state(&color_blend_state)\n                .layout(pipeline_layout)\n                .push_next(&mut pipeline_rendering);\n\n            unsafe {\n                let pipeline = match device.create_graphics_pipelines(\n                    vk::PipelineCache::null(),\n                    &[create_info.build()],\n                    None,\n                ) {\n                    Ok(pipelines) => Ok(pipelines[0]),\n                    Err((_, e)) => Err(e),\n                }?;\n\n                device.destroy_shader_module(vert_shader, None);\n                device.destroy_shader_module(frag_shader, None);\n                pipeline\n            }\n        };\n\n        let create_frame = || -> anyhow::Result<InFlightFrame> {\n            let render_cb = {\n                let create_info = vk::CommandBufferAllocateInfo::builder()\n                    .level(vk::CommandBufferLevel::PRIMARY)\n                    .command_pool(self.present_queue.command_pool)\n                    .command_buffer_count(1);\n\n                let cbs = device\n                    .allocate_command_buffers(&create_info)\n                    .context(\"failed to allocate render command buffer\")?;\n\n                cbs[0]\n            };\n\n            let render_fence = create_fence(device, true)?;\n            let image_acquired_sema = create_semaphore(device)?;\n            let render_complete_sema = create_semaphore(device)?;\n\n            Ok(InFlightFrame {\n                render_cb,\n                render_fence,\n                image_acquired_sema,\n                render_complete_sema,\n            })\n        };\n\n        let frames = (0..swapchain_images.len())\n            .map(|_| create_frame())\n            .collect::<anyhow::Result<Vec<_>>>()?;\n\n        let swapchain_images = swapchain_images\n            .into_iter()\n            
.map(|image| {\n                let create_info = vk::ImageViewCreateInfo::builder()\n                    .image(image)\n                    .view_type(vk::ImageViewType::TYPE_2D)\n                    .format(surface_format.format)\n                    .components(vk::ComponentMapping {\n                        r: vk::ComponentSwizzle::IDENTITY,\n                        g: vk::ComponentSwizzle::IDENTITY,\n                        b: vk::ComponentSwizzle::IDENTITY,\n                        a: vk::ComponentSwizzle::IDENTITY,\n                    })\n                    .subresource_range(vk::ImageSubresourceRange {\n                        aspect_mask: vk::ImageAspectFlags::COLOR,\n                        base_mip_level: 0,\n                        level_count: vk::REMAINING_MIP_LEVELS,\n                        base_array_layer: 0,\n                        layer_count: vk::REMAINING_ARRAY_LAYERS,\n                    });\n\n                let image_view = device\n                    .create_image_view(&create_info, None)\n                    .context(\"vkCreateImageView\")?;\n\n                Ok(SwapImage {\n                    image,\n                    view: image_view,\n                })\n            })\n            .collect::<anyhow::Result<Vec<_>>>()?;\n\n        let imgui_renderer = if let Some(ImguiContext { imgui, .. 
}) = &mut self.imgui {\n            Some(imgui_vulkan::Renderer::with_default_allocator(\n                &self.instance,\n                self.pdevice,\n                device.clone(),\n                self.present_queue.queue,\n                self.present_queue.command_pool,\n                imgui_vulkan::DynamicRendering {\n                    color_attachment_format: surface_format.format,\n                    depth_attachment_format: None,\n                },\n                imgui,\n                Some(imgui_vulkan::Options {\n                    in_flight_frames: frames.len(),\n                    ..Default::default()\n                }),\n            )?)\n        } else {\n            None\n        };\n\n        let swapchain = Swapchain {\n            swapchain,\n            frames,\n            present_images: swapchain_images,\n            current_frame: 0,\n\n            descriptor_pool,\n            descriptor_set_layout,\n            pipeline_layout,\n            pipeline,\n\n            imgui_renderer,\n        };\n\n        eprintln!(\"recreated swapchain in {:?}\", start.elapsed());\n\n        if let Some(old_swapchain) = self.swapchain.replace(swapchain) {\n            self.destroy_swapchain(old_swapchain);\n        };\n\n        Ok(())\n    }\n\n    fn handle_event<T>(&mut self, event: &winit::event::Event<T>) -> anyhow::Result<()> {\n        if let Some(ImguiContext {\n            platform, imgui, ..\n        }) = self.imgui.as_mut()\n        {\n            platform.handle_event(imgui.io_mut(), &self.window, event);\n        }\n\n        match event {\n            winit::event::Event::WindowEvent {\n                window_id,\n                event: winit::event::WindowEvent::Resized(size),\n            } if *window_id == self.window.id() => {\n                self.resize(size.width, size.height);\n            }\n            _ => (),\n        }\n\n        Ok(())\n    }\n\n    fn resize(&mut self, width: u32, height: u32) {\n        if 
self.width == width && self.height == height {\n            return;\n        }\n\n        self.width = width;\n        self.height = height;\n        self.swapchain_dirty = true;\n    }\n\n    unsafe fn render(&mut self) -> anyhow::Result<()> {\n        if self.swapchain_dirty || self.swapchain.is_none() {\n            self.recreate_swapchain()?;\n            self.swapchain_dirty = false;\n        }\n\n        let device = &self.device;\n        let swapchain = self.swapchain.as_mut().unwrap();\n        let num_frames = swapchain.frames.len();\n\n        let frame = &mut swapchain.frames[swapchain.current_frame];\n        swapchain.current_frame = (swapchain.current_frame + 1) % num_frames;\n\n        // Wait for the gpu to catch up.\n        device.wait_for_fences(&[frame.render_fence], true, u64::MAX)?;\n\n        let result = self.swapchain_loader.acquire_next_image(\n            swapchain.swapchain,\n            u64::MAX,\n            frame.image_acquired_sema,\n            vk::Fence::null(),\n        );\n\n        let swapchain_index = match result {\n            Ok((image_index, _)) => image_index,\n            Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => {\n                // Recreate and try again.\n                self.swapchain_dirty = true;\n                return self.render();\n            }\n            Err(e) => return Err(e.into()),\n        };\n\n        let present_image = swapchain\n            .present_images\n            .get(swapchain_index as usize)\n            .unwrap();\n\n        // Reset the command buffer.\n        device.reset_command_buffer(frame.render_cb, vk::CommandBufferResetFlags::empty())?;\n\n        // Begin the command buffer.\n        {\n            let begin_info = vk::CommandBufferBeginInfo::builder()\n                .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);\n\n            device.begin_command_buffer(frame.render_cb, &begin_info)?;\n        }\n\n        // Transition the present image to be writable.\n        
cmd_image_barrier(\n            device,\n            frame.render_cb,\n            present_image.image,\n            vk::PipelineStageFlags::TOP_OF_PIPE,\n            vk::AccessFlags::empty(),\n            vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT,\n            vk::AccessFlags::COLOR_ATTACHMENT_WRITE,\n            vk::ImageLayout::UNDEFINED,\n            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,\n        );\n\n        // Begin rendering.\n        {\n            let rect: vk::Rect2D = vk::Rect2D::builder()\n                .extent(vk::Extent2D {\n                    width: self.width,\n                    height: self.height,\n                })\n                .build();\n\n            let clear_value = vk::ClearValue {\n                color: vk::ClearColorValue {\n                    float32: [0.0, 0.0, 0.0, 1.0],\n                },\n            };\n\n            let color_attachment = vk::RenderingAttachmentInfo::builder()\n                .image_view(present_image.view)\n                .image_layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL)\n                .load_op(vk::AttachmentLoadOp::CLEAR)\n                .store_op(vk::AttachmentStoreOp::STORE)\n                .clear_value(clear_value)\n                .build();\n\n            let color_attachments = [color_attachment];\n            let rendering_info = vk::RenderingInfo::builder()\n                .render_area(rect)\n                .color_attachments(&color_attachments)\n                .layer_count(1);\n\n            self.dynamic_rendering_loader\n                .cmd_begin_rendering(frame.render_cb, &rendering_info);\n            device.cmd_bind_pipeline(\n                frame.render_cb,\n                vk::PipelineBindPoint::GRAPHICS,\n                swapchain.pipeline,\n            );\n        }\n\n        device.cmd_push_constants(\n            frame.render_cb,\n            swapchain.pipeline_layout,\n            vk::ShaderStageFlags::VERTEX | vk::ShaderStageFlags::FRAGMENT,\n         
   0,\n            std::slice::from_raw_parts(\n                &self.pc as *const _ as *const u8,\n                std::mem::size_of::<PushConstants>(),\n            ),\n        );\n\n        // Draw the triangle.\n        device.cmd_draw(frame.render_cb, 3, 1, 0, 0);\n\n        // Draw the overlay.\n        if let Some(ImguiContext { platform, imgui }) = self.imgui.as_mut() {\n            let mut formats = self\n                .surface_formats\n                .iter()\n                .map(|sf| sf.format)\n                .collect::<Vec<_>>();\n\n            let mut colorspaces = self\n                .surface_formats\n                .iter()\n                .map(|sf| sf.color_space)\n                .collect::<Vec<_>>();\n\n            formats.sort();\n            formats.dedup();\n            colorspaces.sort();\n            colorspaces.dedup();\n\n            let format_names = formats\n                .iter()\n                .map(|f| format!(\"{:?}\", f))\n                .collect::<Vec<_>>();\n\n            let cs_names = colorspaces\n                .iter()\n                .map(|c| format!(\"{:?}\", c))\n                .collect::<Vec<_>>();\n\n            let mut format_idx = formats.iter().position(|&f| f == self.format).unwrap() as i32;\n            let mut cs_idx = colorspaces\n                .iter()\n                .position(|&c| c == self.colorspace)\n                .unwrap() as i32;\n\n            platform.prepare_frame(imgui.io_mut(), &self.window)?;\n\n            {\n                let ui = imgui.new_frame();\n\n                let [width, _height] = ui.io().display_size;\n\n                let _padding = ui.push_style_var(imgui::StyleVar::WindowPadding([8.0, 8.0]));\n                let _rounding = ui.push_style_var(imgui::StyleVar::WindowRounding(4.0));\n                let _frame_rounding = ui.push_style_var(imgui::StyleVar::FrameRounding(4.0));\n\n                if let Some(_window) = ui\n                    .window(\"controls\")\n     
               .position([width - 16.0, 16.0], imgui::Condition::Always)\n                    .position_pivot([1.0, 0.0])\n                    .bg_alpha(0.8)\n                    .size([250.0, 300.0], imgui::Condition::Always)\n                    .begin()\n                {\n                    let _stretch = ui.push_item_width(-1.0);\n                    ui.text(\"Format:\");\n                    ui.list_box(\n                        \"##format\",\n                        &mut format_idx,\n                        &format_names.iter().map(|f| f.as_str()).collect::<Vec<_>>(),\n                        4,\n                    );\n\n                    ui.text(\"Color Space:\");\n                    ui.list_box(\n                        \"##cs\",\n                        &mut cs_idx,\n                        &cs_names.iter().map(|f| f.as_str()).collect::<Vec<_>>(),\n                        4,\n                    );\n\n                    ui.text(\"Headroom:\");\n                    ui.slider(\"##headroom\", 0.75, 4.0, &mut self.pc.color_mul);\n                }\n\n                platform.prepare_render(ui, &self.window);\n            }\n\n            let renderer = swapchain.imgui_renderer.as_mut().unwrap();\n            renderer.cmd_draw(frame.render_cb, imgui.render())?;\n\n            if formats[format_idx as usize] != self.format {\n                self.format = formats[format_idx as usize];\n                self.swapchain_dirty = true;\n            }\n\n            if colorspaces[cs_idx as usize] != self.colorspace {\n                self.colorspace = colorspaces[cs_idx as usize];\n                self.swapchain_dirty = true;\n            }\n        }\n\n        // Done rendering.\n        self.dynamic_rendering_loader\n            .cmd_end_rendering(frame.render_cb);\n\n        // Transition the present image to be presentable.\n        cmd_image_barrier(\n            device,\n            frame.render_cb,\n            present_image.image,\n            
vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT,\n            vk::AccessFlags::COLOR_ATTACHMENT_WRITE,\n            vk::PipelineStageFlags::BOTTOM_OF_PIPE,\n            vk::AccessFlags::empty(),\n            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,\n            vk::ImageLayout::PRESENT_SRC_KHR,\n        );\n\n        // Submit and present!\n        {\n            let present_queue = self.present_queue.queue;\n\n            device.end_command_buffer(frame.render_cb)?;\n            device.reset_fences(&[frame.render_fence])?;\n\n            let cbs = [frame.render_cb];\n            let wait_semas = [frame.image_acquired_sema];\n            let signal_semas = [frame.render_complete_sema];\n            let wait_stages = [vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT];\n            let submit_info = vk::SubmitInfo::builder()\n                .command_buffers(&cbs)\n                .wait_semaphores(&wait_semas)\n                .wait_dst_stage_mask(&wait_stages)\n                .signal_semaphores(&signal_semas);\n\n            let submits = [submit_info.build()];\n            device.queue_submit(present_queue, &submits, frame.render_fence)?;\n\n            // This \"helps winit [with stuff]\". 
It also seems to increase latency.\n            self.window.pre_present_notify();\n\n            let wait_semas = [frame.render_complete_sema];\n            let swapchains = [swapchain.swapchain];\n            let image_indices = [swapchain_index];\n            let present_info = vk::PresentInfoKHR::builder()\n                .wait_semaphores(&wait_semas)\n                .swapchains(&swapchains)\n                .image_indices(&image_indices);\n\n            self.swapchain_dirty = match self\n                .swapchain_loader\n                .queue_present(present_queue, &present_info)\n            {\n                Ok(false) => self.swapchain_dirty,\n                Ok(true) => true,\n                Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => true,\n                Err(e) => return Err(e.into()),\n            };\n        }\n\n        // Render again!\n        if self.swapchain_dirty {\n            return self.render();\n        }\n\n        Ok(())\n    }\n\n    unsafe fn destroy_swapchain(&mut self, mut swapchain: Swapchain) {\n        let device = &self.device;\n        device.device_wait_idle().unwrap();\n\n        for frame in swapchain.frames.drain(..) {\n            device.free_command_buffers(self.present_queue.command_pool, &[frame.render_cb]);\n            device.destroy_fence(frame.render_fence, None);\n            device.destroy_semaphore(frame.image_acquired_sema, None);\n            device.destroy_semaphore(frame.render_complete_sema, None);\n        }\n\n        for swap_img in swapchain.present_images.drain(..) 
{\n            // Destroying the swapchain does this.\n            // device.destroy_image(swap_img.image, None);\n            device.destroy_image_view(swap_img.view, None);\n        }\n\n        device.destroy_pipeline_layout(swapchain.pipeline_layout, None);\n        device.destroy_descriptor_pool(swapchain.descriptor_pool, None);\n        device.destroy_descriptor_set_layout(swapchain.descriptor_set_layout, None);\n\n        device.destroy_pipeline(swapchain.pipeline, None);\n        self.swapchain_loader\n            .destroy_swapchain(swapchain.swapchain, None)\n    }\n}\n\nimpl Drop for Renderer {\n    fn drop(&mut self) {\n        unsafe {\n            if let Some(swapchain) = self.swapchain.take() {\n                self.destroy_swapchain(swapchain);\n            }\n\n            self.device\n                .destroy_command_pool(self.present_queue.command_pool, None);\n\n            if let Some(debug) = self.debug.take() {\n                debug\n                    .debug\n                    .destroy_debug_utils_messenger(debug.messenger, None);\n            }\n\n            if let Some(imgui) = self.imgui.take() {\n                drop(imgui);\n            }\n\n            self.surface_loader.destroy_surface(self.surface, None);\n            self.device.destroy_device(None);\n            self.instance.destroy_instance(None);\n        }\n    }\n}\n\nfn main() -> anyhow::Result<()> {\n    let event_loop = EventLoop::new()?;\n    let window = WindowBuilder::new()\n        .with_title(\"Colorful Triangle\")\n        .with_inner_size(winit::dpi::LogicalSize::new(800.0, 600.0))\n        .build(&event_loop)\n        .unwrap();\n\n    let window = Rc::new(window);\n    let mut renderer = Renderer::new(window.clone(), cfg!(debug_assertions))?;\n\n    let mut mouse_pressed = false;\n    let mut mouse_pos = glam::Vec2::ZERO;\n\n    event_loop.run(move |event, el| {\n        renderer.handle_event(&event).expect(\"resize failed\");\n\n        match event {\n        
    Event::AboutToWait { .. } => {\n                window.request_redraw();\n            }\n            Event::WindowEvent { window_id, event } if window_id == window.id() => {\n                match event {\n                    WindowEvent::CloseRequested\n                    | WindowEvent::KeyboardInput {\n                        event:\n                            KeyEvent {\n                                state: ElementState::Pressed,\n                                physical_key: PhysicalKey::Code(KeyCode::Escape),\n                                ..\n                            },\n                        ..\n                    } => el.exit(),\n                    WindowEvent::MouseInput {\n                        state,\n                        button: MouseButton::Left,\n                        ..\n                    } => {\n                        mouse_pressed = state == ElementState::Pressed;\n                    }\n                    WindowEvent::CursorMoved { position, .. 
} => {\n                        let phys_size = window.inner_size();\n                        let mouse_x = position.x as f32 / phys_size.width as f32 - 0.5;\n                        let mouse_y = position.y as f32 / phys_size.height as f32 - 0.5;\n                        mouse_pos = glam::Vec2::new(mouse_x, mouse_y);\n                    }\n                    WindowEvent::RedrawRequested => unsafe {\n                        renderer.render().expect(\"render failed\")\n                    },\n                    _ => (),\n                };\n\n                if mouse_pressed {\n                    renderer.pc.mouse = mouse_pos;\n                }\n            }\n            _ => (),\n        }\n    })?;\n\n    Ok(())\n}\n\nfn query_device(\n    instance: &ash::Instance,\n    surface_loader: &SurfaceKhr,\n    surface: vk::SurfaceKHR,\n    device: vk::PhysicalDevice,\n) -> anyhow::Result<DeviceInfo> {\n    let props = unsafe { instance.get_physical_device_properties(device) };\n    let device_type = props.device_type;\n    let device_name = unsafe { CStr::from_ptr(props.device_name.as_ptr()).to_owned() };\n\n    let queue_families = unsafe {\n        instance\n            .get_physical_device_queue_family_properties(device)\n            .into_iter()\n            .collect::<Vec<_>>()\n    };\n\n    let present_family = queue_families\n        .iter()\n        .enumerate()\n        .find(|(idx, properties)| {\n            properties.queue_flags.contains(vk::QueueFlags::GRAPHICS)\n                && properties.queue_flags.contains(vk::QueueFlags::COMPUTE)\n                && unsafe {\n                    surface_loader\n                        .get_physical_device_surface_support(device, *idx as u32, surface)\n                        .unwrap_or(false)\n                }\n        })\n        .map(|(index, _)| index as u32)\n        .to_owned()\n        .ok_or_else(|| anyhow::anyhow!(\"no graphics queue found\"))?;\n\n    let available_extensions = unsafe {\n        
instance\n            .enumerate_device_extension_properties(device)\n            .unwrap()\n            .into_iter()\n            .map(|properties| CStr::from_ptr(&properties.extension_name as *const _).to_owned())\n            .collect::<Vec<_>>()\n    };\n\n    let ext_swapchain = SwapchainKhr::name();\n    if !available_extensions\n        .iter()\n        .any(|ext| **ext == *ext_swapchain)\n    {\n        return Err(anyhow::anyhow!(\"no swapchain extension found\"));\n    }\n\n    Ok(DeviceInfo {\n        device_name,\n        device_type,\n        present_family,\n    })\n}\n\nfn get_queue_with_command_pool(device: &ash::Device, idx: u32) -> Result<VkQueue, vk::Result> {\n    let queue = unsafe { device.get_device_queue(idx, 0) };\n\n    let command_pool = unsafe {\n        let create_info = vk::CommandPoolCreateInfo::builder()\n            .queue_family_index(idx)\n            .flags(vk::CommandPoolCreateFlags::RESET_COMMAND_BUFFER);\n\n        device.create_command_pool(&create_info, None)?\n    };\n\n    Ok(VkQueue {\n        queue,\n        command_pool,\n    })\n}\n\nfn create_fence(device: &ash::Device, signalled: bool) -> Result<vk::Fence, vk::Result> {\n    let mut create_info = vk::FenceCreateInfo::builder();\n    if signalled {\n        create_info = create_info.flags(vk::FenceCreateFlags::SIGNALED);\n    }\n\n    let fence = unsafe { device.create_fence(&create_info, None)? };\n\n    Ok(fence)\n}\n\nfn create_semaphore(device: &ash::Device) -> Result<vk::Semaphore, vk::Result> {\n    let semaphore = unsafe { device.create_semaphore(&vk::SemaphoreCreateInfo::default(), None)? 
};\n    Ok(semaphore)\n}\n\n#[allow(clippy::too_many_arguments)]\nfn cmd_image_barrier(\n    device: &ash::Device,\n    command_buffer: vk::CommandBuffer,\n    image: vk::Image,\n    src_stage_mask: vk::PipelineStageFlags,\n    src_access_mask: vk::AccessFlags,\n    dst_stage_mask: vk::PipelineStageFlags,\n    dst_access_mask: vk::AccessFlags,\n    old_layout: vk::ImageLayout,\n    new_layout: vk::ImageLayout,\n) {\n    let barrier = vk::ImageMemoryBarrier::builder()\n        .src_access_mask(src_access_mask)\n        .dst_access_mask(dst_access_mask)\n        .old_layout(old_layout)\n        .new_layout(new_layout)\n        .image(image)\n        .subresource_range(vk::ImageSubresourceRange {\n            aspect_mask: vk::ImageAspectFlags::COLOR,\n            base_mip_level: 0,\n            level_count: 1,\n            base_array_layer: 0,\n            layer_count: 1,\n        })\n        .build();\n\n    unsafe {\n        device.cmd_pipeline_barrier(\n            command_buffer,\n            src_stage_mask,\n            dst_stage_mask,\n            vk::DependencyFlags::empty(),\n            &[],\n            &[],\n            &[barrier],\n        )\n    };\n}\n\nfn load_shader(device: &ash::Device, bytes: &[u8]) -> anyhow::Result<vk::ShaderModule> {\n    let code = ash::util::read_spv(&mut std::io::Cursor::new(bytes))?;\n    let create_info = vk::ShaderModuleCreateInfo::builder().code(&code);\n\n    let shader = unsafe { device.create_shader_module(&create_info, None)? 
};\n\n    Ok(shader)\n}\n\nfn format_is_srgb(format: vk::Format) -> bool {\n    matches!(\n        format,\n        vk::Format::R8_SRGB\n            | vk::Format::R8G8_SRGB\n            | vk::Format::R8G8B8_SRGB\n            | vk::Format::B8G8R8_SRGB\n            | vk::Format::R8G8B8A8_SRGB\n            | vk::Format::B8G8R8A8_SRGB\n            | vk::Format::A8B8G8R8_SRGB_PACK32\n            | vk::Format::BC1_RGB_SRGB_BLOCK\n            | vk::Format::BC1_RGBA_SRGB_BLOCK\n            | vk::Format::BC2_SRGB_BLOCK\n            | vk::Format::BC3_SRGB_BLOCK\n            | vk::Format::BC7_SRGB_BLOCK\n            | vk::Format::ETC2_R8G8B8_SRGB_BLOCK\n            | vk::Format::ETC2_R8G8B8A1_SRGB_BLOCK\n            | vk::Format::ETC2_R8G8B8A8_SRGB_BLOCK\n            | vk::Format::ASTC_4X4_SRGB_BLOCK\n            | vk::Format::ASTC_5X4_SRGB_BLOCK\n            | vk::Format::ASTC_5X5_SRGB_BLOCK\n            | vk::Format::ASTC_6X5_SRGB_BLOCK\n            | vk::Format::ASTC_6X6_SRGB_BLOCK\n            | vk::Format::ASTC_8X5_SRGB_BLOCK\n            | vk::Format::ASTC_8X6_SRGB_BLOCK\n            | vk::Format::ASTC_8X8_SRGB_BLOCK\n            | vk::Format::ASTC_10X5_SRGB_BLOCK\n            | vk::Format::ASTC_10X6_SRGB_BLOCK\n            | vk::Format::ASTC_10X8_SRGB_BLOCK\n            | vk::Format::ASTC_10X10_SRGB_BLOCK\n            | vk::Format::ASTC_12X10_SRGB_BLOCK\n            | vk::Format::ASTC_12X12_SRGB_BLOCK\n    )\n}\n\nfn colorspace_supported(colorspace: vk::ColorSpaceKHR) -> bool {\n    matches!(\n        colorspace,\n        vk::ColorSpaceKHR::SRGB_NONLINEAR\n            | vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT\n            | vk::ColorSpaceKHR::DISPLAY_P3_LINEAR_EXT\n            | vk::ColorSpaceKHR::DISPLAY_P3_NONLINEAR_EXT\n            | vk::ColorSpaceKHR::DCI_P3_NONLINEAR_EXT\n            | vk::ColorSpaceKHR::BT709_LINEAR_EXT\n            | vk::ColorSpaceKHR::BT709_NONLINEAR_EXT\n            | vk::ColorSpaceKHR::HDR10_ST2084_EXT\n    )\n}\n\nunsafe extern 
\"system\" fn vulkan_debug_utils_callback(\n    _message_severity: vk::DebugUtilsMessageSeverityFlagsEXT,\n    message_type: vk::DebugUtilsMessageTypeFlagsEXT,\n    p_callback_data: *const vk::DebugUtilsMessengerCallbackDataEXT,\n    _userdata: *mut c_void,\n) -> vk::Bool32 {\n    let _ = std::panic::catch_unwind(|| {\n        let message = unsafe { CStr::from_ptr((*p_callback_data).p_message) }.to_string_lossy();\n        let ty = format!(\"{:?}\", message_type).to_lowercase();\n\n        eprintln!(\"VULKAN[{}]: {}\", ty, message);\n    });\n\n    // Must always return false.\n    vk::FALSE\n}\n"
  },
  {
    "path": "test-apps/bin/cursorlock.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\n// Adapted from:\n// https://bevyengine.org/examples/camera/first-person-view-model/\n\nuse bevy::color::palettes::tailwind;\nuse bevy::input::mouse::MouseMotion;\nuse bevy::pbr::NotShadowCaster;\nuse bevy::prelude::*;\nuse bevy::render::view::RenderLayers;\nuse bevy::window::{CursorGrabMode, PrimaryWindow};\n\nfn main() {\n    App::new()\n        .add_plugins(DefaultPlugins)\n        .add_systems(Startup, (spawn_view_model, spawn_world_model, spawn_lights))\n        .add_systems(Update, (move_player, toggle_cursor_lock, update_cursor))\n        .insert_resource(CursorLocked(true))\n        .run();\n}\n\n#[derive(Debug, Component)]\nstruct Player;\n\n#[derive(Debug, Component, Deref, DerefMut)]\nstruct CameraSensitivity(Vec2);\n\nimpl Default for CameraSensitivity {\n    fn default() -> Self {\n        Self(\n            // These factors are just arbitrary mouse sensitivity values.\n            // It's often nicer to have a faster horizontal sensitivity than vertical.\n            // We use a component for them so that we can make them user-configurable at runtime\n            // for accessibility reasons.\n            // It also allows you to inspect them in an editor if you `Reflect` the component.\n            Vec2::new(0.003, 0.002),\n        )\n    }\n}\n\n#[derive(Debug, Component)]\nstruct WorldModelCamera;\n\n#[derive(Debug, Resource)]\nstruct CursorLocked(bool);\n\n/// Used implicitly by all entities without a `RenderLayers` component.\n/// Our world model camera and all objects other than the player are on this\n/// layer. 
The light source belongs to both layers.\nconst DEFAULT_RENDER_LAYER: usize = 0;\n\n/// Used by the view model camera and the player's arm.\n/// The light source belongs to both layers.\nconst VIEW_MODEL_RENDER_LAYER: usize = 1;\n\nfn spawn_view_model(\n    mut commands: Commands,\n    mut meshes: ResMut<Assets<Mesh>>,\n    mut materials: ResMut<Assets<StandardMaterial>>,\n) {\n    let arm = meshes.add(Cuboid::new(0.1, 0.1, 0.5));\n    let arm_material = materials.add(Color::from(tailwind::TEAL_200));\n\n    commands\n        .spawn((\n            Player,\n            CameraSensitivity::default(),\n            Transform::from_xyz(0.0, 1.0, 0.0),\n            Visibility::default(),\n        ))\n        .with_children(|parent| {\n            parent.spawn((\n                WorldModelCamera,\n                Camera3d::default(),\n                Projection::from(PerspectiveProjection {\n                    fov: 90.0_f32.to_radians(),\n                    ..default()\n                }),\n            ));\n\n            // Spawn view model camera.\n            parent.spawn((\n                Camera3d::default(),\n                Camera {\n                    // Bump the order to render on top of the world model.\n                    order: 1,\n                    ..default()\n                },\n                Projection::from(PerspectiveProjection {\n                    fov: 70.0_f32.to_radians(),\n                    ..default()\n                }),\n                // Only render objects belonging to the view model.\n                RenderLayers::layer(VIEW_MODEL_RENDER_LAYER),\n            ));\n\n            // Spawn the player's right arm.\n            parent.spawn((\n                Mesh3d(arm),\n                MeshMaterial3d(arm_material),\n                Transform::from_xyz(0.2, -0.1, -0.25),\n                // Ensure the arm is only rendered by the view model camera.\n                RenderLayers::layer(VIEW_MODEL_RENDER_LAYER),\n                // The arm 
is free-floating, so shadows would look weird.\n                NotShadowCaster,\n            ));\n        });\n}\n\nfn spawn_world_model(\n    mut commands: Commands,\n    mut meshes: ResMut<Assets<Mesh>>,\n    mut materials: ResMut<Assets<StandardMaterial>>,\n) {\n    let floor = meshes.add(Plane3d::new(Vec3::Y, Vec2::splat(10.0)));\n    let cube = meshes.add(Cuboid::new(2.0, 0.5, 1.0));\n    let material = materials.add(Color::WHITE);\n\n    // The world model camera will render the floor and the cubes spawned in this\n    // system. Assigning no `RenderLayers` component defaults to layer 0.\n\n    commands.spawn((Mesh3d(floor), MeshMaterial3d(material.clone())));\n\n    commands.spawn((\n        Mesh3d(cube.clone()),\n        MeshMaterial3d(material.clone()),\n        Transform::from_xyz(0.0, 0.25, -3.0),\n    ));\n\n    commands.spawn((\n        Mesh3d(cube),\n        MeshMaterial3d(material),\n        Transform::from_xyz(0.75, 1.75, 0.0),\n    ));\n}\n\nfn spawn_lights(mut commands: Commands) {\n    commands.spawn((\n        PointLight {\n            color: Color::from(tailwind::ROSE_300),\n            shadows_enabled: true,\n            ..default()\n        },\n        Transform::from_xyz(-2.0, 4.0, -0.75),\n        // The light source illuminates both the world model and the view model.\n        RenderLayers::from_layers(&[DEFAULT_RENDER_LAYER, VIEW_MODEL_RENDER_LAYER]),\n    ));\n}\n\nfn move_player(\n    mut mouse_motion: EventReader<MouseMotion>,\n    mut player: Query<&mut Transform, With<Player>>,\n    cursor_locked: Res<CursorLocked>,\n) {\n    if !cursor_locked.0 {\n        return;\n    }\n\n    let mut transform = player.single_mut();\n    for motion in mouse_motion.read() {\n        let yaw = -motion.delta.x * 0.003;\n        let pitch = -motion.delta.y * 0.002;\n        // Order of rotations is important, see <https://gamedev.stackexchange.com/a/136175/103059>\n        transform.rotate_y(yaw);\n        transform.rotate_local_x(pitch);\n    
}\n}\n\nfn toggle_cursor_lock(input: Res<ButtonInput<KeyCode>>, mut cursor_locked: ResMut<CursorLocked>) {\n    if input.just_pressed(KeyCode::Escape) {\n        cursor_locked.0 = !cursor_locked.0\n    }\n}\n\nfn update_cursor(\n    cursor_locked: Res<CursorLocked>,\n    mut q_windows: Query<&mut Window, With<PrimaryWindow>>,\n) {\n    if !cursor_locked.is_changed() {\n        return;\n    }\n\n    let mut primary_window = q_windows.single_mut();\n    if cursor_locked.0 {\n        primary_window.cursor_options.grab_mode = CursorGrabMode::Locked;\n        primary_window.cursor_options.visible = false;\n    } else {\n        primary_window.cursor_options.grab_mode = CursorGrabMode::None;\n        primary_window.cursor_options.visible = true;\n    }\n}\n"
  },
  {
    "path": "test-apps/bin/latency.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse bevy::{\n    prelude::*,\n    window::{PresentMode, PrimaryWindow, WindowResolution},\n};\nuse clap::Parser;\n\nconst BLOCK_SIZE: f32 = 32.0;\nconst STARTING_POS: Vec3 = Vec3::new(-BLOCK_SIZE / 2.0, BLOCK_SIZE / 2.0, 0.0);\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Resource)]\nenum InputMode {\n    Keyboard,\n    Mouse,\n    Gamepad,\n}\n\n#[derive(Debug, Parser)]\n#[command(name = \"latency-test\")]\n#[command(about = \"The Magic Mirror latency test app\", long_about = None)]\nstruct Cli {\n    /// Mouse mode.\n    #[arg(long)]\n    mouse: bool,\n    #[arg(long)]\n    gamepad: bool,\n}\n\n#[derive(Component)]\nstruct Box(i8);\n\nfn main() {\n    let args = Cli::parse();\n\n    let input_mode = match (args.mouse, args.gamepad) {\n        (true, true) => {\n            eprintln!(\"at most one of --mouse and --gamepad must be specified\");\n            std::process::exit(1);\n        }\n        (true, false) => InputMode::Mouse,\n        (false, true) => InputMode::Gamepad,\n        _ => InputMode::Keyboard,\n    };\n\n    App::new()\n        .add_plugins(DefaultPlugins.set(WindowPlugin {\n            primary_window: Some(Window {\n                title: \"Latency Test\".to_string(),\n                resolution: WindowResolution::new(BLOCK_SIZE * 8.0, BLOCK_SIZE * 8.0),\n                present_mode: PresentMode::Mailbox,\n                ..Default::default()\n            }),\n            ..Default::default()\n        }))\n        .insert_resource(ClearColor(Color::BLACK))\n        .insert_resource(input_mode)\n        .add_systems(Startup, setup)\n        .add_systems(Update, move_box)\n        .run();\n}\n\nfn setup(mut commands: Commands, input_mode: Res<InputMode>) {\n    let starting_pos = if *input_mode == InputMode::Mouse || *input_mode == InputMode::Gamepad {\n        STARTING_POS\n    } else {\n        // Offscreen.\n        Vec3::new(BLOCK_SIZE * 
-100.0, BLOCK_SIZE * 100.0, 0.0)\n    };\n\n    commands.spawn(Camera2d::default());\n    commands.spawn((\n        Sprite {\n            color: Color::WHITE,\n            custom_size: Some(Vec2::new(BLOCK_SIZE, BLOCK_SIZE)),\n            anchor: bevy::sprite::Anchor::TopLeft,\n            ..default()\n        },\n        Transform::from_translation(starting_pos),\n        Box(-1),\n    ));\n}\n\nfn move_box(\n    keyboard_input: Res<ButtonInput<KeyCode>>,\n    input_mode: Res<InputMode>,\n    gamepads: Query<&Gamepad>,\n    q_windows: Query<&Window, With<PrimaryWindow>>,\n    q_camera: Query<(&Camera, &GlobalTransform)>,\n    mut q_box: Query<(&mut Box, &mut Transform)>,\n) {\n    let (mut b, mut transform) = q_box.single_mut();\n    let window = q_windows.single();\n    let (camera, camera_transform) = q_camera.single();\n\n    match *input_mode {\n        InputMode::Gamepad => {\n            for gamepad in &gamepads {\n                if gamepad.just_pressed(GamepadButton::South) {\n                    transform.translation = STARTING_POS;\n                }\n\n                let rx = gamepad.get(GamepadAxis::RightStickX).unwrap();\n                let ry = gamepad.get(GamepadAxis::RightStickY).unwrap();\n                transform.translation += Vec3::new(rx, ry, 0.0);\n            }\n        }\n        InputMode::Mouse => {\n            if let Some(position) = window\n                .cursor_position()\n                .and_then(|cursor| camera.viewport_to_world(camera_transform, cursor).ok())\n                .map(|ray| ray.origin.truncate())\n            {\n                transform.translation.x = position.x - BLOCK_SIZE / 2.0;\n                transform.translation.y = position.y + BLOCK_SIZE / 2.0;\n            }\n        }\n        InputMode::Keyboard => {\n            if keyboard_input.just_pressed(KeyCode::Space) {\n                b.0 = (b.0 + 1) % 64;\n                let y = b.0 / 8;\n                let x = b.0 % 8;\n\n                
transform.translation.x = BLOCK_SIZE * (-4.0 + x as f32);\n                transform.translation.y = BLOCK_SIZE * (4.0 - y as f32);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "test-apps/build.rs",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nuse std::path::PathBuf;\n\nextern crate slang;\n\nfn main() {\n    let mut session = slang::GlobalSession::new();\n    let out_dir = std::env::var(\"OUT_DIR\").map(PathBuf::from).unwrap();\n\n    compile_shader(\n        &mut session,\n        \"src/color-test.slang\",\n        out_dir.join(\"color-test/frag.spv\").to_str().unwrap(),\n        \"frag\",\n        slang::Stage::Fragment,\n    );\n\n    compile_shader(\n        &mut session,\n        \"src/color-test.slang\",\n        out_dir.join(\"color-test/vert.spv\").to_str().unwrap(),\n        \"vert\",\n        slang::Stage::Vertex,\n    );\n}\n\nfn compile_shader(\n    session: &mut slang::GlobalSession,\n    in_path: &str,\n    out_path: &str,\n    entry_point: &str,\n    stage: slang::Stage,\n) {\n    std::fs::create_dir_all(PathBuf::from(out_path).parent().unwrap())\n        .expect(\"failed to create output directory\");\n\n    let mut compile_request = session.create_compile_request();\n\n    compile_request\n        .set_codegen_target(slang::CompileTarget::Spirv)\n        .set_optimization_level(slang::OptimizationLevel::Maximal)\n        .set_target_profile(session.find_profile(\"glsl_460\"));\n\n    let entry_point = compile_request\n        .add_translation_unit(slang::SourceLanguage::Slang, None)\n        .add_source_file(in_path)\n        .add_entry_point(entry_point, stage);\n\n    let shader_bytecode = compile_request\n        .compile()\n        .expect(\"Shader compilation failed.\");\n\n    std::fs::write(out_path, shader_bytecode.get_entry_point_code(entry_point))\n        .expect(\"failed to write shader bytecode to file\");\n\n    println!(\"cargo::rerun-if-changed={}\", in_path);\n}\n"
  },
  {
    "path": "test-apps/src/color-test.slang",
    "content": "// Copyright 2024 Colin Marc <hi@colinmarc.com>\n//\n// SPDX-License-Identifier: MIT\n\nstruct PushConstants\n{\n    float2 size;\n    float2 mouse;\n    float mul;\n    int color_space;\n};\n\nstatic const int VK_COLOR_SPACE_SRGB_NONLINEAR_EXT = 0;\nstatic const int VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1_000_104_001;\nstatic const int VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1_000_104_002;\nstatic const int VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1_000_104_003;\nstatic const int VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1_000_104_004;\nstatic const int VK_COLOR_SPACE_BT709_LINEAR_EXT = 1_000_104_005;\nstatic const int VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1_000_104_006;\nstatic const int VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1_000_104_007;\nstatic const int VK_COLOR_SPACE_HDR10_ST2084_EXT = 1_000_104_008;\nstatic const int VK_COLOR_SPACE_HDR10_HLG_EXT = 1_000_104_010;\nstatic const int VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1_000_104_014;\n\n[[vk::push_constant]]\nPushConstants pc;\n\nstruct VertOutput\n{\n    float2 uv : TextureCoord;\n    float2 mouse : MouseCoord;\n    float4 position : SV_Position;\n};\n\n[shader(\"vertex\")]\nVertOutput vert(uint vertexID: SV_VertexID)\n{\n    VertOutput output;\n\n    float2 aspect;\n    if (pc.size.x > pc.size.y)\n    {\n        aspect = float2(pc.size.x / pc.size.y, 1.0);\n    }\n    else\n    {\n        aspect = float2(1.0, pc.size.y / pc.size.x);\n    }\n\n    let uv = float2((vertexID << 1) & 2, vertexID & 2);\n    output.uv = uv;\n    output.mouse = pc.mouse * aspect;\n    output.position = float4((uv * 2.0 - 1.0) / aspect, 0.0, 1.0);\n    return output;\n}\n\n// Adapted from \"Color Wheel\", by GoldenCrystal:\n// https://www.shadertoy.com/view/MsXXzX\nstatic const float M_PI = 3.14159265358979323846;\nstatic const float AA = 250;\n\nfloat3 hue(float2 pos)\n{\n    float theta = 3.0 + 3.0 * atan2(pos.x, pos.y) / M_PI;\n    float3 color = float3(0.0);\n    return clamp(abs(((theta + float3(0.0, 4.0, 
2.0)) % 6.0) - 3.0) - 1.0, 0.0, 1.0);\n}\n\nfloat4 color_wheel(float2 coord, float2 mouse)\n{\n    float l = length(coord);\n    float m = length(mouse);\n\n    float4 color = float4(0.0);\n\n    if (l >= 0.75 && l <= 1.0)\n    {\n        l = 1.0 - abs((l - 0.875) * 8.0);\n        l = clamp(l * AA * 0.0625, 0.0, 1.0); // Antialiasing approximation\n\n        color = float4(l * hue(coord), l);\n    }\n    else if (l < 0.75)\n    {\n        float3 picked;\n\n        if (m < 0.75 || m > 1.0)\n        {\n            mouse = float2(0.0, -1.0);\n            picked = float3(1.0, 0.0, 0.0);\n        }\n        else\n        {\n            picked = hue(mouse);\n        }\n\n        coord = coord / 0.75;\n        mouse = normalize(mouse);\n\n        float sat = 1.5 - (dot(coord, mouse) + 0.5); // [0.0,1.5]\n\n        if (sat < 1.5)\n        {\n            float h = sat / sqrt(3.0);\n            float2 om = float2(cross(float3(mouse, 0.0), float3(0.0, 0.0, 1.0)).xy);\n            float lum = dot(coord, om);\n\n            if (abs(lum) <= h)\n            {\n                let l = clamp((h - abs(lum)) * AA * 0.5, 0.0, 1.0) * clamp((1.5 - sat) / 1.5 * AA * 0.5, 0.0, 1.0); // Fake antialiasing\n                return float4(l * lerp(picked, float3(0.5 * (lum + h) / h), sat / 1.5), l);\n            }\n        }\n    }\n\n    return color;\n}\n\n[shader(\"fragment\")]\nfloat4 frag(\n    float2 uv: TextureCoord,\n    float2 mouse: MouseCoord)\n    : SV_Target\n{\n    float2 uv = uv.xy * 2.0 - 1.0;\n    float2 mouse = mouse * 2.0;\n\n    uv = uv / 0.75;\n    mouse = mouse / 0.75;\n\n    let color =  color_wheel(uv, mouse) * pc.mul;\n    switch (pc.color_space)\n    {\n    case VK_COLOR_SPACE_SRGB_NONLINEAR_EXT:\n        return delinearize_srgb(color);\n    case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:\n        return delinearize_srgb(color);\n    case VK_COLOR_SPACE_BT709_NONLINEAR_EXT:\n        return delinearize_bt709(color);\n    case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT:\n        
return delinearize_dci_p3(color);\n    case VK_COLOR_SPACE_HDR10_ST2084_EXT:\n        return delinearize_pq(color);\n    default:\n        return color;\n    }\n}\n\nfloat4 delinearize_srgb(float4 color)\n{\n    return float4(\n        delinearize_srgb(color.r),\n        delinearize_srgb(color.g),\n        delinearize_srgb(color.b),\n        color.a);\n}\n\nfloat delinearize_srgb(float channel)\n{\n     return channel > 0.0031308 ? 1.055 * pow(channel, 1.0 / 2.4) - 0.055 : 12.92 * channel;\n}\n\nfloat4 delinearize_bt709(float4 color)\n{\n    return float4(\n        delinearize_bt709(color.r),\n        delinearize_bt709(color.g),\n        delinearize_bt709(color.b),\n        color.a);\n}\n\nfloat delinearize_bt709(float channel)\n{\n    return channel >= 0.018 ? 1.099 * pow(channel, 1.0 / 2.2) - 0.099 : 4.5 * channel;\n}\n\nfloat4 delinearize_dci_p3(float4 color)\n{\n    return float4(\n        delinearize_dci_p3(color.r),\n        delinearize_dci_p3(color.g),\n        delinearize_dci_p3(color.b),\n        color.a);\n}\n\nfloat delinearize_dci_p3(float channel)\n{\n    return pow(channel, 1.0/2.6);\n}\n\nstatic const float PQ_M1 = 0.1593017578125;\nstatic const float PQ_M2 = 78.84375;\nstatic const float PQ_C1 = 0.8359375;\nstatic const float PQ_C2 = 18.8515625;\nstatic const float PQ_C3 = 18.6875;\n\nstatic const float REFERENCE_WHITE = 203.0;\n\nfloat4 delinearize_pq(float4 color)\n{\n    return float4(\n        delinearize_pq(color.r),\n        delinearize_pq(color.g),\n        delinearize_pq(color.b),\n        color.a);\n}\n\nfloat delinearize_pq(float channel)\n{\n        let c = pow(channel * REFERENCE_WHITE / 10000.0, PQ_M1);\n    return pow(\n        (PQ_C1 + PQ_C2 * c) / (1.0 + PQ_C3 * c),\n        PQ_M2);\n}\n"
  }
]