Full Code of google/audion for AI

main 11805e3151d3 cached
101 files
271.5 KB
71.7k tokens
335 symbols
1 requests
Download .txt
Showing preview only (296K chars total). Download the full file or copy to clipboard to get everything.
Repository: google/audion
Branch: main
Commit: 11805e3151d3
Files: 101
Total size: 271.5 KB

Directory structure:
gitextract_0gd149fy/

├── .babelrc
├── .editorconfig
├── .eslintrc.json
├── .github/
│   └── workflows/
│       └── nodejs-ci.yml
├── .gitignore
├── .husky/
│   ├── .gitignore
│   └── pre-commit
├── .jsdoc.json
├── .prettierrc
├── LICENSE
├── README.md
├── fixtures/
│   └── oscillatorGainParam.ts
├── package.json
├── simulations/
│   ├── updateGraphRender.html
│   ├── updateGraphRender.ts
│   └── webpack.config.js
├── src/
│   ├── .jest.config.json
│   ├── build/
│   │   ├── make-chrome-extension.js
│   │   └── manifest.json.mustache
│   ├── chrome/
│   │   ├── API.js
│   │   ├── Debugger.js
│   │   ├── DebuggerPageDomain.ts
│   │   ├── DebuggerWebAudioDomain.ts
│   │   ├── DevTools.js
│   │   ├── Runtime.js
│   │   ├── Types.js
│   │   └── index.js
│   ├── custom.d.ts
│   ├── devtools/
│   │   ├── DebuggerAttachEventController.ts
│   │   ├── DebuggerEvents.ts
│   │   ├── DevtoolsGraphPanel.test.js
│   │   ├── DevtoolsGraphPanel.ts
│   │   ├── Types.ts
│   │   ├── WebAudioEventObserver.test.js
│   │   ├── WebAudioEventObserver.ts
│   │   ├── WebAudioGraphIntegrator.test.js
│   │   ├── WebAudioGraphIntegrator.ts
│   │   ├── WebAudioRealtimeData.ts
│   │   ├── deserializeGraphContext.ts
│   │   ├── layoutGraphContext.ts
│   │   ├── main.ts
│   │   ├── partitionMap.ts
│   │   ├── serializeGraphContext.js
│   │   └── setOptionsToGraphContext.ts
│   ├── devtools.html
│   ├── extraSettingPage/
│   │   ├── options.html
│   │   └── options.js
│   ├── panel/
│   │   ├── GraphSelector.ts
│   │   ├── Observer.runtime.ts
│   │   ├── Types.ts
│   │   ├── components/
│   │   │   ├── WholeGraphButton.css
│   │   │   ├── WholeGraphButton.ts
│   │   │   ├── collectGarbage.css
│   │   │   ├── collectGarbage.ts
│   │   │   ├── detailPanel.css
│   │   │   ├── detailPanel.ts
│   │   │   ├── domUtils.ts
│   │   │   ├── realtimeSummary.ts
│   │   │   ├── selectGraph.css
│   │   │   └── selectGraph.ts
│   │   ├── graph/
│   │   │   ├── AudioEdgeArrowGraphics.ts
│   │   │   ├── AudioEdgeCurvedLineGraphics.ts
│   │   │   ├── AudioEdgeRender.ts
│   │   │   ├── AudioGraphRender.ts
│   │   │   ├── AudioGraphText.ts
│   │   │   ├── AudioGraphTextCacheGroup.ts
│   │   │   ├── AudioNodeBackground.ts
│   │   │   ├── AudioNodeBackgroundRenderCacheGroup.ts
│   │   │   ├── AudioNodePort.ts
│   │   │   ├── AudioNodeRender.ts
│   │   │   ├── AudioPortCacheGroup.ts
│   │   │   ├── Camera.js
│   │   │   ├── GraphicsCache.ts
│   │   │   ├── graphStyle.js
│   │   │   └── graphStyle.ts
│   │   ├── main.ts
│   │   ├── updateGraphRender.ts
│   │   ├── updateGraphSizes.ts
│   │   └── worker.ts
│   ├── panel.html
│   ├── utils/
│   │   ├── Observer.emitter.js
│   │   ├── Observer.test.js
│   │   ├── Observer.ts
│   │   ├── Types.ts
│   │   ├── dlog.js
│   │   ├── error.js
│   │   ├── error.test.js
│   │   ├── index.js
│   │   ├── mapThruWorker.ts
│   │   ├── math.js
│   │   ├── retry.js
│   │   ├── retry.test.js
│   │   ├── rxChrome.ts
│   │   └── rxInterop.ts
│   └── webpack.config.js
├── test/
│   ├── .jest-puppeteer.config.json
│   ├── .jest.config.json
│   ├── README.md
│   ├── browserLaunch.js
│   └── updateGraphRender.js
└── tsconfig.json

================================================
FILE CONTENTS
================================================

================================================
FILE: .babelrc
================================================
{
  "plugins": [[
    "@babel/plugin-transform-modules-commonjs"
  ], [
    "@babel/plugin-proposal-optional-chaining"
  ]],
  "presets": ["@babel/preset-typescript"]
}


================================================
FILE: .editorconfig
================================================
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = false


================================================
FILE: .eslintrc.json
================================================
{
  "env": {
    "browser": true,
    "es2021": true,
    "node": true
  },
  "extends": ["eslint:recommended", "google"],
  "parserOptions": {
    "ecmaVersion": 12,
    "sourceType": "module"
  },
  "rules": {
    // Indent files with prettier
    "indent": ["off"],
    // Allow triple slash comments
    "spaced-comment": ["error", "always", {"markers": ["/"]}],
    "operator-linebreak": ["off"]
  }
}


================================================
FILE: .github/workflows/nodejs-ci.yml
================================================
# Continuous-integration workflow: installs dependencies and runs the
# test suite on every push and pull request.
name: Node.js CI

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Use Node.js "18.x"
        uses: actions/setup-node@v3
        with:
          node-version: '18.x'
      # Clean install from package-lock.json for a reproducible build.
      - run: npm ci
      # Tests launch a browser (see test/browserLaunch.js), which needs a
      # display; xvfb provides a virtual X server on the headless runner.
      - name: Run npm test with xvfb
        uses: coactions/setup-xvfb@v1
        with:
          run: npm test


================================================
FILE: .gitignore
================================================
.DS_Store
# dependencies
node_modules
# build/test
.eslintcache
docs
coverage
/build
/simulations/build
!src/build


================================================
FILE: .husky/.gitignore
================================================
_


================================================
FILE: .husky/pre-commit
================================================
#!/bin/sh
# Husky pre-commit hook: load husky's shell helpers, then run lint-staged
# so only the files staged for this commit are linted/formatted.
. "$(dirname "$0")/_/husky.sh"

npx lint-staged


================================================
FILE: .jsdoc.json
================================================
{
  "source": {
    "include": ["./src/"],
    "includePattern": ".+\\.js(doc)?$",
    "excludePattern": "(^|\\/|\\\\)_|\\.test\\.js$"
  },
  "opts": {
    "encoding": "utf8",
    "recurse": true,
    "private": false,
    "lenient": true,
    "destination": "./docs",
    "template": "./node_modules/@pixi/jsdoc-template",
    "readme": "README.md"
  },
  "plugins": ["plugins/markdown"]
}


================================================
FILE: .prettierrc
================================================
{
  "tabWidth": 2,
  "useTabs": false,
  "trailingComma": "all",
  "singleQuote": true,
  "bracketSpacing": false
}


================================================
FILE: LICENSE
================================================

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!)  The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2021 Google, Inc.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.


================================================
FILE: README.md
================================================
# Audion: Web Audio Graph Visualizer

[![Node.js CI](https://github.com/GoogleChrome/audion/actions/workflows/nodejs-ci.yml/badge.svg)](https://github.com/GoogleChrome/audion/actions/workflows/nodejs-ci.yml)

Audion is a Chrome extension that adds a panel to DevTools. This panel
visualizes the audio graph (programmed with Web Audio API) in real-time.
You can install the extension from its Chrome Web Store page.

![Google Doodle Hiphop](https://raw.githubusercontent.com/GoogleChrome/audion/main/images/hiphop-doodle.png)

## Usage

1. [Install the extension](https://chrome.google.com/webstore/detail/audion/cmhomipkklckpomafalojobppmmidlgl)
   from Chrome Web Store.
   1. Alternatively, you can clone this repository and build the extension
      locally. Follow
      [this instruction](https://developer.chrome.com/docs/extensions/mv3/faq/#faq-dev-01)
      to load the local build.
1. [Open Chrome Developer Tools](https://developer.chrome.com/docs/devtools/open/).
   You should be able to find the “Web Audio” panel at the top. Select the panel.
1. Visit or reload a page that uses Web Audio API. If the page is loaded before
   opening Developer Tools, you need to reload the page for the extension to
   work correctly.
1. You can pan and zoom with the mouse and wheel. Click the “autofit” button to
   fit the graph within the panel.

## Development

### Build and test the extension

1. Install NodeJS 14 or later.
1. Install dependencies with `npm ci` or `npm install`.
1. Run `npm test` to build and test the extension.

#### Install the development copy of the extension

1. Open `chrome://extensions` in Chrome.
1. Turn on `Developer mode` if it is not already active.
1. Load an unpacked extension with the `Load unpacked` button. In the file
   modal that opens, select the `audion` directory inside of the `build`
   directory under the copy of this repository.

#### Use and make changes to the extension

1. Open the added `Web Audio` panel in an inspector window with a page that
   uses Web Audio API.
1. Make changes to the extension and rebuild with `npm test` or `npm run build`.
1. Open `chrome://extensions`, click `Update` to reload the rebuilt extension.
   Close and reopen any tab and inspector to get the rebuilt extension's panel.

### Use extra debugging information

1. Open the extension option panel and check "Click here to show more debug
   info".
2. Right-click the visualizer panel and click "Inspect" to open the
   extension's DevTools panel, then check the console for the extra debugging
   information.

## Acknowledgments

Special thanks to [Chi Zeng](https://github.com/chihuahua) (Google),
[Gaoping Huang](https://github.com/gaopinghuang0),
[Michael "Z" Goddard](https://github.com/mzgoddard)
([Bocoup](https://bocoup.com/)) and
[Tenghui Zhang](https://github.com/TenghuiZhang) for their contributions to
this project.

## Contribution

If you have found an error in this library, please file an issue at:
https://github.com/GoogleChrome/audion/issues.

Patches are encouraged, and may be submitted by forking this project and
submitting a pull request through GitHub. See CONTRIBUTING for more detail.

## License

Copyright 2021 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.


================================================
FILE: fixtures/oscillatorGainParam.ts
================================================
/**
 * Event sequences that would be produced by an audio context with oscillator
 * and gain nodes connecting outputs to params.
 *
 * @file
 */

import {WebAudioDebuggerEvent} from '../src/chrome/DebuggerWebAudioDomain';
import {Audion} from '../src/devtools/Types';

/**
 * A sequence of events produced by WebAudioEventObservable from a context
 * connecting some oscillator and gain nodes, in particular connecting an
 * output to another gain node's gain param.
 *
 * @example
 *   // unit and integration tests can replace
 *   new WebAudioEventObservable()
 *   // with something like
 *   from(OSCILLATOR_GAIN_PARAM_EVENTS)
 *   // or something over time such as
 *   interval(50).pipe(map((_, i) =>
 *     OSCILLATOR_GAIN_PARAM_EVENTS[i]))
 *
 * @example
 *   // context that creates this sequence from
 *   // WebAudioEventObservable
 *   const audioContext = new AudioContext();
 *   const delayNode = new DelayNode(audioContext,
 *     {delayTime: delayTime});
 *   const inputNode = new GainNode(audioContext);
 *   const outputNode = new GainNode(audioContext);
 *   const depthNode = new GainNode(audioContext,
 *     {gain: width});
 *   const oscillatorNode = new OscillatorNode(audioContext,
 *     {type: "sine", frequency: speed});
 *   inputNode.connect(delayNode);
 *   delayNode.connect(outputNode);
 *   oscillatorNode.connect(depthNode);
 *   depthNode.connect(delayNode.delayTime);
 *
 * @see https://github.com/GoogleChrome/audion/issues/117
 */
export const OSCILLATOR_GAIN_PARAM_EVENTS: Audion.WebAudioEvent[] = [
  {
    method: WebAudioDebuggerEvent.contextCreated,
    params: {
      context: {
        callbackBufferSize: 256,
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        contextState: 'suspended',
        contextType: 'realtime',
        maxOutputChannelCount: 2,
        sampleRate: 48000,
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioNodeCreated,
    params: {
      node: {
        channelCount: 2,
        channelCountMode: 'explicit',
        channelInterpretation: 'speakers',
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        nodeId: '57a4d84b-6165-495e-9ad7-2ad82497d423',
        nodeType: 'AudioDestination',
        numberOfInputs: 1,
        numberOfOutputs: 0,
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioListenerCreated,
    params: {
      listener: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        listenerId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: '63a77a6c-1779-42df-bedc-c68c5171722f',
        paramType: 'positionX',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: 'e15f2c0e-f466-4d2a-92a2-c3fe23e591f5',
        paramType: 'positionY',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: 'bbabbcc8-91eb-4014-9351-43e1742644e9',
        paramType: 'positionZ',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: '4e3f5c2d-6b59-4a69-ab4f-da62db30e7db',
        paramType: 'forwardX',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: 'd2425aaa-dc91-4e60-ba57-22be7b26f941',
        paramType: 'forwardY',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: -1,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: '1842fc18-6b51-402b-97f1-c56d4681866a',
        paramType: 'forwardZ',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: '872a56b9-ed99-47ea-9957-bda9307fac5b',
        paramType: 'upX',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 1,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: '4acf61c7-363f-44af-9857-c5e8c8ea5629',
        paramType: 'upY',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',
        paramId: '4b818074-5b96-42c3-b2e6-fcdd350e37bb',
        paramType: 'upZ',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioNodeCreated,
    params: {
      node: {
        channelCount: 2,
        channelCountMode: 'max',
        channelInterpretation: 'speakers',
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        nodeId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76',
        nodeType: 'Delay',
        numberOfInputs: 1,
        numberOfOutputs: 1,
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 1,
        minValue: 0,
        nodeId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76',
        paramId: 'a88ea483-fc15-4c2b-ab0c-597af8e069b9',
        paramType: 'delayTime',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioNodeCreated,
    params: {
      node: {
        channelCount: 2,
        channelCountMode: 'max',
        channelInterpretation: 'speakers',
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        nodeId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f',
        nodeType: 'Gain',
        numberOfInputs: 1,
        numberOfOutputs: 1,
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 1,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f',
        paramId: '03e13b59-a58f-4883-8479-d7a048ebe80a',
        paramType: 'gain',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioNodeCreated,
    params: {
      node: {
        channelCount: 2,
        channelCountMode: 'max',
        channelInterpretation: 'speakers',
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        nodeId: '78b78fae-b32e-4993-a2b4-7523c08e16c0',
        nodeType: 'Gain',
        numberOfInputs: 1,
        numberOfOutputs: 1,
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 1,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: '78b78fae-b32e-4993-a2b4-7523c08e16c0',
        paramId: 'b6ea1b98-2dda-43d0-8a52-49492fcafdde',
        paramType: 'gain',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioNodeCreated,
    params: {
      node: {
        channelCount: 2,
        channelCountMode: 'max',
        channelInterpretation: 'speakers',
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        nodeId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f',
        nodeType: 'Gain',
        numberOfInputs: 1,
        numberOfOutputs: 1,
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 1,
        maxValue: 3.4028234663852886e38,
        minValue: -3.4028234663852886e38,
        nodeId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f',
        paramId: '38ec329f-650c-4c35-805c-32c559b47ea7',
        paramType: 'gain',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioNodeCreated,
    params: {
      node: {
        channelCount: 2,
        channelCountMode: 'max',
        channelInterpretation: 'speakers',
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf',
        nodeType: 'Oscillator',
        numberOfInputs: 0,
        numberOfOutputs: 1,
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 0,
        maxValue: 153600,
        minValue: -153600,
        nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf',
        paramId: '0b2b73d2-bc98-423b-a19c-1a0651e06d20',
        paramType: 'detune',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.audioParamCreated,
    params: {
      param: {
        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
        defaultValue: 440,
        maxValue: 24000,
        minValue: -24000,
        nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf',
        paramId: '42dddc62-c058-473e-9f48-a678a708c001',
        paramType: 'frequency',
        rate: 'a-rate',
      },
    },
  },
  {
    method: WebAudioDebuggerEvent.nodesConnected,
    params: {
      contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
      destinationId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76',
      destinationInputIndex: 0,
      sourceId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f',
      sourceOutputIndex: 0,
    },
  },
  {
    method: WebAudioDebuggerEvent.nodesConnected,
    params: {
      contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
      destinationId: '78b78fae-b32e-4993-a2b4-7523c08e16c0',
      destinationInputIndex: 0,
      sourceId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76',
      sourceOutputIndex: 0,
    },
  },
  {
    method: WebAudioDebuggerEvent.nodesConnected,
    params: {
      contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
      destinationId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f',
      destinationInputIndex: 0,
      sourceId: '59200b98-60e1-43cf-88f6-d0a33d5643cf',
      sourceOutputIndex: 0,
    },
  },
  {
    method: WebAudioDebuggerEvent.nodeParamConnected,
    params: {
      contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',
      destinationId: 'a88ea483-fc15-4c2b-ab0c-597af8e069b9',
      sourceId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f',
      sourceOutputIndex: 0,
    },
  },
  {
    method: WebAudioDebuggerEvent.contextWillBeDestroyed,
    params: {contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62'},
  },
];


================================================
FILE: package.json
================================================
{
  "name": "audion",
  "private": true,
  "version": "3.0.9",
  "description": "A Chrome DevTools extension that traces Web Audio API calls and visualizes them in the DevTools.",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/GoogleChrome/audion.git"
  },
  "keywords": [],
  "author": "",
  "license": "Apache-2.0",
  "bugs": {
    "url": "https://github.com/GoogleChrome/audion/issues"
  },
  "homepage": "https://github.com/GoogleChrome/audion#readme",
  "main": "index.js",
  "engines": {
    "node": "18"
  },
  "dependencies": {
    "@pixi/unsafe-eval": "^7.2.4",
    "dagre": "^0.8.5",
    "pixi.js": "^7.2.4",
    "rxjs": "^7.8.1",
    "taffydb": "^2.7.3"
  },
  "devDependencies": {
    "@babel/core": "^7.14.6",
    "@babel/plugin-proposal-optional-chaining": "^7.16.7",
    "@babel/plugin-transform-modules-commonjs": "^7.14.5",
    "@babel/preset-typescript": "^7.16.7",
    "@pixi/jsdoc-template": "^2.6.0",
    "@types/dagre": "^0.7.46",
    "@types/graphlib": "^2.1.8",
    "babel-jest": "^29.5.0",
    "copy-webpack-plugin": "^11.0.0",
    "css-loader": "^6.6.0",
    "devtools-protocol": "^0.0.924232",
    "eslint": "^8.40.0",
    "eslint-config-google": "^0.14.0",
    "file-loader": "^6.2.0",
    "husky": ">=6",
    "jest": "^27.0.6",
    "jest-puppeteer": "^5.0.4",
    "jsdoc": "^4.0.2",
    "lint-staged": ">=10",
    "mustache": "^4.2.0",
    "pinst": ">=2",
    "prettier": "^2.3.2",
    "puppeteer": "^9.1.1",
    "raw-loader": "^4.0.2",
    "rimraf": "^3.0.2",
    "source-map-loader": "^3.0.0",
    "style-loader": "^3.2.1",
    "ts-loader": "^9.2.6",
    "typescript": "^4.4.3",
    "webpack": "^5.44.0",
    "webpack-cli": "^4.7.2",
    "yazl": "^2.5.1"
  },
  "scripts": {
    "build:chrome-extension": "node src/build/make-chrome-extension.js",
    "build:clean": "rimraf build",
    "build:webpack": "webpack --mode production --config src/webpack.config.js",
    "build": "npm run build:clean && npm run build:webpack && npm run build:chrome-extension",
    "clean": "rimraf build docs src/coverage simulations/build",
    "dev": "webpack --mode development --config src/webpack.config.js && npm run build:chrome-extension",
    "postinstall": "husky install",
    "postpublish": "pinst --enable",
    "prepublishOnly": "pinst --disable",
    "test:integration:build": "npm run test:integration:clean && npm run test:integration:webpack",
    "test:integration:clean": "rimraf simulations/build",
    "test:integration:webpack": "webpack --mode development --config simulations/webpack.config.js",
    "test:integration:run": "JEST_PUPPETEER_CONFIG=test/.jest-puppeteer.config.json jest --config test/.jest.config.json",
    "test:integration": "npm run build && npm run test:integration:build && npm run test:integration:run",
    "test:jsdoc": "jsdoc -c .jsdoc.json",
    "test:lint:eslint": "eslint src/**/*.js",
    "test:lint:prettier": "prettier --check src/**/*.{js,ts}",
    "test:lint": "npm run test:lint:eslint && npm run test:lint:prettier",
    "test:unit": "jest --config src/.jest.config.json",
    "test": "npm run test:lint && npm run test:jsdoc && npm run test:unit && npm run test:integration"
  },
  "lint-staged": {
    "*.{js}": "eslint --cache --fix",
    "*.{js,ts,json,css,md}": "prettier --write"
  }
}


================================================
FILE: simulations/updateGraphRender.html
================================================
<div class="graph" style="height: 100%"></div>
<script src="./build/updateGraphRender.js"></script>


================================================
FILE: simulations/updateGraphRender.ts
================================================
import {
  auditTime,
  EMPTY,
  filter,
  finalize,
  from,
  interval,
  map,
  pipe,
  switchMap,
  take,
} from 'rxjs';

import {layoutGraphContext} from '../src/devtools/layoutGraphContext';
import {deserializeGraphContext} from '../src/devtools/deserializeGraphContext';
import {serializeGraphContext} from '../src/devtools/serializeGraphContext';
import {WebAudioRealtimeData} from '../src/devtools/WebAudioRealtimeData';
import {integrateWebAudioGraph} from '../src/devtools/WebAudioGraphIntegrator';

import {updateGraphRender} from '../src/panel/updateGraphRender';
import {AudioGraphRender} from '../src/panel/graph/AudioGraphRender';

import {OSCILLATOR_GAIN_PARAM_EVENTS} from '../fixtures/oscillatorGainParam';
import {updateGraphSizes} from '../src/panel/updateGraphSizes';

/**
 * Drive the audio graph renderer with a simulated stream of WebAudio debugger
 * events, running the same integrate → layout → render pipeline the panel
 * uses, without a real debuggee page.
 */
function main() {
  const graphContainer = document.querySelector('.graph') as HTMLElement;
  const graphRender = new AudioGraphRender({
    elementContainer: graphContainer,
  });
  graphRender.init();
  graphContainer.appendChild(graphRender.pixiView);

  // Operator pipeline mirroring the devtools panel: integrate raw debugger
  // events into a graph context, serialize/deserialize it across the layout
  // step, then draw the result.
  const simulation = () =>
    pipe(
      integrateWebAudioGraph({
        // Realtime data polling is stubbed out — there is no page to poll.
        pollContext() {
          return EMPTY;
        },
      } as unknown as WebAudioRealtimeData),
      // Coalesce bursts of events so layout/render runs at most about once
      // per millisecond of quiet time.
      auditTime(1),
      map(serializeGraphContext),
      filter((graphContext) => graphContext.graph !== null),
      map(updateGraphSizes(graphRender)),
      map(deserializeGraphContext),
      map(layoutGraphContext),
      map(serializeGraphContext),
      map(updateGraphRender(graphRender)),
    );

  // Every 50ms emit a variation of the fixture event list. Each tick starts
  // with the fixture's final event (the contextWillBeDestroyed event) and
  // follows it with two rotated slices of the remaining events — presumably
  // to exercise rendering against partial and reordered event sequences.
  // NOTE(review): confirm the intended slicing/ordering against the fixture.
  interval(50)
    .pipe(
      take(OSCILLATOR_GAIN_PARAM_EVENTS.length),
      switchMap((_, i) =>
        from(
          OSCILLATOR_GAIN_PARAM_EVENTS.slice(-1).concat(
            OSCILLATOR_GAIN_PARAM_EVENTS.slice(
              0,
              i % (OSCILLATOR_GAIN_PARAM_EVENTS.length - 1),
            ),
            OSCILLATOR_GAIN_PARAM_EVENTS.slice(
              (i + 1) % (OSCILLATOR_GAIN_PARAM_EVENTS.length - 1),
              OSCILLATOR_GAIN_PARAM_EVENTS.length - 1,
            ),
          ),
        ),
      ),
      simulation(),
      // Tag the container when the stream completes so an observer (e.g. an
      // integration test) can detect that the simulation finished.
      finalize(() => graphContainer.classList.add('complete')),
    )
    .subscribe();
}

main();


================================================
FILE: simulations/webpack.config.js
================================================
const {resolve} = require('path');

const srcConfig = require('../src/webpack.config');

module.exports = (env, argv) => ({
  ...srcConfig(env, argv),
  entry: {
    updateGraphRender: resolve(__dirname, './updateGraphRender'),
  },
  output: {
    path: resolve(__dirname, './build'),
  },
});


================================================
FILE: src/.jest.config.json
================================================
{
  "collectCoverage": true,
  "injectGlobals": false,
  "transform": {
    "\\.[jt]sx?$": "babel-jest"
  },
  "coveragePathIgnorePatterns": ["<rootDir>/chrome/"]
}


================================================
FILE: src/build/make-chrome-extension.js
================================================
/**
 * A nodejs script that copies files, writes an extension manifest, and zips
 * it all up.
 *
 * @namespace makeChromeExtension
 */

const fs = require('fs').promises;
const {createWriteStream} = require('fs');
const path = require('path');

const mustache = require('mustache');
const {ZipFile} = require('yazl');

main();

/**
 * Copy files, generate the extension manifest, and zip the unpacked
 * extension.
 *
 * Calls other methods in this script.
 *
 * @memberof makeChromeExtension
 */
async function main() {
  const dest = '../../build/audion';
  // Copying the static pages and writing the manifest are independent, so
  // run them concurrently.
  await Promise.all([
    copyFiles({
      src: '..',
      dest,
      files: ['panel.html', 'devtools.html'],
    }),
    generateManifest({
      view: {version: require('../../package.json').version},
      dest,
    }),
  ]);
  // Zip only once the unpacked directory is fully populated.
  await zipChromeExtension({src: '../../build', dir: 'audion'});
}

/**
 * Copy each listed file from a src directory into a dest directory, creating
 * destination subdirectories as needed.
 *
 * @param {object} options
 * @param {string} options.src source directory, relative to `cwd`
 * @param {string} options.dest destination directory, relative to `cwd`
 * @param {Array<string>} options.files file paths relative to `src`
 * @param {string} [options.cwd] base directory used to resolve paths
 * @memberof makeChromeExtension
 */
async function copyFiles({src, dest, files, cwd = __dirname}) {
  const copyOne = async (file) => {
    // Ensure the file's parent directory exists under dest before copying.
    await mkdir(path.resolve(cwd, dest, path.dirname(file)));
    const from = path.resolve(cwd, src, file);
    const to = path.resolve(cwd, dest, file);
    await fs.copyFile(from, to);
  };
  // Each file is independent, so copy them concurrently.
  await Promise.all(files.map(copyOne));
}

/**
 * Generate an extension manifest by rendering the mustache template with the
 * given view and writing it into the dest directory.
 *
 * @param {object} options
 * @param {object} options.view values rendered into the template
 *   (e.g. `{version}`)
 * @param {string} options.dest destination directory, relative to `cwd`
 * @param {string} [options.file] output file name, default `manifest.json`
 * @param {string} [options.cwd] base directory used to resolve `dest`
 * @memberof makeChromeExtension
 */
async function generateManifest({
  view,
  dest,
  file = 'manifest.json',
  cwd = __dirname,
}) {
  // Ensure the output directory exists before writing.
  await mkdir(path.resolve(cwd, dest, path.dirname(file)));
  const template = await fs.readFile(
    path.resolve(__dirname, 'manifest.json.mustache'),
    'utf8',
  );
  const rendered = mustache.render(template, view);
  await fs.writeFile(path.resolve(cwd, dest, file), rendered);
}

/**
 * Zip the unpacked chrome extension.
 *
 * Removes any previous `${dir}.zip`, recursively lists the unpacked
 * directory, and writes each file into a fresh archive placed next to the
 * directory.
 *
 * @param {object} options
 * @param {string} options.src directory containing the unpacked extension
 *   directory, relative to `cwd`
 * @param {string} [options.cwd] base directory used to resolve `src`
 * @param {string} options.dir name of the unpacked extension directory
 * @param {string} [options.file] output archive name, default `${dir}.zip`
 * @memberof makeChromeExtension
 */
async function zipChromeExtension({
  src,
  cwd = __dirname,
  dir,
  file = `${dir}.zip`,
}) {
  // Remove any stale archive first; unlink() ignores ENOENT.
  await unlink(path.resolve(cwd, src, file));
  const files = await readdirRecursive(path.resolve(cwd, src, dir));

  const output = createWriteStream(path.resolve(cwd, src, file));
  const zip = new ZipFile();
  // Resolves once the piped output stream closes, i.e. the archive has been
  // fully flushed to disk.
  const zipDone = new Promise((resolve, reject) =>
    zip.outputStream.pipe(output).on('close', resolve).on('error', reject),
  );
  for (const file of files) {
    // Store each entry under its path relative to the unpacked directory.
    zip.addFile(path.resolve(cwd, src, dir, file), file);
  }
  zip.end();

  await zipDone;
}

/**
 * Read entry names in a directory recursively.
 * @param {string} dir directory to recursively read
 * @return {Promise<Array<string>>} array of paths relative to `dir`
 * @memberof makeChromeExtension
 */
async function readdirRecursive(dir) {
  const entries = await fs.readdir(dir);
  const expanded = await Promise.all(
    entries.map(async (entry) => {
      try {
        // Optimistically recurse as if the entry were a directory...
        const children = await readdirRecursive(path.resolve(dir, entry));
        return children.map((child) => path.join(entry, child));
      } catch (err) {
        // ...and treat ENOTDIR as "this entry is a plain file".
        if (err.code === 'ENOTDIR') {
          return entry;
        }
        throw err;
      }
    }),
  );
  return expanded.flat();
}

/**
 * Create a directory (and any missing parents) if it does not already exist.
 *
 * @param {string} dirpath directory to create
 * @memberof makeChromeExtension
 */
async function mkdir(dirpath) {
  try {
    await fs.mkdir(dirpath, {recursive: true});
  } catch (err) {
    // An already-existing directory is fine; anything else is a real error.
    if (err.code !== 'EEXIST') {
      throw err;
    }
  }
}

/**
 * Unlink a file from the filesystem if it exists.
 *
 * @param {string} filepath file to unlink
 * @memberof makeChromeExtension
 */
async function unlink(filepath) {
  try {
    await fs.unlink(filepath);
  } catch (err) {
    // A missing file is fine; anything else is a real error.
    if (err.code !== 'ENOENT') {
      throw err;
    }
  }
}


================================================
FILE: src/build/manifest.json.mustache
================================================
{
  "manifest_version": 3,
  "name": "Audion",
  "version": "{{version}}",
  "description": "Web Audio DevTools Extension (graph visualizer)",
  "devtools_page": "devtools.html",
  "options_ui": {
      "page": "options.html",
      "open_in_tab": false
  },
  "permissions": [
    "debugger"
  ]
}


================================================
FILE: src/chrome/API.js
================================================
/// <reference path="./Debugger.js" />
/// <reference path="./DevTools.js" />
/// <reference path="./Runtime.js" />

/**
 * Top level chrome extension API type. Contains references of each accessible
 * extension api.
 *
 * @typedef Chrome.API
 * @property {Chrome.Debugger} debugger
 * @property {Chrome.DevTools} devtools
 * @property {Chrome.Runtime} runtime
 */


================================================
FILE: src/chrome/Debugger.js
================================================
/// <reference path="Types.js" />

/**
 * [Chrome extension api][1] to the [Chrome Debugger Protocol][2]. Used by this
 * extension to access the [Web Audio domain][3].
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/debugger/
 * [2]: https://chromedevtools.github.io/devtools-protocol/
 * [3]: ChromeDebuggerWebAudioDomain.html
 *
 * @typedef Chrome.Debugger
 * @property {function(
 *   Chrome.DebuggerDebuggee, string, function(): void
 * ): void} attach
 * @property {function(Chrome.DebuggerDebuggee, function(): void): void} detach
 * @property {Chrome.Event<function(object, string): void>} onDetach
 * @property {Chrome.Event<Chrome.DebuggerOnEventListener>} onEvent
 * @property {Chrome.DebuggerSendCommand} sendCommand
 * @see https://developer.chrome.com/docs/extensions/reference/debugger/
 * @see https://chromedevtools.github.io/devtools-protocol/
 */

/**
 * @callback Chrome.DebuggerSendCommand
 * @param {Chrome.DebuggerDebuggee} target
 * @param {string} method
 * @param {*} [commandParams]
 * @param {*} [callback]
 */

/**
 * A debuggee identifier.
 *
 * Either tabId or extensionId must be specified.
 *
 * @typedef Chrome.DebuggerDebuggee
 * @property {string} [extensionId]
 * @property {string} [tabId]
 * @property {string} [targetId]
 * @see https://developer.chrome.com/docs/extensions/reference/debugger/#type-Debuggee
 */

/**
 * Arguments passed to Debugger onEvent listeners.
 *
 * @callback Chrome.DebuggerOnEventListener
 * @param {Chrome.DebuggerDebuggee} source
 * @param {string} method
 * @param {*} [params]
 * @return {void}
 */


================================================
FILE: src/chrome/DebuggerPageDomain.ts
================================================
/**
 * @file
 * Strings passed to `chrome.debugger.sendCommand` and received from
 * `chrome.debugger.onEvent` callbacks.
 */

import {ProtocolMapping} from 'devtools-protocol/types/protocol-mapping';

/** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#methods */
export enum PageDebuggerMethod {
  disable = 'Page.disable',
  enable = 'Page.enable',
}

/** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#events */
export enum PageDebuggerEvent {
  domContentEventFired = 'Page.domContentEventFired',
  frameAttached = 'Page.frameAttached',
  frameDetached = 'Page.frameDetached',
  frameNavigated = 'Page.frameNavigated',
  frameRequestedNavigation = 'Page.frameRequestedNavigation',
  frameStartedLoading = 'Page.frameStartedLoading',
  frameStoppedLoading = 'Page.frameStoppedLoading',
  lifecycleEvent = 'Page.lifecycleEvent',
  loadEventFired = 'Page.loadEventFired',
}

/** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#types */
export type PageDebuggerEventParams<Name extends PageDebuggerEvent> =
  ProtocolMapping.Events[Name];


================================================
FILE: src/chrome/DebuggerWebAudioDomain.ts
================================================
/**
 * @file
 * Strings passed to `chrome.debugger.sendCommand` and received from
 * `chrome.debugger.onEvent` callbacks.
 */

import {ProtocolMapping} from 'devtools-protocol/types/protocol-mapping';

/** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#methods */
export enum WebAudioDebuggerMethod {
  disable = 'WebAudio.disable',
  enable = 'WebAudio.enable',
  getRealtimeData = 'WebAudio.getRealtimeData',
}

/** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#events */
export enum WebAudioDebuggerEvent {
  audioListenerCreated = 'WebAudio.audioListenerCreated',
  audioListenerWillBeDestroyed = 'WebAudio.audioListenerWillBeDestroyed',
  audioNodeCreated = 'WebAudio.audioNodeCreated',
  audioNodeWillBeDestroyed = 'WebAudio.audioNodeWillBeDestroyed',
  audioParamCreated = 'WebAudio.audioParamCreated',
  audioParamWillBeDestroyed = 'WebAudio.audioParamWillBeDestroyed',
  contextChanged = 'WebAudio.contextChanged',
  contextCreated = 'WebAudio.contextCreated',
  contextWillBeDestroyed = 'WebAudio.contextWillBeDestroyed',
  nodeParamConnected = 'WebAudio.nodeParamConnected',
  nodeParamDisconnected = 'WebAudio.nodeParamDisconnected',
  nodesConnected = 'WebAudio.nodesConnected',
  nodesDisconnected = 'WebAudio.nodesDisconnected',
}

/** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#types */
export type WebAudioDebuggerEventParams<Name extends WebAudioDebuggerEvent> =
  ProtocolMapping.Events[Name];


================================================
FILE: src/chrome/DevTools.js
================================================
/// <reference path="Types.js" />

/**
 * [Chrome extension api][1] to the devtools inspector, available to an
 * extension's devtools page specified by the extension manifest's
 * `"devtools_page"`.
 *
 * [1]: https://developer.chrome.com/docs/extensions/mv3/devtools/
 *
 * @typedef Chrome.DevTools
 * @property {Chrome.DevToolsInspectedWindow} inspectedWindow
 * @property {Chrome.DevtoolsNetwork} network
 * @property {Chrome.DevToolsPanels} panels
 */

/**
 * [Extension api][1] for the tab inspected by this `"devtools_page"` instance.
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_inspectedWindow/
 *
 * @typedef Chrome.DevToolsInspectedWindow
 * @property {string} tabId
 */

/**
 * @typedef Chrome.DevtoolsNetwork
 * @property {Chrome.Event<function(string): void>} onNavigated
 */

/**
 * [Extension api][1] to manage panels this extension adds.
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/
 *
 * @typedef Chrome.DevToolsPanels
 * @property {Chrome.DevToolsPanelsCreateFunction} create
 * @property {'default' | 'dark'} themeName
 */

/**
 * [`chrome.devtools.panels.create(...)`][1]
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/#method-create
 *
 * @callback Chrome.DevToolsPanelsCreateFunction
 * @param {string} title
 * @param {string} icon
 * @param {string} pageUrl
 * @param {Chrome.DevToolsPanelsCreateCallback} onPanelCreated
 * @return {void}
 */

/**
 * @callback Chrome.DevToolsPanelsCreateCallback
 * @param {Chrome.DevToolsPanel} panel
 * @return {void}
 */

/**
 * [Panel][1] created by [`chrome.devtools.panels.create`][2].
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/#type-ExtensionPanel
 * [2]: Chrome.html#.DevToolsPanelsCreateFunction
 *
 * @typedef Chrome.DevToolsPanel
 * @property {Chrome.Event<function(): void>} onHidden
 * @property {Chrome.Event<function(): void>} onShown
 */


================================================
FILE: src/chrome/Runtime.js
================================================
/// <reference path="Types.js" />

/**
 * [Chrome extension api][1] about the extension, the host platform, and
 * communication between different extension contexts.
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/
 *
 * @typedef Chrome.Runtime
 * @property {function(): Chrome.RuntimePort} connect
 * @property {function(string): string} getURL
 * @property {Chrome.RuntimeError} lastError
 * @property {Chrome.Event<Chrome.RuntimeOnConnectCallback>} onConnect
 */

/**
 * @typedef Chrome.RuntimeError
 * @property {string} [message]
 * @see https://developer.chrome.com/docs/extensions/reference/runtime/#property-lastError
 */

/**
 * Callback passed to [`chrome.runtime.onConnect`][1].
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/#event-onConnect
 *
 * @callback Chrome.RuntimeOnConnectCallback
 * @param {Chrome.RuntimePort} port
 * @return {void}
 */

/**
 * [Port][1] to another chrome extension runtime context.
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/#type-Port
 *
 * @typedef Chrome.RuntimePort
 * @property {function(): void} disconnect
 * @property {Chrome.Event<function(Chrome.RuntimePort): void>} onDisconnect
 * @property {Chrome.Event<function(*, Chrome.RuntimePort): void>} onMessage
 * @property {function(*): void} postMessage
 */


================================================
FILE: src/chrome/Types.js
================================================
/**
 * Types provided by the [chrome extension api][1].
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/
 *
 * @namespace Chrome
 */

/**
 * Generic [event emitter][1] in chrome extension types.
 *
 * [1]: https://developer.chrome.com/docs/extensions/reference/events/#type-Event
 *
 * @typedef Chrome.Event
 * @property {Chrome.EventCallback<T>} addListener
 * @property {Chrome.EventCallback<T>} removeListener
 * @template {function} T
 */

/**
 * Function taking an event listener passed to a {@link Chrome.Event} instance.
 *
 * @callback Chrome.EventCallback
 * @param {T} callback
 * @template {function} T
 */


================================================
FILE: src/chrome/index.js
================================================
/// <reference path="API.js" />
/// <reference path="Types.js" />

/**
 * Global chrome extension api instance.
 *
 * Normally available on the global context `chrome` identifier. Use this export
 * to assist in testing use of the chrome extension api from inside this
 * extension.
 *
 * @type {Chrome.API}
 * @memberof Chrome
 * @alias chrome
 */
export const chrome = getChrome();

/**
 * Return a no-operation implementation of Chrome.API. Used in testing.
 *
 * @return {Chrome.API}
 * @memberof Chrome
 */
function noopChrome() {
  const noop = () => {};
  /**
   * Build a Chrome.Event whose add/removeListener do nothing.
   * @return {Chrome.Event<*>}
   */
  const noopEvent = () => ({addListener: noop, removeListener: noop});

  const debuggerApi = {
    attach: noop,
    detach: noop,
    onDetach: noopEvent(),
    onEvent: noopEvent(),
    sendCommand: noop,
  };

  const devtools = {
    inspectedWindow: {tabId: 'tab'},
    network: {onNavigated: noopEvent()},
    panels: {create: noop},
  };

  const runtime = {
    connect: () => ({
      onDisconnect: noopEvent(),
      onMessage: noopEvent(),
      disconnect: noop,
      postMessage: noop,
    }),
    getURL: (url) => url,
    /**
     * If a called chrome api method errored, lastError is set to that error
     * while the provided callback is run. Otherwise lastError is not set.
     */
    lastError: undefined,
    onConnect: noopEvent(),
  };

  return {debugger: debuggerApi, devtools, runtime};
}

/**
 * Return the global scope.
 *
 * Checks candidates with `typeof` so that referencing an undeclared
 * identifier does not throw; the first object-valued candidate wins.
 *
 * @return {*}
 * @memberof Chrome
 */
function getGlobal() {
  const found =
    typeof window === 'object'
      ? window
      : typeof self === 'object'
      ? self
      : typeof globalThis === 'object'
      ? globalThis
      : typeof global === 'object'
      ? global
      : typeof process === 'object'
      ? process
      : undefined;
  if (found === undefined) {
    throw new Error('Cannot find global object');
  }
  return found;
}

/**
 * Return a {@link Chrome.API} instance. Return a copy from
 * {@link Chrome.noopChrome} if running under a unit test environment.
 *
 * @return {Chrome.API}
 * @memberof Chrome
 */
function getChrome() {
  const scope = getGlobal();
  // Only use the real api when it looks like a devtools-capable extension
  // context; otherwise fall back to the no-op test implementation.
  if ('chrome' in scope && typeof scope.chrome === 'object') {
    if (typeof scope.chrome.devtools === 'object') {
      return scope.chrome;
    }
  }
  return noopChrome();
}


================================================
FILE: src/custom.d.ts
================================================
// Ambient module declarations so TypeScript accepts imports of non-TS assets
// that webpack loaders resolve at build time.

declare module '*.svg' {
  // Loader-provided value for the imported asset; exact shape depends on the
  // configured webpack loader — TODO confirm against the webpack config.
  const content: any;
  export default content;
}

declare module '*.css' {
  // Loader-provided value for the imported stylesheet.
  const content: any;
  export default content;
}


================================================
FILE: src/devtools/DebuggerAttachEventController.ts
================================================
import {
  BehaviorSubject,
  combineLatest,
  concat,
  defer,
  EMPTY,
  Observable,
  of,
  Subject,
  Subscriber,
} from 'rxjs';
import {
  catchError,
  delay,
  distinctUntilChanged,
  exhaustMap,
  filter,
  finalize,
  map,
  share,
  take,
} from 'rxjs/operators';

import {chrome} from '../chrome';
import {PageDebuggerMethod} from '../chrome/DebuggerPageDomain';
import {WebAudioDebuggerMethod} from '../chrome/DebuggerWebAudioDomain';

/**
 * Permission value in regards to calling `chrome.debugger.attach`.
 *
 * When the extension calls `chrome.debugger.attach` a notification will display
 * in devtools that the extension is debugging the tab. Attaching when the user
 * does not expect it and then see this notification is not desired. The user
 * needs to grant permission for the extension the privilege to attach, or
 * reject prior permission.
 *
 * Permission could be implied when the extension's panel is opened.
 *
 * Permission should be rejected when the debugging notification is canceled or
 * dismissed.
 *
 * Permission could be granted more explicitly by a panel component when the
 * panel is visible but the extension does not have permission.
 *
 * WebAudioEventObserver will be instructed with rules like the above by other
 * functions outside of this file.
 */
enum AttachPermission {
  /**
   * Initial value.
   *
   * When WebAudioEventObserver is created, it does not know if permission has
   * been granted or not and should treat this as **not having** permission.
   */
  UNKNOWN,

  /**
   * Permission has been granted by a user action. WebAudioEventObserver may
   * attach to `chrome.debugger`.
   */
  TEMPORARY,

  /**
   * Permission has been rejected. WebAudioEventObserver must not attach to
   * `chrome.debugger`.
   */
  REJECTED,
}

/**
 * Value used to indicate if the `chrome.debugger` attachment and
 * receiving `chrome.debugger.onEvent` events are "active".
 *
 * ACTIVATING and DEACTIVATING are transitional states while the
 * corresponding activate/deactivate action is carried out; IS_ACTIVE and
 * IS_INACTIVE are the settled states.
 */
enum BinaryTransition {
  // Transitioning toward IS_INACTIVE.
  DEACTIVATING = 'deactivating',
  // Settled: inactive.
  IS_INACTIVE = 'isInactive',
  // Transitioning toward IS_ACTIVE.
  ACTIVATING = 'activating',
  // Settled: active.
  IS_ACTIVE = 'isActive',
}

// Combined snapshot of the debugger-attachment state tracked by
// DebuggerAttachEventController's subjects.
export interface DebuggerAttachEventState {
  // Whether the user permits the extension to use `chrome.debugger`.
  permission: AttachPermission;
  // How many subscriptions want the debugger attached.
  attachInterest: number;
  // Attachment lifecycle; must be IS_ACTIVE for `sendCommand` to be used.
  attachState: BinaryTransition;
  // How many subscriptions want Page domain events via `onEvent`.
  pageEventInterest: number;
  // Page domain event delivery lifecycle.
  pageEventState: BinaryTransition;
  // How many subscriptions want WebAudio domain events via `onEvent`.
  webAudioEventInterest: number;
  // WebAudio domain event delivery lifecycle.
  webAudioEventState: BinaryTransition;
}

/** Chrome Devtools Protocol version to attach to. */
const debuggerVersion = '1.3';

/** Chrome tab to attach the debugger to. */
const {tabId} = chrome.devtools.inspectedWindow;

// Synthetic event name, namespaced like CDP methods, used to represent
// debugger detachment as an event. NOTE(review): presumably mirrors
// `chrome.debugger.onDetach` — confirm against the controller's constructor.
export enum ChromeDebuggerAPIEventName {
  detached = 'ChromeDebuggerAPI.detached',
}

export interface ChromeDebuggerAPIDetachEventParams {
  // Detach reasons as reported by the `chrome.debugger` extension api.
  reason: 'canceled_by_user' | 'target_closed';
}

// Event object pairing the synthetic detach method name with its params.
export interface ChromeDebuggerAPIDetachEvent {
  method: ChromeDebuggerAPIEventName.detached;
  params: ChromeDebuggerAPIDetachEventParams;
}

// Union of all synthetic chrome.debugger api events (currently only detach).
export type ChromeDebuggerAPIEvent = ChromeDebuggerAPIDetachEvent;

export type ChromeDebuggerAPIEventParams = ChromeDebuggerAPIEvent['params'];

/**
 * Control attachment to chrome.debugger depending on if the user has given
 * permission and how many parts of the extension need attachment.
 *
 * Interest counters express demand; internal subscriptions translate demand
 * plus permission into attach/detach and enable/disable actions.
 *
 * @memberof Audion
 * @alias DebuggerAttachEventController
 */
export class DebuggerAttachEventController {
  /** Does user permit extension to use `chrome.debugger`. */
  permission$: PermissionSubject;
  /** How many subscriptions want to attach to `chrome.debugger`. */
  attachInterest$: CounterSubject;
  /** Lifecycle state of the `chrome.debugger` attachment. */
  attachState$: Observable<BinaryTransition>;
  /**
   * How many subscriptions want to receive page events through
   * `chrome.debugger.onEvent`.
   */
  pageEventInterest$: CounterSubject;
  /** Lifecycle state of Page domain event reception. */
  pageEventState$: Observable<BinaryTransition>;
  /**
   * How many subscriptions want to receive web audio events through
   * `chrome.debugger.onEvent`.
   */
  webAudioEventInterest$: CounterSubject;
  /** Lifecycle state of WebAudio domain event reception. */
  webAudioEventState$: Observable<BinaryTransition>;

  /** Latest combined snapshot of all the subjects above. */
  combinedState$: Observable<DebuggerAttachEventState>;

  /** Events synthesized from `chrome.debugger.onDetach`. */
  debuggerEvent$: Observable<ChromeDebuggerAPIEvent>;

  constructor() {
    // Create an interface of subjects to track changes in state with the
    // `chrome.debugger` api.
    const debuggerSubject = {
      // Does the extension have permission from the user to use `chrome.debugger` api.
      permission: new PermissionSubject(),
      // How many entities want to attach to the debugger to call `sendCommand`
      // or listen to `onEvent`.
      attachInterest: new CounterSubject(0),
      // attachState must be IS_ACTIVE for `chrome.debugger.sendCommand` to be used.
      attachState: new BinaryTransitionSubject({
        initialState: BinaryTransition.IS_INACTIVE,
        activateAction: () => attach({tabId}, debuggerVersion),
        deactivateAction: () => detach({tabId}),
      }),
      // How many entities want to listen to page events through `onEvent`.
      pageEventInterest: new CounterSubject(0),
      // must be IS_ACTIVE for `onEvent` to receive events.
      pageEventState: new BinaryTransitionSubject({
        initialState: BinaryTransition.IS_INACTIVE,
        activateAction: () => sendCommand({tabId}, PageDebuggerMethod.enable),
        deactivateAction: () =>
          sendCommand({tabId}, PageDebuggerMethod.disable),
      }),
      // How many entities want to listen to web audio events through `onEvent`.
      webAudioEventInterest: new CounterSubject(0),
      // webAudioEventState must be IS_ACTIVE for `onEvent` to receive events.
      webAudioEventState: new BinaryTransitionSubject({
        initialState: BinaryTransition.IS_INACTIVE,
        activateAction: () =>
          sendCommand({tabId}, WebAudioDebuggerMethod.enable),
        deactivateAction: () =>
          sendCommand({tabId}, WebAudioDebuggerMethod.disable),
      }),
    };
    this.permission$ = debuggerSubject.permission;
    this.attachInterest$ = debuggerSubject.attachInterest;
    this.attachState$ = debuggerSubject.attachState;
    this.pageEventInterest$ = debuggerSubject.pageEventInterest;
    this.pageEventState$ = debuggerSubject.pageEventState;
    this.webAudioEventInterest$ = debuggerSubject.webAudioEventInterest;
    this.webAudioEventState$ = debuggerSubject.webAudioEventState;

    // Observable of changes to state derived from debuggerSubject.
    const debuggerState$ = (this.combinedState$ =
      // Push objects mapping of keys in debuggerSubject to values pushed from
      // that debuggerSubject member.
      combineLatest(debuggerSubject).pipe(
        // Filter out combined state that is not different from the last value.
        distinctUntilChanged(
          (previous, current) =>
            previous.permission === current.permission &&
            previous.attachInterest === current.attachInterest &&
            previous.attachState === current.attachState &&
            previous.pageEventInterest === current.pageEventInterest &&
            previous.pageEventState === current.pageEventState &&
            previous.webAudioEventInterest === current.webAudioEventInterest &&
            previous.webAudioEventState === current.webAudioEventState,
        ),
        // Make one subscription debuggerSubject once for many subscribers.
        share(),
      ));

    // The following subscriptions govern debuggerSubject.

    // Govern attachment to `chrome.debugger`.
    debuggerState$.subscribe({
      next: (state) => {
        // When debugger state has permission to attach to `chrome.debugger` and
        // something wants to use `chrome.debugger`, activate the attachment.
        // Otherwise deactivate the attachment.
        if (
          state.permission === AttachPermission.TEMPORARY &&
          state.attachInterest > 0
        ) {
          debuggerSubject.attachState.activate();
        } else {
          debuggerSubject.attachState.deactivate();
        }
      },
    });

    // Translate raw `onDetach` tuples into this extension's event shape.
    this.debuggerEvent$ = onDebuggerDetach$.pipe(
      map(([, reason]) => {
        return {
          method: ChromeDebuggerAPIEventName.detached,
          params: {reason},
        } as ChromeDebuggerAPIDetachEvent;
      }),
    );

    // Govern permission rejection and externally induced detachment.
    onDebuggerDetach$.subscribe({
      next([, reason]) {
        if (reason === 'canceled_by_user') {
          // Reject permission to use `chrome.debugger` in this extension. We
          // understand this event to be an explicit rejection from the
          // extension's user.
          debuggerSubject.permission.reject();
        }

        // Immediately go to the inactive state. Detachment was initiated
        // outside the extension and does not need to be requested.
        debuggerSubject.attachState.next(BinaryTransition.IS_INACTIVE);
      },
    });

    // Govern receiving events through `chrome.debugger.onEvent`.
    debuggerState$.subscribe(
      activateEventWhileAttached(
        debuggerSubject.pageEventState,
        ({pageEventInterest}) => pageEventInterest > 0,
      ),
    );
    debuggerState$.subscribe(
      activateEventWhileAttached(
        debuggerSubject.webAudioEventState,
        ({webAudioEventInterest}) => webAudioEventInterest > 0,
      ),
    );
  }

  /**
   * Attach to the debugger if not already, and call chrome.debugger.sendCommand.
   * @param method Chrome devtools protocol method like 'HeapProfiler.collectGarbage'.
   * @returns observable that completes once done without pushing any values
   */
  sendCommand(method: string): Observable<never> {
    // Register interest so the governing subscription attaches (if permitted).
    this.attachInterest$.increment();
    return this.attachState$.pipe(
      // Wait until the attachment is fully active before sending the command.
      filter((state) => state === BinaryTransition.IS_ACTIVE),
      take(1),
      exhaustMap(() => sendCommand({tabId}, method)),
      // Withdraw interest whether the command completed, errored, or the
      // caller unsubscribed early.
      finalize(() => this.attachInterest$.decrement()),
    );
  }
}

/**
 * Build a subscriber that keeps an event-domain state subject in sync with
 * the debugger attachment and the current interest count.
 * @param eventState subject governing one debugger event domain
 * @param interestExists predicate: does anything want this domain's events
 * @returns partial subscriber for combined controller state
 */
function activateEventWhileAttached(
  eventState: BinaryTransitionSubject,
  interestExists: (state: DebuggerAttachEventState) => boolean,
): Partial<Subscriber<DebuggerAttachEventState>> {
  return {
    next(state) {
      const isAttached = state.attachState === BinaryTransition.IS_ACTIVE;
      if (!isAttached) {
        // "Skip" deactivation of receiving events and immediately go to the
        // inactive state. The process of detachment either requested by the
        // extension or initiated otherwise has implicitly stopped reception
        // of events.
        eventState.next(BinaryTransition.IS_INACTIVE);
      } else if (interestExists(state)) {
        // Start receiving events. The attachment is active and some entities
        // are listening for events.
        eventState.activate();
      } else {
        // Stop receiving events. The attachment is still active but no
        // entities are listening for events.
        eventState.deactivate();
      }
    },
  };
}

/**
 * Create a function that returns an observable that completes when the api
 * calls back.
 * @param method `chrome` api method whose last argument is a callback
 * @param thisArg `this` inside of the method
 * @returns function producing an observable that completes when the method's
 * callback fires, or errors with `chrome.runtime.lastError` when one is set
 */
function bindChromeCallback<P extends any[]>(
  method: (...args: [...params: P, callback: () => void]) => void,
  thisArg = null,
) {
  return (...args: P) =>
    new Observable<never>((subscriber) => {
      const onCallback = () => {
        // `lastError` must be read inside the callback; it is only set for
        // the duration of the callback's invocation.
        const lastError = chrome.runtime.lastError;
        if (lastError) {
          subscriber.error(lastError);
        } else {
          subscriber.complete();
        }
      };
      method.call(thisArg, ...args, onCallback);
    });
}

/**
 * Return an observable that pushes events from a `chrome` api event.
 * Each emission is the listener's full argument list as an array.
 * @param event `chrome` api event
 * @returns observable of `chrome` api event argument tuples
 */
function fromChromeEvent<A extends any[]>(
  event: Chrome.Event<(...args: A) => any>,
) {
  return new Observable<A>((subscriber) => {
    const forwardArgs = (...args: A) => subscriber.next(args);
    event.addListener(forwardArgs);
    // Teardown: detach the listener when the subscription ends.
    return () => {
      event.removeListener(forwardArgs);
    };
  });
}

/**
 * Call `chrome.debugger.attach`.
 *
 * @see
 * https://developer.chrome.com/docs/extensions/reference/debugger/#method-attach
 */
const attach = bindChromeCallback(chrome.debugger.attach, chrome.debugger);

/**
 * Call `chrome.debugger.detach`.
 *
 * @see
 * https://developer.chrome.com/docs/extensions/reference/debugger/#method-detach
 */
const detach = bindChromeCallback(chrome.debugger.detach, chrome.debugger);

/**
 * Call `chrome.debugger.sendCommand`.
 *
 * The cast loosens the signature so the trailing-callback binding accepts a
 * call with only (target, method).
 *
 * @see
 * https://developer.chrome.com/docs/extensions/reference/debugger/#method-sendCommand
 */
const sendCommand = bindChromeCallback(
  chrome.debugger.sendCommand as (
    target: Chrome.DebuggerDebuggee,
    method: string,
    params?,
    callback?,
  ) => void,
  chrome.debugger,
);

/**
 * Observable of `chrome.debugger.onDetach` events, pushed as
 * `[target, reason]` tuples.
 */
const onDebuggerDetach$ = fromChromeEvent<
  [target: Chrome.DebuggerDebuggee, reason: string]
>(chrome.debugger.onDetach);

/**
 * Store if user allows the extension to use `chrome.debugger` api.
 */
export class PermissionSubject extends BehaviorSubject<AttachPermission> {
  constructor() {
    // Permission starts unresolved until granted or rejected.
    super(AttachPermission.UNKNOWN);
  }

  /**
   * Permit use of `chrome.debugger`. Has effect only while permission is
   * still UNKNOWN; an earlier rejection is never overridden.
   */
  grantTemporary() {
    if (this.value !== AttachPermission.UNKNOWN) {
      return;
    }
    this.next(AttachPermission.TEMPORARY);
  }

  /**
   * Reject use of `chrome.debugger`. Pushes REJECTED at most once.
   */
  reject() {
    if (this.value === AttachPermission.REJECTED) {
      return;
    }
    this.next(AttachPermission.REJECTED);
  }
}

/**
 * Description of a transition in BinaryTransitionSubject.
 */
interface BinaryTransitionDescription {
  /** The state the Subject must start in to perform this transition. */
  beginningState: BinaryTransition;
  /** The state the Subject is in while performing this transition. */
  intermediateState: BinaryTransition;
  /** The state the Subject is in after the action completes successfully. */
  successState: BinaryTransition;
  /** The state the Subject is in after the action fails. */
  errorState: BinaryTransition;
  /**
   * Delegate that does some work to modify other application state to the
   * desired state.
   */
  action: () => Observable<void>;
}

/**
 * Control a transition between inactive and active state. To perform a
 * transition the subject enters an intermediate state and calls a delegate to
 * do some action. After the action completes successfully the subject enters
 * the desired state.
 */
class BinaryTransitionSubject extends BehaviorSubject<BinaryTransition> {
  private readonly activateTransition: BinaryTransitionDescription;
  private readonly deactivateTransition: BinaryTransitionDescription;

  /**
   * @param initialState state the subject starts in
   * @param activateAction delegate performing the work of activation
   * @param deactivateAction delegate performing the work of deactivation
   */
  constructor({
    initialState,
    activateAction,
    deactivateAction,
  }: {
    initialState: BinaryTransition;
    activateAction: () => Observable<void>;
    deactivateAction: () => Observable<void>;
  }) {
    super(initialState);
    this.activateTransition = {
      beginningState: BinaryTransition.IS_INACTIVE,
      intermediateState: BinaryTransition.ACTIVATING,
      successState: BinaryTransition.IS_ACTIVE,
      errorState: BinaryTransition.IS_INACTIVE,
      action: activateAction,
    };
    this.deactivateTransition = {
      beginningState: BinaryTransition.IS_ACTIVE,
      intermediateState: BinaryTransition.DEACTIVATING,
      successState: BinaryTransition.IS_INACTIVE,
      errorState: BinaryTransition.IS_INACTIVE,
      action: deactivateAction,
    };
  }

  /**
   * Transition to a desired state.
   *
   * Change the subject value if it is set to beginningState to
   * intermediateState and once action completes successfully, set to
   * successState.
   * @param description transition to perform
   */
  transition(description: BinaryTransitionDescription) {
    if (this.value === description.beginningState) {
      concat(
        of(description.intermediateState),
        description.action(),
        // Only settle into successState if nothing else (e.g. an external
        // detach) changed the state while the action was in flight.
        defer(() =>
          this.value === description.intermediateState
            ? of(description.successState)
            : EMPTY,
        ),
      )
        .pipe(
          catchError((err) => {
            console.error(err);
            // Guard the message check: errors surfaced from
            // `chrome.runtime.lastError` may lack a string `message`, and
            // `err.message.startsWith` would itself throw in that case.
            if (
              typeof err?.message === 'string' &&
              err.message.startsWith('Another debugger is already attached')
            ) {
              // Treat "already attached" as success: the attachment exists.
              return this.value === description.intermediateState
                ? of(description.successState)
                : EMPTY;
            }
            return of(
              this.value === description.intermediateState
                ? description.errorState
                : description.beginningState,
            );
          }),
        )
        .subscribe({next: this.next.bind(this)});
    }
  }

  /**
   * If subject is inactive, transition to active.
   */
  activate() {
    this.transition(this.activateTransition);
  }

  /**
   * If subject is active, transition to inactive.
   */
  deactivate() {
    this.transition(this.deactivateTransition);
  }
}

/**
 * Observable counting some discrete value.
 */
export class CounterSubject extends BehaviorSubject<number> {
  /**
   * Increase value by 1.
   */
  increment() {
    this.next(this.value + 1);
  }

  /**
   * Decrease value by 1.
   */
  decrement() {
    this.next(this.value - 1);
  }
}


================================================
FILE: src/devtools/DebuggerEvents.ts
================================================
import {filter, map, Observable} from 'rxjs';
import {chrome} from '../chrome';
import {fromChromeEvent} from '../utils/rxChrome';
import {DebuggerAttachEventController} from './DebuggerAttachEventController';
import {Audion} from './Types';

/** Debugger protocol domains whose events can be observed here. */
type DebuggerDomain = 'page' | 'webAudio';

/** Options choosing which protocol domain to observe. */
interface DebuggerEventsOptions<D extends DebuggerDomain> {
  domain: D;
}

/** Maps a debugger domain name to the event type emitted for it. */
type DebuggerDomainEvent<D extends DebuggerDomain> = D extends 'page'
  ? Audion.PageEvent
  : D extends 'webAudio'
  ? Audion.WebAudioEvent
  : never;

/**
 * Observable of `chrome.debugger.onEvent` events for one protocol domain.
 * Subscribing registers interest with the attach controller so the debugger
 * attaches and the domain's events are enabled; unsubscribing withdraws it.
 */
export class DebuggerEventsObservable<
  D extends DebuggerDomain,
> extends Observable<DebuggerDomainEvent<D>> {
  constructor(
    public attachController: DebuggerAttachEventController,
    public options: DebuggerEventsOptions<D>,
  ) {
    super((subscriber) => {
      // Event methods are prefixed by their domain, e.g. "WebAudio.".
      const domainPrefix = options.domain.toLowerCase();
      attachController.attachInterest$.increment();
      attachController[options.domain + 'EventInterest$'].increment();
      const eventsSubscription = fromChromeEvent(chrome.debugger.onEvent)
        .pipe(
          map(([debuggeeId, method, params]) => ({method, params})),
          filter(({method}) => method.toLowerCase().startsWith(domainPrefix)),
        )
        .subscribe(subscriber);
      // Withdraw interest when the subscriber goes away.
      eventsSubscription.add(() => {
        attachController.attachInterest$.decrement();
        attachController[options.domain + 'EventInterest$'].decrement();
      });
      return eventsSubscription;
    });
  }
}


================================================
FILE: src/devtools/DevtoolsGraphPanel.test.js
================================================
/// <reference path="../chrome/Types.js" />
/// <reference path="../chrome/DebuggerWebAudioDomain.ts" />
/// <reference path="../utils/Types.ts" />
/// <reference path="Types.ts" />

import {beforeEach, describe, expect, it, jest} from '@jest/globals';

import dagre from 'dagre';
import {BehaviorSubject, Observable, partition, Subject} from 'rxjs';
import {map} from 'rxjs/operators';

import {chrome} from '../chrome';

import {DevtoolsGraphPanel} from './DevtoolsGraphPanel';
import {serializeGraphContext} from './serializeGraphContext';

jest.mock('../chrome');

/**
 * Graph context fixtures keyed by scenario index.
 * @type {Object<*, Audion.GraphContext>}
 */
const mockGraphs = {
  // A running realtime context with an empty graph.
  0: {
    id: 'context0000',
    /** @type {ChromeDebuggerWebAudio.BaseAudioContext} */
    context: {
      contextId: 'context0000',
      contextType: 'realtime',
      contextState: 'running',
      sampleRate: 48000,
      maxOutputChannelCount: 2,
      callbackBufferSize: 1000,
    },
    graph: new dagre.graphlib.Graph(),
    nodes: {},
  },
  // The same context after being suspended.
  1: {
    id: 'context0000',
    /** @type {ChromeDebuggerWebAudio.BaseAudioContext} */
    context: {
      contextId: 'context0000',
      contextType: 'realtime',
      contextState: 'suspended',
      sampleRate: 48000,
      maxOutputChannelCount: 2,
      callbackBufferSize: 1000,
    },
    graph: new dagre.graphlib.Graph(),
    nodes: {},
  },
  // The same context after destruction: all fields nulled out.
  2: {
    id: 'context0000',
    context: null,
    graph: null,
    nodes: null,
  },
};
describe('DevtoolsGraphPanel', () => {
  // Pushes a graph context into the panel's input observable.
  let nextGraph = (graph) => {};
  /** @type {Subject<Audion.GraphContext>} */
  let subject;
  /** @type {Chrome.RuntimePort} */
  let port;

  beforeEach(() => {
    jest.resetAllMocks();

    subject = new Subject();
    nextGraph = (value) => subject.next(value);

    // Gate that opens when the panel is shown, mirroring main.ts wiring.
    /** @type {BehaviorSubject<boolean>} */
    const gate = new BehaviorSubject();
    const [gateOpen, gateClose] = partition(gate, Boolean).map(map(() => {}));

    const panel = new DevtoolsGraphPanel(
      subject.pipe(
        map(serializeGraphContext),
        map((graphContext) => ({graphContext})),
        subscribeWhen(gateOpen, gateClose),
      ),
    );

    panel.onPanelShown$.pipe(map(() => true)).subscribe(gate);

    port = mockPort();
  });

  it('creates a panel with chrome.devtools', () => {
    expect(chrome.devtools.panels.create).toBeCalled();
    simulateCreatePanel();
  });

  it('subscribes to debugger events only after panel is shown', () => {
    expect(subject.observed).toBe(false);

    const panel = simulateCreatePanel();
    simulateConnectPort(port);

    // Connecting a port alone must not open the gate.
    expect(subject.observed).toBe(false);

    // Send onShown event to panel creation callback.
    simulateShowPanel(panel);

    expect(subject.observed).toBe(true);
  });

  it('posts graphs when connected', () => {
    // Send onShown event to panel creation callback.
    const panel = simulateCreatePanel();
    simulateConnectPort(port);
    simulateShowPanel(panel);

    nextGraph(mockGraphs[0]);
    nextGraph(mockGraphs[1]);
    expect(port.postMessage).toBeCalledTimes(2);
    expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(`
Array [
  Object {
    "graphContext": Object {
      "context": Object {
        "callbackBufferSize": 1000,
        "contextId": "context0000",
        "contextState": "running",
        "contextType": "realtime",
        "maxOutputChannelCount": 2,
        "sampleRate": 48000,
      },
      "graph": Object {
        "edges": Array [],
        "nodes": Array [],
        "options": Object {
          "compound": false,
          "directed": true,
          "multigraph": false,
        },
      },
      "id": "context0000",
      "nodes": Object {},
    },
  },
]
`);
    expect(port.postMessage.mock.calls[1]).toMatchInlineSnapshot(`
Array [
  Object {
    "graphContext": Object {
      "context": Object {
        "callbackBufferSize": 1000,
        "contextId": "context0000",
        "contextState": "suspended",
        "contextType": "realtime",
        "maxOutputChannelCount": 2,
        "sampleRate": 48000,
      },
      "graph": Object {
        "edges": Array [],
        "nodes": Array [],
        "options": Object {
          "compound": false,
          "directed": true,
          "multigraph": false,
        },
      },
      "id": "context0000",
      "nodes": Object {},
    },
  },
]
`);
  });

  it('posts null graph when context is destroyed', () => {
    // Send onShown event to panel creation callback.
    const panel = simulateCreatePanel();
    simulateConnectPort(port);
    simulateShowPanel(panel);

    nextGraph(mockGraphs[0]);
    nextGraph(mockGraphs[2]);
    expect(port.postMessage).toBeCalledTimes(2);
    expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(`
Array [
  Object {
    "graphContext": Object {
      "context": Object {
        "callbackBufferSize": 1000,
        "contextId": "context0000",
        "contextState": "running",
        "contextType": "realtime",
        "maxOutputChannelCount": 2,
        "sampleRate": 48000,
      },
      "graph": Object {
        "edges": Array [],
        "nodes": Array [],
        "options": Object {
          "compound": false,
          "directed": true,
          "multigraph": false,
        },
      },
      "id": "context0000",
      "nodes": Object {},
    },
  },
]
`);
    expect(port.postMessage.mock.calls[1]).toMatchInlineSnapshot(`
Array [
  Object {
    "graphContext": Object {
      "context": null,
      "graph": null,
      "id": "context0000",
      "nodes": null,
    },
  },
]
`);
  });

  it('stops posting graphs once disconnected', () => {
    const panel = simulateCreatePanel();
    simulateConnectPort(port);
    simulateShowPanel(panel);

    nextGraph(mockGraphs[0]);

    // Fire the port's onDisconnect listener to simulate the panel closing.
    if (jest.isMockFunction(port.onDisconnect.addListener)) {
      /** @type {function} */ (
        port.onDisconnect.addListener.mock.calls[0][0]
      )();
    }

    nextGraph(mockGraphs[1]);

    expect(port.postMessage).toBeCalledTimes(1);
    expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(`
Array [
  Object {
    "graphContext": Object {
      "context": Object {
        "callbackBufferSize": 1000,
        "contextId": "context0000",
        "contextState": "running",
        "contextType": "realtime",
        "maxOutputChannelCount": 2,
        "sampleRate": 48000,
      },
      "graph": Object {
        "edges": Array [],
        "nodes": Array [],
        "options": Object {
          "compound": false,
          "directed": true,
          "multigraph": false,
        },
      },
      "id": "context0000",
      "nodes": Object {},
    },
  },
]
`);
  });
});

/**
 * Simulate chrome api as if devtool panel was shown.
 * @param {Chrome.DevToolsPanel} panel panel to simulate showing
 */
function simulateShowPanel(panel) {
  // Invoke the first listener registered on the panel's onShown event.
  const [[onShownListener]] = panel.onShown.addListener.mock.calls;
  onShownListener();
}

/**
 * Simulate chrome api as if devtool panel was created.
 * @param {Chrome.DevToolsPanel} [panel] panel to simulate creating
 * @return {Chrome.DevToolsPanel} created mock panel
 */
function simulateCreatePanel(panel = mockPanel()) {
  // The creation callback is the 4th argument passed to panels.create.
  const [, , , onCreated] = chrome.devtools.panels.create.mock.calls[0];
  onCreated(panel);
  return panel;
}

/**
 * Simulate chrome api as if runtime port was created.
 * @param {Chrome.RuntimePort} [port] port to simulate connecting
 * @return {Chrome.RuntimePort} connected port
 */
function simulateConnectPort(port = mockPort()) {
  // Invoke the first listener registered on runtime.onConnect.
  const [[onConnectListener]] =
    chrome.runtime.onConnect.addListener.mock.calls;
  onConnectListener(port);
  return port;
}

/** @return {Chrome.Event<*>} mock event with jest spies for both listeners */
function mockEvent() {
  return {
    addListener: jest.fn(),
    removeListener: jest.fn(),
  };
}

/** @return {Chrome.RuntimePort} mock runtime port */
function mockPort() {
  const port = {
    onDisconnect: mockEvent(),
    onMessage: mockEvent(),
    postMessage: jest.fn(),
  };
  return port;
}

/**
 * @return {Chrome.DevToolsPanel} mock version of a devtool panel
 */
function mockPanel() {
  const panel = {
    onHidden: mockEvent(),
    onShown: mockEvent(),
  };
  return panel;
}

/**
 * Create an operator that defers subscribing to its source until
 * subscribeNotifier emits, and drops the source subscription (without
 * completing the outer observable) when unsubscribeNotifier emits. The
 * source may be re-subscribed on a later subscribeNotifier emission.
 * @param {Observable<void>} subscribeNotifier
 * @param {Observable<void>} unsubscribeNotifier
 * @return {function(Observable<T>): Observable<T>}
 * @template T
 */
function subscribeWhen(subscribeNotifier, unsubscribeNotifier) {
  return (source) => {
    return new Observable((subscriber) => {
      let subscription = null;
      // `subscribe` and `unsubscribe` swap themselves with no-ops so each is
      // effective at most once per open/close cycle; closing restores the
      // original `subscribe` for the next cycle.
      let subscribe = () => {
        const oldSubscribe = subscribe;
        subscribe = () => {};
        subscription = source.subscribe(subscriber);
        unsubscribe = () => {
          unsubscribe = () => {};
          subscription.unsubscribe();
          subscription = null;
          subscribe = oldSubscribe;
        };
      };
      let unsubscribe = () => {};
      const onSubscription = subscribeNotifier.subscribe({
        next() {
          subscribe();
        },
      });
      const offSubscription = unsubscribeNotifier.subscribe({
        next() {
          unsubscribe();
        },
      });
      // Teardown: stop listening to both notifiers and drop the source.
      return () => {
        onSubscription.unsubscribe();
        offSubscription.unsubscribe();
        unsubscribe();
      };
    });
  };
}


================================================
FILE: src/devtools/DevtoolsGraphPanel.ts
================================================
/** DevTools panel that renders the Web Audio graph and more debugging information. */

import {chrome} from '../chrome';
import {Audion} from './Types';

import {fromEventPattern, Observable, Subject} from 'rxjs';
import {map, takeUntil} from 'rxjs/operators';

/**
 * Bridge a `chrome` api event's addListener/removeListener pair into an
 * observable of the listener's first argument.
 */
function fromChromeEvent<T>(
  event: Chrome.Event<(msg: T) => void>,
): Observable<T> {
  return fromEventPattern(
    (handler) => {
      event.addListener(handler);
    },
    (handler) => {
      event.removeListener(handler);
    },
  );
}

/**
 * Manage a devtools panel rendering a graph of a web audio context.
 */
export class DevtoolsGraphPanel {
  /** Requests sent back from the panel page over its runtime port. */
  requests$: Observable<Audion.DevtoolsRequest>;

  /** Emits each time the user brings the panel into view. */
  onPanelShown$: Observable<void>;

  /**
   * Create a DevtoolsGraphPanel.
   * @param graphs$ messages forwarded to every connected panel port
   */
  constructor(graphs$: Observable<Audion.DevtoolsMessage>) {
    const requests$ = (this.requests$ = new Subject());
    const onPanelShown$ = (this.onPanelShown$ = new Subject<void>());

    chrome.devtools.panels.create('Web Audio', '', 'panel.html', (panel) => {
      // Relay the panel's onShown event into the subject.
      fromChromeEvent(panel.onShown).subscribe(onPanelShown$);
    });

    fromChromeEvent(chrome.runtime.onConnect).subscribe({
      next(port) {
        // Forward messages arriving on the port into requests$.
        fromChromeEvent(port.onMessage)
          .pipe(map(([message]) => message))
          .subscribe(requests$);

        // Push graph messages to this port until it disconnects.
        const disconnected$ = fromChromeEvent(port.onDisconnect);
        graphs$.pipe(takeUntil(disconnected$)).subscribe({
          next(message) {
            port.postMessage(message);
          },
        });
      },
    });
  }
}


================================================
FILE: src/devtools/Types.ts
================================================
/// <reference path="../chrome/DebuggerWebAudioDomain.ts" />

import {Protocol} from 'devtools-protocol/types/protocol';
import {
  PageDebuggerEvent,
  PageDebuggerEventParams,
} from '../chrome/DebuggerPageDomain';

import {
  WebAudioDebuggerEvent,
  WebAudioDebuggerEventParams,
} from '../chrome/DebuggerWebAudioDomain';

import {Utils} from '../utils/Types';

/** @namespace Audion */

/**
 * @typedef Audion.WebAudioEvent
 * @property {Method} method
 * @property {Params} params
 */

export namespace Audion {
  /** Realtime performance data reported for an audio context. */
  export type ContextRealtimeData = Protocol.WebAudio.ContextRealtimeData;

  /** Kind of destination a graph edge connects to. */
  export enum GraphEdgeType {
    NODE = 'node',
    PARAM = 'param',
  }

  /** Connection from an AudioNode output to another AudioNode input. */
  export interface GraphNodeEdge {
    sourceOutputIndex: number;
    destinationType: GraphEdgeType.NODE;
    destinationInputIndex: number;
  }

  /** Connection from an AudioNode output to an AudioParam. */
  export interface GraphParamEdge {
    sourceOutputIndex: number;
    destinationType: GraphEdgeType.PARAM;
    destinationParamId: string;
    destinationParamIndex: number;
  }

  /** Either kind of connection, discriminated by `destinationType`. */
  export type GraphEdge = GraphNodeEdge | GraphParamEdge;

  /** Edge record in graphlib's serialized edge-list format. */
  export interface GraphlibEdge<V = GraphEdge> {
    v: string;
    w: string;
    name: string;
    value: V;
  }

  /** Tracked state for one BaseAudioContext. */
  export interface GraphContext {
    id: Protocol.WebAudio.GraphObjectId;
    // NOTE(review): presumably counts debugger events applied to this
    // context — confirm against WebAudioGraphIntegrator.
    eventCount: number;
    context: Protocol.WebAudio.BaseAudioContext;
    realtimeData: ContextRealtimeData;
    nodes: {[key: string]: GraphNode};
    params: {[key: string]: Protocol.WebAudio.AudioParam};
    // Loosely typed graph object (tests construct a dagre graphlib Graph).
    graph: any;
  }

  /** Message carrying a single graph context update. */
  export interface GraphContextMessage {
    graphContext: Audion.GraphContext;
  }

  /** Graph contexts keyed by context id. */
  export interface GraphContextsById {
    [key: string]: Audion.GraphContext;
  }

  /** Message carrying every known graph context at once. */
  export interface AllGraphsMessage {
    allGraphs: GraphContextsById;
  }

  /** Any message posted to the devtools panel. */
  export type DevtoolsMessage = GraphContextMessage | AllGraphsMessage;

  /** Kinds of requests the panel can send back. */
  export enum DevtoolsRequestType {
    COLLECT_GARBAGE = 'collectGarbage',
  }

  /** Request asking for garbage collection to be run. */
  export interface DevtoolsCollectGarbageRequest {
    type: DevtoolsRequestType.COLLECT_GARBAGE;
  }

  /** Any request the panel can send back. */
  export type DevtoolsRequest = DevtoolsCollectGarbageRequest;

  /** Observer of messages destined for the devtools panel. */
  export interface DevtoolsObserver extends Utils.Observer<DevtoolsMessage> {}

  /** An AudioNode together with its params and connection events. */
  export interface GraphNode {
    node: Protocol.WebAudio.AudioNode;
    params: Protocol.WebAudio.AudioParam[];
    edges: Protocol.WebAudio.NodesConnectedEvent[];
  }

  /** Event from the Page debugger domain. */
  export type PageEvent<N extends PageDebuggerEvent = PageDebuggerEvent> = {
    method: N;
    params: PageDebuggerEventParams<N>[0];
  };

  /** Event from the WebAudio debugger domain. */
  export type WebAudioEvent<
    N extends WebAudioDebuggerEvent = WebAudioDebuggerEvent,
  > = {
    method: N;
    params: WebAudioDebuggerEventParams<N>[0];
  };
}

/**
 * @typedef Audion.GraphContext
 * @property {ChromeDebuggerWebAudioDomain.GraphObjectId} id
 * @property {ChromeDebuggerWebAudioDomain.BaseAudioContext} context
 * @property {Object<string, Audion.GraphNode>} nodes
 * @property {Object<string, ChromeDebuggerWebAudioDomain.AudioParam>} params
 * @property {object} graph
 */

/**
 * @typedef Audion.GraphContextMessage
 * @property {Audion.GraphContext} graphContext
 */

/**
 * @typedef Audion.AllGraphsMessage
 * @property {Object<string, Audion.GraphContext>} allGraphs
 */

/**
 * @typedef {Audion.GraphContextMessage
 *   | Audion.AllGraphsMessage
 *   } Audion.DevtoolsMessage
 */

/**
 * @typedef {Utils.Observer<Audion.DevtoolsMessage>} Audion.DevtoolsObserver
 */


================================================
FILE: src/devtools/WebAudioEventObserver.test.js
================================================
/// <reference path="../chrome/DebuggerWebAudioDomain.ts" />

import {beforeEach, describe, expect, it, jest} from '@jest/globals';

import {chrome} from '../chrome';
import {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain';

import {DebuggerAttachEventController} from './DebuggerAttachEventController';
import {WebAudioEventObservable} from './WebAudioEventObserver';

jest.mock('../chrome');

describe('WebAudioEventObserver', () => {
  let webAudioEvents$;

  beforeEach(() => {
    jest.clearAllMocks();

    // Grant debugger permission up front so subscribing attaches immediately.
    const attachController = new DebuggerAttachEventController();
    attachController.permission$.grantTemporary();
    webAudioEvents$ = new WebAudioEventObservable(attachController);
  });

  // Subscribing should attach the debugger, enable the WebAudio domain
  // (via sendCommand) and register detach/event listeners.
  it('attaches to chrome.debugger', () => {
    const sub = webAudioEvents$.subscribe();

    expect(chrome.debugger.attach).toBeCalled();
    if (jest.isMockFunction(chrome.debugger.attach)) {
      // Invoke the attach completion callback (third argument).
      /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])();
    }
    expect(chrome.debugger.sendCommand).toBeCalled();
    expect(chrome.debugger.onDetach.addListener).toBeCalled();
    expect(chrome.debugger.onEvent.addListener).toBeCalled();

    sub.unsubscribe();
  });

  // A user-initiated detach ('canceled_by_user') must not trigger an
  // automatic re-attach.
  it('does not reattach when user triggers detach', () => {
    const sub = webAudioEvents$.subscribe();

    if (jest.isMockFunction(chrome.debugger.attach)) {
      /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])();
    }
    expect(chrome.debugger.attach).toBeCalledTimes(1);
    if (
      jest.isMockFunction(chrome.debugger.onDetach.addListener) &&
      chrome.debugger.onDetach.addListener.mock.calls.length > 0
    ) {
      // Simulate chrome firing onDetach with a user-cancel reason.
      /** @type {function} */ (
        chrome.debugger.onDetach.addListener.mock.calls[0][0]
      )({tabId: 'tab'}, 'canceled_by_user');
    }
    expect(chrome.debugger.attach).toBeCalledTimes(1);

    sub.unsubscribe();
  });

  // Unsubscribing should disable the WebAudio domain, detach the debugger
  // and remove both listeners.
  it('detachs from chrome.debugger on unsubscribe', () => {
    const sub = webAudioEvents$.subscribe();

    if (jest.isMockFunction(chrome.debugger.attach)) {
      /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])();
    }
    expect(chrome.debugger.sendCommand).toBeCalledTimes(1);
    if (jest.isMockFunction(chrome.debugger.sendCommand)) {
      /** @type {function} */ (chrome.debugger.sendCommand.mock.calls[0][2])();
    }
    sub.unsubscribe();
    expect(chrome.debugger.detach).toBeCalled();
    if (jest.isMockFunction(chrome.debugger.sendCommand)) {
      // Complete the second sendCommand (domain disable) callback.
      /** @type {function} */ (chrome.debugger.sendCommand.mock.calls[1][2])();
    }
    expect(chrome.debugger.sendCommand).toBeCalledTimes(2);
    if (jest.isMockFunction(chrome.debugger.detach)) {
      /** @type {function} */ (chrome.debugger.detach.mock.calls[0][1])();
    }
    expect(chrome.debugger.onDetach.removeListener).toBeCalled();
    expect(chrome.debugger.onEvent.removeListener).toBeCalled();
  });

  // Raw chrome.debugger.onEvent notifications must be re-emitted as
  // {method, params} objects to subscribers.
  it('forwards to WebAudio debugger protocol events', () => {
    const nextMock = jest.fn();
    const sub = webAudioEvents$.subscribe(nextMock);
    if (jest.isMockFunction(chrome.debugger.attach)) {
      /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])();
    }
    /** @type {ChromeDebuggerWebAudioDomain.ContextCreatedEvent} */
    const contextCreated = {
      context: {
        contextId: '0',
        contextType: 'realtime',
        contextState: 'running',
        sampleRate: 48000,
        callbackBufferSize: 1000,
        maxOutputChannelCount: 2,
      },
    };
    if (jest.isMockFunction(chrome.debugger.onEvent.addListener)) {
      /** @type {function} */ (
        chrome.debugger.onEvent.addListener.mock.calls[0][0]
      )('tab', WebAudioDebuggerEvent.contextCreated, contextCreated);
    }
    expect(nextMock).toBeCalledWith({
      method: WebAudioDebuggerEvent.contextCreated,
      params: contextCreated,
    });

    sub.unsubscribe();
  });
});


================================================
FILE: src/devtools/WebAudioEventObserver.ts
================================================
import {chrome} from '../chrome';
import {Audion} from './Types';

import {Observable} from 'rxjs';
import {
  CounterSubject,
  DebuggerAttachEventController,
  PermissionSubject,
} from './DebuggerAttachEventController';
import {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain';

/**
 * Observable of WebAudio debugger protocol events. Subscribing registers
 * `chrome.debugger` listeners and signals attach/event interest to the
 * shared {@link DebuggerAttachEventController}; unsubscribing reverses both.
 *
 * @memberof Audion
 * @alias WebAudioEventObserver
 */
export class WebAudioEventObservable extends Observable<Audion.WebAudioEvent> {
  debuggerAttachController: DebuggerAttachEventController;

  /** Does user permit extension to use `chrome.debugger`. */
  permission$: PermissionSubject;
  /** How many subscriptions want to attach to `chrome.debugger`. */
  attachInterest$: CounterSubject;
  /**
   * How many subscriptions want to receive events through
   * `chrome.debugger.onEvent`.
   */
  webAudioEventInterest$: CounterSubject;

  constructor(debuggerAttachController: DebuggerAttachEventController) {
    super((subscriber) => {
      // Forward each raw debugger notification as a {method, params} pair.
      const onEvent: Chrome.DebuggerOnEventListener = (
        debuggeeId,
        method: WebAudioDebuggerEvent,
        params,
      ) => {
        subscriber.next({method, params});
      };

      const onDetach = () => {
        // TODO: Show a warning if the DevTools are still open and allow the
        // user to re-attach manually, e.g. by pressing a button.
        // See: https://developer.chrome.com/docs/extensions/reference/debugger/#type-DetachReason
      };

      chrome.debugger.onDetach.addListener(onDetach);
      chrome.debugger.onEvent.addListener(onEvent);

      this.attachInterest$.increment();
      this.webAudioEventInterest$.increment();

      // Teardown: unregister listeners and withdraw interest.
      return () => {
        chrome.debugger.onDetach.removeListener(onDetach);
        chrome.debugger.onEvent.removeListener(onEvent);

        this.attachInterest$.decrement();
        this.webAudioEventInterest$.decrement();
      };
    });

    // Assign fields eagerly in the constructor. Previously these were only
    // assigned inside the subscribe function, so they were undefined until
    // the first subscription and were reassigned on every subsequent one.
    this.debuggerAttachController = debuggerAttachController;
    this.permission$ = debuggerAttachController.permission$;
    this.attachInterest$ = debuggerAttachController.attachInterest$;
    this.webAudioEventInterest$ =
      debuggerAttachController.webAudioEventInterest$;
  }
}


================================================
FILE: src/devtools/WebAudioGraphIntegrator.test.js
================================================
/// <reference path="../chrome/DebuggerWebAudioDomain.ts" />

import {beforeEach, describe, expect, it, jest} from '@jest/globals';
import {EMPTY, from, Observable, Subject, throwError} from 'rxjs';
import {concatWith, filter, takeUntil, takeWhile} from 'rxjs/operators';

import {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain';

import {integrateWebAudioGraph} from './WebAudioGraphIntegrator';

// FIX: prettier isn't wrapping this next line.
// eslint-disable-next-line max-len
import * as oscillatorGainFixture from '../../fixtures/oscillatorGainParam';

// The Node.js test environment lacks some browser-only APIs the code under
// test touches (performance.now(), localStorage.getItem()/setItem()).
// Stub them with jest mocks so the tests run without throwing.
global.performance = {now: jest.fn(() => Date.now())};

global.localStorage = {
  getItem: jest.fn(),
  setItem: jest.fn(),
};

describe('WebAudioGraphIntegrator', () => {
  // Pushes one synthetic debugger event into the integrator under test.
  let nextWebAudioEvent = (value) => {};
  // Jest spy receiving each graph context the integrator emits.
  let nextGraphContext = jest.fn();

  beforeEach(() => {
    const subject = new Subject();
    nextGraphContext = jest.fn();
    nextWebAudioEvent = (value) => subject.next(value);
    // Stub realtime polling with an Observable that never emits, keeping
    // realtime data out of these event-driven tests.
    const webAudioRealtime = {
      pollContext() {
        return new Observable();
      },
    };
    subject
      .pipe(integrateWebAudioGraph(webAudioRealtime))
      .subscribe(nextGraphContext);
  });

  // contextCreated should emit a fresh, empty graph context.
  it('adds new context', () => {
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextCreated,
      params: MockWebAudioEvents.contextCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(1);
    expect(nextGraphContext.mock.calls[0]).toMatchInlineSnapshot(`
Array [
  Object {
    "context": Object {
      "callbackBufferSize": 1000,
      "contextId": "context0000",
      "contextState": "running",
      "contextType": "realtime",
      "maxOutputChannelCount": 2,
      "sampleRate": 48000,
    },
    "eventCount": 1,
    "graph": Graph {
      "_defaultEdgeLabelFn": [Function],
      "_defaultNodeLabelFn": [Function],
      "_edgeLabels": Object {},
      "_edgeObjs": Object {},
      "_in": Object {},
      "_isCompound": false,
      "_isDirected": true,
      "_isMultigraph": true,
      "_label": Object {},
      "_nodes": Object {},
      "_out": Object {},
      "_preds": Object {},
      "_sucs": Object {},
    },
    "id": "context0000",
    "nodes": Object {},
    "params": Object {},
    "realtimeData": Object {
      "callbackIntervalMean": 0,
      "callbackIntervalVariance": 0,
      "currentTime": 0,
      "renderCapacity": 0,
    },
  },
]
`);
  });
  // contextChanged should re-emit the context with updated state
  // (running -> suspended) and an incremented eventCount.
  it('changes context', () => {
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextCreated,
      params: MockWebAudioEvents.contextCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(1);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextChanged,
      params: MockWebAudioEvents.contextChanged[0],
    });
    expect(nextGraphContext).toBeCalledTimes(2);
    expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(`
Array [
  Object {
    "context": Object {
      "callbackBufferSize": 1000,
      "contextId": "context0000",
      "contextState": "suspended",
      "contextType": "realtime",
      "maxOutputChannelCount": 2,
      "sampleRate": 48000,
    },
    "eventCount": 2,
    "graph": Graph {
      "_defaultEdgeLabelFn": [Function],
      "_defaultNodeLabelFn": [Function],
      "_edgeLabels": Object {},
      "_edgeObjs": Object {},
      "_in": Object {},
      "_isCompound": false,
      "_isDirected": true,
      "_isMultigraph": true,
      "_label": Object {},
      "_nodes": Object {},
      "_out": Object {},
      "_preds": Object {},
      "_sucs": Object {},
    },
    "id": "context0000",
    "nodes": Object {},
    "params": Object {},
    "realtimeData": Object {
      "callbackIntervalMean": 0,
      "callbackIntervalVariance": 0,
      "currentTime": 0,
      "renderCapacity": 0,
    },
  },
]
`);
  });
  // contextWillBeDestroyed should emit a tombstone context whose graph
  // fields are all nulled out.
  it('removes old context', () => {
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextCreated,
      params: MockWebAudioEvents.contextCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(1);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextWillBeDestroyed,
      params: MockWebAudioEvents.contextWillBeDestroyed[0],
    });
    expect(nextGraphContext).toBeCalledTimes(2);
    expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(`
Array [
  Object {
    "context": null,
    "eventCount": 2,
    "graph": null,
    "id": "context0000",
    "nodes": null,
    "params": null,
    "realtimeData": null,
  },
]
`);
  });
  // audioNodeCreated should add the node to both the nodes map and the
  // dagre graph (with default width/height for layout).
  it('adds new node', () => {
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextCreated,
      params: MockWebAudioEvents.contextCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(1);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.audioNodeCreated,
      params: MockWebAudioEvents.audioNodeCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(2);
    expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(`
Array [
  Object {
    "context": Object {
      "callbackBufferSize": 1000,
      "contextId": "context0000",
      "contextState": "running",
      "contextType": "realtime",
      "maxOutputChannelCount": 2,
      "sampleRate": 48000,
    },
    "eventCount": 2,
    "graph": Graph {
      "_defaultEdgeLabelFn": [Function],
      "_defaultNodeLabelFn": [Function],
      "_edgeLabels": Object {},
      "_edgeObjs": Object {},
      "_in": Object {
        "node0000": Object {},
      },
      "_isCompound": false,
      "_isDirected": true,
      "_isMultigraph": true,
      "_label": Object {},
      "_nodeCount": 1,
      "_nodes": Object {
        "node0000": Object {
          "color": null,
          "height": 50,
          "id": "node0000",
          "label": "gain",
          "type": "gain",
          "width": 150,
        },
      },
      "_out": Object {
        "node0000": Object {},
      },
      "_preds": Object {
        "node0000": Object {},
      },
      "_sucs": Object {
        "node0000": Object {},
      },
    },
    "id": "context0000",
    "nodes": Object {
      "node0000": Object {
        "edges": Array [],
        "node": Object {
          "channelCountMode": "max",
          "channelInterpretation": "discrete",
          "contextId": "context0000",
          "nodeId": "node0000",
          "nodeType": "gain",
          "numberOfInputs": 1,
          "numberOfOutputs": 1,
        },
        "params": Array [],
      },
    },
    "params": Object {},
    "realtimeData": Object {
      "callbackIntervalMean": 0,
      "callbackIntervalVariance": 0,
      "currentTime": 0,
      "renderCapacity": 0,
    },
  },
]
`);
  });
  // audioNodeWillBeDestroyed should remove the node from both the nodes
  // map and the dagre graph.
  it('removes old node', () => {
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextCreated,
      params: MockWebAudioEvents.contextCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(1);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.audioNodeCreated,
      params: MockWebAudioEvents.audioNodeCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(2);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.audioNodeWillBeDestroyed,
      params: MockWebAudioEvents.audioNodeWillBeDestroyed[0],
    });
    expect(nextGraphContext).toBeCalledTimes(3);
    expect(nextGraphContext.mock.calls[2]).toMatchInlineSnapshot(`
Array [
  Object {
    "context": Object {
      "callbackBufferSize": 1000,
      "contextId": "context0000",
      "contextState": "running",
      "contextType": "realtime",
      "maxOutputChannelCount": 2,
      "sampleRate": 48000,
    },
    "eventCount": 3,
    "graph": Graph {
      "_defaultEdgeLabelFn": [Function],
      "_defaultNodeLabelFn": [Function],
      "_edgeLabels": Object {},
      "_edgeObjs": Object {},
      "_in": Object {},
      "_isCompound": false,
      "_isDirected": true,
      "_isMultigraph": true,
      "_label": Object {},
      "_nodeCount": 0,
      "_nodes": Object {},
      "_out": Object {},
      "_preds": Object {},
      "_sucs": Object {},
    },
    "id": "context0000",
    "nodes": Object {},
    "params": Object {},
    "realtimeData": Object {
      "callbackIntervalMean": 0,
      "callbackIntervalVariance": 0,
      "currentTime": 0,
      "renderCapacity": 0,
    },
  },
]
`);
  });
  // nodesConnected should record the edge both on the source node's edges
  // array and in the dagre graph's edge tables.
  it('adds new node edge connection', () => {
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextCreated,
      params: MockWebAudioEvents.contextCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(1);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.audioNodeCreated,
      params: MockWebAudioEvents.audioNodeCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(2);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.audioNodeCreated,
      params: MockWebAudioEvents.audioNodeCreated[1],
    });
    expect(nextGraphContext).toBeCalledTimes(3);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.nodesConnected,
      params: MockWebAudioEvents.nodesConnected[0],
    });
    expect(nextGraphContext).toBeCalledTimes(4);
    expect(nextGraphContext.mock.calls[3]).toMatchInlineSnapshot(`
Array [
  Object {
    "context": Object {
      "callbackBufferSize": 1000,
      "contextId": "context0000",
      "contextState": "running",
      "contextType": "realtime",
      "maxOutputChannelCount": 2,
      "sampleRate": 48000,
    },
    "eventCount": 4,
    "graph": Graph {
      "_defaultEdgeLabelFn": [Function],
      "_defaultNodeLabelFn": [Function],
      "_edgeCount": 1,
      "_edgeLabels": Object {
        "node0001node00000,0": Object {
          "destinationInputIndex": 0,
          "destinationType": "node",
          "sourceOutputIndex": 0,
        },
      },
      "_edgeObjs": Object {
        "node0001node00000,0": Object {
          "name": "0,0",
          "v": "node0001",
          "w": "node0000",
        },
      },
      "_in": Object {
        "node0000": Object {
          "node0001node00000,0": Object {
            "name": "0,0",
            "v": "node0001",
            "w": "node0000",
          },
        },
        "node0001": Object {},
      },
      "_isCompound": false,
      "_isDirected": true,
      "_isMultigraph": true,
      "_label": Object {},
      "_nodeCount": 2,
      "_nodes": Object {
        "node0000": Object {
          "color": null,
          "height": 50,
          "id": "node0000",
          "label": "gain",
          "type": "gain",
          "width": 150,
        },
        "node0001": Object {
          "color": null,
          "height": 50,
          "id": "node0001",
          "label": "bufferSource",
          "type": "bufferSource",
          "width": 150,
        },
      },
      "_out": Object {
        "node0000": Object {},
        "node0001": Object {
          "node0001node00000,0": Object {
            "name": "0,0",
            "v": "node0001",
            "w": "node0000",
          },
        },
      },
      "_preds": Object {
        "node0000": Object {
          "node0001": 1,
        },
        "node0001": Object {},
      },
      "_sucs": Object {
        "node0000": Object {},
        "node0001": Object {
          "node0000": 1,
        },
      },
    },
    "id": "context0000",
    "nodes": Object {
      "node0000": Object {
        "edges": Array [],
        "node": Object {
          "channelCountMode": "max",
          "channelInterpretation": "discrete",
          "contextId": "context0000",
          "nodeId": "node0000",
          "nodeType": "gain",
          "numberOfInputs": 1,
          "numberOfOutputs": 1,
        },
        "params": Array [],
      },
      "node0001": Object {
        "edges": Array [
          Object {
            "contextId": "context0000",
            "destinationId": "node0000",
            "sourceId": "node0001",
          },
        ],
        "node": Object {
          "channelCountMode": "max",
          "channelInterpretation": "discrete",
          "contextId": "context0000",
          "nodeId": "node0001",
          "nodeType": "bufferSource",
          "numberOfInputs": 0,
          "numberOfOutputs": 1,
        },
        "params": Array [],
      },
    },
    "params": Object {},
    "realtimeData": Object {
      "callbackIntervalMean": 0,
      "callbackIntervalVariance": 0,
      "currentTime": 0,
      "renderCapacity": 0,
    },
  },
]
`);
  });
  // nodesDisconnected should clear the edge from the dagre graph. (Note the
  // snapshot shows node0001's `edges` array still holds the connect event;
  // only the graph's edge tables are emptied.)
  it('removes old node edge connection', () => {
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.contextCreated,
      params: MockWebAudioEvents.contextCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(1);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.audioNodeCreated,
      params: MockWebAudioEvents.audioNodeCreated[0],
    });
    expect(nextGraphContext).toBeCalledTimes(2);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.audioNodeCreated,
      params: MockWebAudioEvents.audioNodeCreated[1],
    });
    expect(nextGraphContext).toBeCalledTimes(3);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.nodesConnected,
      params: MockWebAudioEvents.nodesConnected[0],
    });
    expect(nextGraphContext).toBeCalledTimes(4);
    nextWebAudioEvent({
      method: WebAudioDebuggerEvent.nodesDisconnected,
      params: MockWebAudioEvents.nodesDisconnected[0],
    });
    expect(nextGraphContext).toBeCalledTimes(5);
    expect(nextGraphContext.mock.calls[4]).toMatchInlineSnapshot(`
Array [
  Object {
    "context": Object {
      "callbackBufferSize": 1000,
      "contextId": "context0000",
      "contextState": "running",
      "contextType": "realtime",
      "maxOutputChannelCount": 2,
      "sampleRate": 48000,
    },
    "eventCount": 5,
    "graph": Graph {
      "_defaultEdgeLabelFn": [Function],
      "_defaultNodeLabelFn": [Function],
      "_edgeCount": 0,
      "_edgeLabels": Object {},
      "_edgeObjs": Object {},
      "_in": Object {
        "node0000": Object {},
        "node0001": Object {},
      },
      "_isCompound": false,
      "_isDirected": true,
      "_isMultigraph": true,
      "_label": Object {},
      "_nodeCount": 2,
      "_nodes": Object {
        "node0000": Object {
          "color": null,
          "height": 50,
          "id": "node0000",
          "label": "gain",
          "type": "gain",
          "width": 150,
        },
        "node0001": Object {
          "color": null,
          "height": 50,
          "id": "node0001",
          "label": "bufferSource",
          "type": "bufferSource",
          "width": 150,
        },
      },
      "_out": Object {
        "node0000": Object {},
        "node0001": Object {},
      },
      "_preds": Object {
        "node0000": Object {},
        "node0001": Object {},
      },
      "_sucs": Object {
        "node0000": Object {},
        "node0001": Object {},
      },
    },
    "id": "context0000",
    "nodes": Object {
      "node0000": Object {
        "edges": Array [],
        "node": Object {
          "channelCountMode": "max",
          "channelInterpretation": "discrete",
          "contextId": "context0000",
          "nodeId": "node0000",
          "nodeType": "gain",
          "numberOfInputs": 1,
          "numberOfOutputs": 1,
        },
        "params": Array [],
      },
      "node0001": Object {
        "edges": Array [
          Object {
            "contextId": "context0000",
            "destinationId": "node0000",
            "sourceId": "node0001",
          },
        ],
        "node": Object {
          "channelCountMode": "max",
          "channelInterpretation": "discrete",
          "contextId": "context0000",
          "nodeId": "node0001",
          "nodeType": "bufferSource",
          "numberOfInputs": 0,
          "numberOfOutputs": 1,
        },
        "params": Array [],
      },
    },
    "params": Object {},
    "realtimeData": Object {
      "callbackIntervalMean": 0,
      "callbackIntervalVariance": 0,
      "currentTime": 0,
      "renderCapacity": 0,
    },
  },
]
`);
  });

  describe('simulate graphs', () => {
    describe('oscillator -> gain param', () => {
      const events = oscillatorGainFixture.OSCILLATOR_GAIN_PARAM_EVENTS;
      // Fresh integrator per test; realtime polling is disabled via EMPTY.
      const simulation = () =>
        integrateWebAudioGraph({
          pollContext() {
            return EMPTY;
          },
        });
      const eventSource = from(events);

      for (let i = 0; i < events.length; i++) {
        const errorEvent = events[i];
        // Replay the fixture up to (but excluding) event #i, then error the
        // stream. BUG FIX: the original passed a predicate to takeUntil
        // (which requires an Observable notifier, so it threw at subscribe
        // time) and wrapped throwError in an array (so concatWith emitted
        // the Observable object as a value instead of subscribing to it),
        // making these tests pass without exercising the integrator.
        const falseSource = eventSource.pipe(
          takeWhile((event) => event !== errorEvent),
          concatWith(throwError(() => new Error())),
        );
        it(`falsify #${i} ${errorEvent.method}`, () => {
          const subscriber = mockSubscriber();
          falseSource.pipe(simulation()).subscribe(subscriber);
          expect(subscriber.error).toBeCalled();
        });
      }

      // The full fixture should integrate cleanly end to end.
      it(`all events`, () => {
        const subscriber = mockSubscriber();
        eventSource.pipe(simulation()).subscribe(subscriber);
        expect(subscriber.next).toBeCalled();
        expect(subscriber.error).not.toBeCalled();
      });

      // Dropping any single event must not crash the integrator.
      for (let i = 0; i < events.length; i++) {
        const skipEvent = events[i];
        const skipSource = eventSource.pipe(filter((ev) => ev !== skipEvent));
        it(`skip event #${i} ${skipEvent.method}`, () => {
          const subscriber = mockSubscriber();
          skipSource.pipe(simulation()).subscribe(subscriber);
          expect(subscriber.error).not.toBeCalled();
        });
      }
    });
  });
});

/**
 * Canned WebAudio debugger protocol event payloads used by the tests above,
 * indexed by event name and then by fixture number.
 * @type {Object<EventName,
 *   Object<*, WebAudioDebuggerEvent>>}
 */
const MockWebAudioEvents = {
  audioNodeCreated: {
    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeCreatedEvent} */
    0: {
      node: {
        contextId: 'context0000',
        nodeId: 'node0000',
        nodeType: 'gain',
        channelCountMode: 'max',
        channelInterpretation: 'discrete',
        numberOfInputs: 1,
        numberOfOutputs: 1,
      },
    },
    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeCreatedEvent} */
    1: {
      node: {
        contextId: 'context0000',
        nodeId: 'node0001',
        nodeType: 'bufferSource',
        channelCountMode: 'max',
        channelInterpretation: 'discrete',
        numberOfInputs: 0,
        numberOfOutputs: 1,
      },
    },
  },
  audioNodeWillBeDestroyed: {
    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeWillBeDestroyedEvent} */
    0: {
      contextId: 'context0000',
      nodeId: 'node0000',
    },
  },
  contextChanged: {
    /** @type {ChromeDebuggerWebAudioDomain.ContextChangedEvent} */
    0: {
      context: {
        contextId: 'context0000',
        contextType: 'realtime',
        contextState: 'suspended',
        sampleRate: 48000,
        callbackBufferSize: 1000,
        maxOutputChannelCount: 2,
      },
    },
  },
  contextCreated: {
    /** @type {ChromeDebuggerWebAudioDomain.ContextCreatedEvent} */
    0: {
      context: {
        contextId: 'context0000',
        contextType: 'realtime',
        contextState: 'running',
        sampleRate: 48000,
        callbackBufferSize: 1000,
        maxOutputChannelCount: 2,
      },
    },
  },
  contextWillBeDestroyed: {
    /** @type {ChromeDebuggerWebAudioDomain.ContextWillBeDestroyedEvent} */
    0: {
      contextId: 'context0000',
    },
  },
  nodesConnected: {
    /** @type {ChromeDebuggerWebAudioDomain.NodesConnectedEvent} */
    0: {
      contextId: 'context0000',
      sourceId: 'node0001',
      destinationId: 'node0000',
    },
  },
  nodesDisconnected: {
    /** @type {ChromeDebuggerWebAudioDomain.NodesDisconnectedEvent} */
    0: {
      contextId: 'context0000',
      sourceId: 'node0001',
      destinationId: 'node0000',
    },
  },
};

/**
 * Builds an rxjs-style subscriber whose next/complete/error callbacks are
 * jest mocks, so tests can assert which notifications a stream produced.
 * @return {Subscriber}
 */
function mockSubscriber() {
  return {next: jest.fn(), complete: jest.fn(), error: jest.fn()};
}


================================================
FILE: src/devtools/WebAudioGraphIntegrator.ts
================================================
import * as dagre from 'dagre';
import * as graphlib from 'graphlib';
import {ProtocolMapping} from 'devtools-protocol/types/protocol-mapping';
import {DLOG} from '../utils/dlog';
import {
  EMPTY,
  isObservable,
  merge,
  NEVER,
  Observable,
  of,
  OperatorFunction,
  pipe,
  Subject,
} from 'rxjs';
import {
  map,
  filter,
  catchError,
  mergeMap,
  takeUntil,
  take,
  ignoreElements,
  finalize,
  share,
} from 'rxjs/operators';

import {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain';

import {Audion} from './Types';
import {
  INITIAL_CONTEXT_REALTIME_DATA,
  RealtimeDataErrorMessage,
  WebAudioRealtimeData,
  WebAudioRealtimeDataReason,
} from './WebAudioRealtimeData';
import {
  ChromeDebuggerAPIEventName,
  ChromeDebuggerAPIEvent,
} from './DebuggerAttachEventController';
import {
  PageDebuggerEvent,
  PageDebuggerEventParams,
} from '../chrome/DebuggerPageDomain';

/** Why a graph context was torn down; emitted on graphContextDestroyed$. */
enum GraphContextDestroyReasonMessage {
  RECEIVE_WILL_DESTROY_EVENT = `ReceiveWillDestroyEvent`,
  CANNOT_FIND_REALTIME_DATA = `CannotFindRealtimeData`,
}

/**
 * Per-context mutable state, keyed by WebAudio context id. Each entry holds
 * the evolving graph snapshot, a subject that fires when the context is
 * destroyed, and the stream of realtime-data-annotated graph contexts.
 */
type MutableContexts = {
  [key: string]: {
    graphContext: Audion.GraphContext;
    graphContextDestroyed$: Subject<GraphContextDestroyReasonMessage>;
    realtimeDataGraphContext$: Observable<Audion.GraphContext>;
  };
};

/** Shared services made available to every event handler. */
interface EventHelpers {
  realtimeData: WebAudioRealtimeData;
}

/** Name of any event the integrator can consume. */
type IntegratableEventName =
  | PageDebuggerEvent
  | WebAudioDebuggerEvent
  | ChromeDebuggerAPIEventName;

/** Any event object the integrator can consume. */
type IntegratableEvent =
  | Audion.PageEvent
  | Audion.WebAudioEvent
  | ChromeDebuggerAPIEvent;

/**
 * Maps each event name to its parameter type: devtools-protocol events
 * resolve through ProtocolMapping, extension-internal debugger-API events
 * through ChromeDebuggerAPIEvent.
 */
type IntegratableEventMapping = {
  [K in IntegratableEventName]: ProtocolMapping.Events extends {
    [key in K]: [infer P];
  }
    ? P
    : ChromeDebuggerAPIEvent extends {method: K; params: infer P}
    ? P
    : never;
};

/**
 * One handler per event name. A handler may mutate `contexts` and return the
 * updated graph context (or an observable of graph contexts), or return
 * nothing when the event requires no downstream emission.
 */
type EventHandlers =
  | {
      readonly [K in IntegratableEventName]: (
        helpers: EventHelpers,
        contexts: MutableContexts,
        event: IntegratableEventMapping[K],
      ) => Observable<Audion.GraphContext> | Audion.GraphContext | void;
    };

/**
 * Formats the current high-resolution time as a bracketed log prefix with
 * two decimal places, e.g. "[1234.56] ".
 */
export const getTimestampAsString = () => {
  const milliseconds = performance.now().toFixed(2);
  return `[${milliseconds}] `;
};

const EVENT_HANDLERS: Partial<EventHandlers> = {
  [WebAudioDebuggerEvent.audioNodeCreated]: (
    helpers,
    contexts,
    audioNodeCreated,
  ) => {
    // Track a newly constructed AudioNode on its owning context's graph.
    const {node} = audioNodeCreated;
    const {contextId, nodeId, nodeType} = node;
    const space = contexts[contextId];
    if (!space) {
      return;
    }

    DLOG(`A new AudioNode has been created.`, {contextId, nodeId});

    const graphContext = space.graphContext;
    graphContext.eventCount += 1;

    // Guard against the debugger replaying the same creation event.
    if (graphContext.nodes[nodeId]) {
      DLOG(`Duplicate WebAudio.audioNodeCreated event`, {contextId, nodeId});
      return;
    }

    graphContext.nodes[nodeId] = {node, params: [], edges: []};
    graphContext.graph.setNode(nodeId, {
      id: nodeId,
      label: nodeType,
      type: nodeType,
      color: null,
      width: 150,
      height: 50,
    });
    return graphContext;
  },

  [WebAudioDebuggerEvent.audioNodeWillBeDestroyed]: (
    helpers,
    contexts,
    audioNodeDestroyed,
  ) => {
    // Drop a destroyed AudioNode, along with any AudioParams it owned.
    const {contextId, nodeId} = audioNodeDestroyed;

    DLOG(`An existing AudioNode has been destroyed.`, {contextId, nodeId});

    const space = contexts[contextId];
    if (!space) {
      return;
    }

    const graphContext = space.graphContext;
    graphContext.eventCount += 1;

    graphContext.graph.removeNode(nodeId);
    const removedNode = graphContext.nodes[nodeId];
    if (removedNode?.params) {
      // Forget every param registered under the removed node.
      for (const {paramId} of removedNode.params) {
        delete graphContext.params[paramId];
      }
    }
    delete graphContext.nodes[nodeId];
    return graphContext;
  },

  [WebAudioDebuggerEvent.audioParamCreated]: (
    helpers,
    contexts,
    audioParamCreated,
  ) => {
    // Register a newly created AudioParam under its owning node and context.
    const {param} = audioParamCreated;
    const {contextId, nodeId, paramId: paramIdCreated} = param;

    DLOG(`A new AudioParam has been created.`, {
      contextId,
      nodeId,
      paramIdCreated,
    });

    const space = contexts[contextId];
    if (!space) {
      return;
    }

    const graphContext = space.graphContext;
    graphContext.eventCount += 1;

    const owner = graphContext.nodes[nodeId];
    if (!owner) {
      return;
    }

    // Ignore replays of a param we already know about.
    const alreadyKnown = owner.params.some(
      ({paramId}) => paramId === paramIdCreated,
    );
    if (alreadyKnown) {
      DLOG(`Duplicate WebAudio.audioParamCreated event`, {
        contextId,
        nodeId,
        paramIdCreated,
      });
      return;
    }

    owner.params.push(param);
    graphContext.params[paramIdCreated] = param;
    return graphContext;
  },

  [WebAudioDebuggerEvent.audioParamWillBeDestroyed]: (
    helpers,
    contexts,
    audioParamWillBeDestroyed,
  ) => {
    // Remove a destroyed AudioParam from its node and the context registry.
    const {
      contextId,
      nodeId,
      paramId: paramIdCreated,
    } = audioParamWillBeDestroyed;

    DLOG(`An existing AudioParam has been destroyed.`, {
      contextId,
      nodeId,
      paramIdCreated,
    });

    const space = contexts[contextId];
    if (!space) {
      return;
    }

    const graphContext = space.graphContext;
    graphContext.eventCount += 1;

    const owner = graphContext.nodes[nodeId];
    if (owner?.params) {
      removeAll(owner.params, ({paramId}) => paramId === paramIdCreated);
    }
    delete graphContext.params[paramIdCreated];
    return graphContext;
  },

  [WebAudioDebuggerEvent.contextChanged]: (
    helpers,
    contexts,
    contextChanged,
  ) => {
    // Refresh the stored BaseAudioContext snapshot when its properties change.
    const {contextId} = contextChanged.context;
    const space = contexts[contextId];
    if (!space) {
      DLOG(
        `Unexpected WebAudio.contextChanged event.` +
          `Did not receive an event when Audio Context was created`,
        {
          contextId,
        },
      );
      return;
    }

    DLOG(
      `Some properties in BaseAudioContext have changed.` +
        `properties (id stays the same)`,
      {
        contextId,
      },
    );

    const graphContext = space.graphContext;
    graphContext.context = contextChanged.context;
    graphContext.eventCount += 1;
    return graphContext;
  },

  [WebAudioDebuggerEvent.audioListenerCreated]: (
    helpers,
    contexts,
    listenerCreated,
  ) => {
    // AudioListeners are not rendered in the graph; just log the event.
    const {contextId} = listenerCreated.listener;
    DLOG(`An AudioListener has been created.`, {contextId});
  },

  [WebAudioDebuggerEvent.audioListenerWillBeDestroyed]: (
    helpers,
    contexts,
    listenerWillBeDestroyed,
  ) => {
    // AudioListeners are not rendered in the graph; just log the event.
    const {contextId} = listenerWillBeDestroyed;
    DLOG(`An AudioListener will be destroyed.`, {contextId});
  },

  [WebAudioDebuggerEvent.contextCreated]: (
    helpers,
    contexts,
    contextCreated,
  ) => {
    // Begin tracking a newly created BaseAudioContext: build its graph, start
    // realtime-data polling, and wire the destroy signal that ends tracking.
    const {contextId, contextType} = contextCreated.context;

    if (contexts[contextId]) {
      // Duplicate or out of order context created event.
      console.warn(
        getTimestampAsString() +
          `Duplicate ${WebAudioDebuggerEvent.contextCreated} event.`,
        contextCreated,
      );
      return;
    } else {
      console.debug(
        getTimestampAsString() +
          `Audio Context (${contextId.slice(-6)}-${contextType}) created.` +
          `Adding the context to the tracked set.`,
      );
    }

    // Multigraph: parallel edges between the same two nodes are keyed by an
    // edge name (see the nodesConnected/nodeParamConnected handlers).
    const graph = new dagre.graphlib.Graph({multigraph: true});
    graph.setGraph({});
    graph.setDefaultEdgeLabel(() => {
      return {};
    });

    // Request realtime data for realtime and offline contexts. We use this
    // information to help confirm the existence of this new context. Events
    // that normally mark when contexts are destroyed may not arrive and so we
    // need this extra way to determine when the contexts no longer exist.
    const realtimeData$ = helpers.realtimeData.pollContext(contextId);
    const graphContextDestroyed$ =
      new Subject<GraphContextDestroyReasonMessage>();

    // Each realtime-data sample produces a fresh graph context snapshot.
    const realtimeDataGraphContext$ = realtimeData$.pipe(
      map((realtimeData) => {
        const space = contexts[contextId];
        if (space) {
          // Replace (not mutate) the snapshot so downstream sees a new object.
          space.graphContext = {
            ...space.graphContext,
            realtimeData,
          };
          return space.graphContext;
        }
      }),
      filter((context): context is Audion.GraphContext => Boolean(context)),
      catchError((reason, caught) => {
        reason = WebAudioRealtimeDataReason.parseReason(reason);

        if (WebAudioRealtimeDataReason.isCannotFindReason(reason)) {
          // The context is gone; signal destruction. The merged stream
          // returned below performs the actual cleanup and final emission.
          const space = contexts[contextId];
          space?.graphContextDestroyed$?.next(
            GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA,
          );

          if (!space) {
            DLOG(
              `Error requesting realtime data for context,` +
                `Context was likely cleaned up during requests for real time data.`,
              {
                reason,
                contextId,
              },
            );
          }

          return EMPTY;
        } else if (WebAudioRealtimeDataReason.isRealtimeOnlyReason(reason)) {
          // Non-realtime/offline contexts do not have realtime data and will
          // produce this error when that data is requested.
        } else {
          console.error(
            getTimestampAsString() +
              `Unexpected error requesting realtime data for context '${contextId}'.` +
              `"${WebAudioRealtimeDataReason.toString(reason)}"`,
          );
        }
        // Redirect back to the caught observable. We want to keep receiving
        // realtime data values or errors until we receive CANNOT_FIND error.
        return caught;
      }),

      takeUntil(graphContextDestroyed$),
    );

    contexts[contextId] = {
      graphContext: {
        id: contextId,
        eventCount: 1,
        context: contextCreated.context,
        realtimeData: INITIAL_CONTEXT_REALTIME_DATA,
        nodes: {},
        params: {},
        // TODO: dagre's graphlib typings are inaccurate, which is why we use
        // graphlib's types. Revert to dagre's types once the issue is fixed:
        // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/47439
        graph: graph as unknown as graphlib.Graph,
      },
      graphContextDestroyed$,
      realtimeDataGraphContext$,
    };

    // Emit: the initial snapshot, realtime-data snapshots as they arrive,
    // and finally a tombstone (all-null fields) once the context is destroyed.
    return merge(
      of(contexts[contextId].graphContext),
      graphContextDestroyed$.pipe(
        share(),
        take(1),
        mergeMap((message) => {
          if (
            message ===
            GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA
          ) {
            DLOG(
              `Audio Context cannot be found. ` +
                `Removing the context from the tracked set.`,
              {
                contextId,
              },
            );
          } else if (
            message ===
            GraphContextDestroyReasonMessage.RECEIVE_WILL_DESTROY_EVENT
          ) {
            DLOG(
              `Audio Context will be destroyed.` +
                `Removing the context from the tracked set.`,
              {
                contextId,
              },
            );
          }

          const space = contexts[contextId];
          if (space) {
            delete contexts[contextId];
            // Tombstone value: downstream consumers treat a null context and
            // graph as "this context no longer exists".
            return of({
              id: contextId,
              eventCount: space.graphContext?.eventCount + 1,
              context: null,
              realtimeData: null,
              nodes: null,
              params: null,
              graph: null,
            });
          } else {
            DLOG(
              `Audio Context could not be removed from the tracked set.` +
                `It was not tracked.`,
              {
                contextId,
              },
            );
          }
          return EMPTY;
        }),
      ),
      contexts[contextId].realtimeDataGraphContext$,
    );
  },

  [WebAudioDebuggerEvent.contextWillBeDestroyed]: (
    helpers,
    contexts,
    contextDestroyed,
  ) => {
    // Signal the context's destroy subject; the stream built by the
    // contextCreated handler performs cleanup and the final emission.
    const {contextId} = contextDestroyed;
    contexts[contextId]?.graphContextDestroyed$?.next(
      GraphContextDestroyReasonMessage.RECEIVE_WILL_DESTROY_EVENT,
    );

    DLOG(`A BaseAudioContext will be destroyed.`, {contextId});
  },

  [WebAudioDebuggerEvent.nodeParamConnected]: (
    helpers,
    contexts,
    nodeParamConnected,
  ) => {
    // Record an AudioNode -> AudioParam connection as a graph edge between
    // the source node and the node that owns the destination param.
    const {
      contextId,
      sourceId: sourceNodeId,
      sourceOutputIndex = 0,
      destinationId: destinationParamId,
    } = nodeParamConnected;

    DLOG(`An AudioNode is connected to an AudioParam.`, {
      contextId,
      sourceNodeId,
      destinationParamId,
    });

    const space = contexts[contextId];
    if (!space) {
      return;
    }

    const graphContext = space.graphContext;
    graphContext.eventCount += 1;

    const sourceNode = graphContext.nodes[sourceNodeId];
    const destinationParam = graphContext.params[destinationParamId];
    if (!sourceNode || !destinationParam) {
      return;
    }
    const destinationNodeId = destinationParam.nodeId;
    const destinationNode = graphContext.nodes[destinationNodeId];
    if (!destinationNode) {
      return;
    }

    sourceNode.edges.push(nodeParamConnected);
    graphContext.graph.setEdge(
      sourceNodeId,
      destinationNodeId,
      {
        sourceOutputIndex,
        destinationType: Audion.GraphEdgeType.PARAM,
        destinationParamId,
        destinationParamIndex: destinationNode.params.findIndex(
          ({paramId}) => paramId === destinationParamId,
        ),
      } as Audion.GraphEdge,
      sourceOutputIndex.toString(),
    );
    return graphContext;
  },

  [WebAudioDebuggerEvent.nodeParamDisconnected]: (
    helpers,
    contexts,
    nodesDisconnected,
  ) => {
    // Remove a previously recorded AudioNode -> AudioParam connection.
    //
    // The matching nodeParamConnected handler keys the graph edge by the
    // *node that owns the destination param*, not by the param id itself,
    // so the edge must be removed under that same node id.
    const {
      contextId,
      sourceId: sourceNodeId,
      sourceOutputIndex = 0,
      destinationId: destinationParamId,
    } = nodesDisconnected;

    DLOG(`An AudioNode is disconnected to an AudioParam.`, {
      contextId,
      sourceNodeId,
      destinationParamId,
    });

    const space = contexts[contextId];
    if (!space) {
      return;
    }

    const context = space.graphContext;
    context.eventCount += 1;

    const sourceNode = context.nodes[sourceNodeId];
    if (!sourceNode) {
      return;
    }

    // Drop the matching bookkeeping edge(s) on the source node.
    const {edges} = sourceNode;
    removeAll(
      edges,
      (edge) =>
        edge.destinationId === destinationParamId &&
        edge.sourceOutputIndex === sourceOutputIndex,
    );

    // BUG FIX: the edge was created with setEdge(sourceNodeId,
    // destinationParam.nodeId, ...), so removing it by the param id was a
    // silent no-op that left a stale edge in the rendered graph. Resolve the
    // param to its owning node id; fall back to the param id if the param is
    // unknown (matching the previous no-op behavior in that case).
    const destinationParam = context.params[destinationParamId];
    const destinationNodeId = destinationParam
      ? destinationParam.nodeId
      : destinationParamId;
    context.graph.removeEdge(
      sourceNodeId,
      destinationNodeId,
      sourceOutputIndex.toString(),
    );
    return context;
  },

  [WebAudioDebuggerEvent.nodesConnected]: (
    helpers,
    contexts,
    nodesConnected,
  ) => {
    // Record an AudioNode -> AudioNode connection as a multigraph edge keyed
    // by the output/input index pair.
    const {
      contextId,
      sourceId,
      sourceOutputIndex = 0,
      destinationId,
      destinationInputIndex = 0,
    } = nodesConnected;

    DLOG(`Two AudioNodes are connected.`, {contextId, sourceId, destinationId});

    const space = contexts[contextId];
    if (!space) {
      return;
    }

    const graphContext = space.graphContext;
    graphContext.eventCount += 1;

    const sourceNode = graphContext.nodes[sourceId];
    const destinationNode = graphContext.nodes[destinationId];
    if (!sourceNode || !destinationNode) {
      return;
    }

    sourceNode.edges.push(nodesConnected);
    graphContext.graph.setEdge(
      sourceId,
      destinationId,
      {
        sourceOutputIndex,
        destinationType: Audion.GraphEdgeType.NODE,
        destinationInputIndex,
      } as Audion.GraphNodeEdge,
      `${sourceOutputIndex},${destinationInputIndex}`,
    );
    return graphContext;
  },

  [WebAudioDebuggerEvent.nodesDisconnected]: (
    helpers,
    contexts,
    nodesDisconnected,
  ) => {
    // Remove a recorded AudioNode -> AudioNode connection.
    const {
      contextId,
      sourceId,
      sourceOutputIndex = 0,
      destinationId,
      destinationInputIndex = 0,
    } = nodesDisconnected;

    DLOG(
      `Notifies AudioNodes is disconnected. The destination` +
        `can be null, and it means all the outgoing connections` +
        `from the source are disconnected.`,
      {contextId, sourceId, destinationId},
    );

    const space = contexts[contextId];
    if (!space) {
      return;
    }

    const graphContext = space.graphContext;
    graphContext.eventCount += 1;

    const sourceNode = graphContext.nodes[sourceId];
    if (!sourceNode) {
      return;
    }

    // Drop the matching bookkeeping edge(s), then the graph edge itself.
    removeAll(
      sourceNode.edges,
      (edge) =>
        edge.destinationId === destinationId &&
        edge.sourceOutputIndex === sourceOutputIndex &&
        edge.destinationInputIndex === destinationInputIndex,
    );
    graphContext.graph.removeEdge(
      sourceId,
      destinationId,
      `${sourceOutputIndex},${destinationInputIndex}`,
    );
    return graphContext;
  },

  [PageDebuggerEvent.frameNavigated]: (helpers, contexts) => {
    // Navigation can silently destroy contexts; re-verify each tracked one.
    const trackedIds = Object.keys(contexts)
      .map((contextId) => contextId.slice(-6))
      .join(`, `);
    console.debug(
      getTimestampAsString() +
        `Checking if tracked Audio Contexts (${trackedIds}) exist after frame navigated.`,
    );

    return ensureContextsExist(contexts, helpers);
  },

  [PageDebuggerEvent.loadEventFired]: (helpers, contexts) => {
    // A load event means the page changed; re-verify each tracked context.
    const trackedIds = Object.keys(contexts)
      .map((contextId) => contextId.slice(-6))
      .join(`, `);
    console.debug(
      getTimestampAsString() +
        `Checking if tracked Audio Contexts (${trackedIds}) exist after load event.`,
    );

    return ensureContextsExist(contexts, helpers);
  },

  [ChromeDebuggerAPIEventName.detached]: (
    helpers,
    contexts,
    debuggerDetached,
  ) => {
    // Only a closed target implies the contexts may be gone; other detach
    // reasons leave the page (and its contexts) running.
    if (debuggerDetached.reason !== `target_closed`) {
      return;
    }

    const trackedIds = Object.keys(contexts)
      .map((contextId) => contextId.slice(-6))
      .join(`, `);
    console.debug(
      getTimestampAsString() +
        `Checking if tracked Audio Contexts (${trackedIds}) exist after debugger detached because target was closed.`,
    );

    return ensureContextsExist(contexts, helpers);
  },
};

function ensureContextsExist(
  contexts: MutableContexts,
  helpers: EventHelpers,
): void | Audion.GraphContext | Observable<Audion.GraphContext> {
  // Poll realtime data once per tracked context. A successful response (or a
  // REALTIME_ONLY error) means the context is still alive; a CANNOT_FIND
  // error means it is gone, so its destroy subject is signaled. No values
  // are emitted from here - ignoreElements discards them all.
  const probes = Object.keys(contexts).map((contextId) =>
    helpers.realtimeData.pollContext(contextId).pipe(
      take(1),
      ignoreElements(),
      catchError((rawReason) => {
        const reason = WebAudioRealtimeDataReason.parseReason(rawReason);

        if (WebAudioRealtimeDataReason.isCannotFindReason(reason)) {
          contexts[contextId]?.graphContextDestroyed$?.next(
            GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA,
          );
        } else if (WebAudioRealtimeDataReason.isRealtimeOnlyReason(reason)) {
          // OfflineAudioContexts emit this error if they are still alive.
        } else {
          console.error(
            getTimestampAsString() +
              `Unexpected error determining if context "${contextId}" is ` +
              `stale with devtools protocol WebAudio.getRealtimeData.` +
              `"${WebAudioRealtimeDataReason.toString(reason)}"`,
          );
        }

        return EMPTY;
      }),
    ),
  );
  return merge(...probes);
}

/**
 * Remove every element matching `fn` from `array`, mutating it in place.
 *
 * Tolerates a null/undefined array (no-op), matching prior behavior.
 *
 * @param array array to filter in place
 * @param fn predicate; elements for which it returns true are removed
 */
function removeAll<T>(array: T[], fn: (value: T) => boolean) {
  if (!array) {
    return;
  }
  // Single pass: compact kept elements to the front, then truncate. The
  // previous findIndex+splice loop restarted its scan from index 0 after
  // every removal, which was accidentally quadratic.
  let writeIndex = 0;
  for (const value of array) {
    if (!fn(value)) {
      array[writeIndex++] = value;
    }
  }
  array.length = writeIndex;
}

/**
 * Collect WebAudio debugger events into per context graphs.
 *
 * @param webAudioRealtimeData poller used to confirm contexts still exist
 * @returns operator mapping debugger events to graph context snapshots
 */
export function integrateWebAudioGraph(
  webAudioRealtimeData: WebAudioRealtimeData,
): OperatorFunction<IntegratableEvent, Audion.GraphContext> {
  const helpers = {realtimeData: webAudioRealtimeData};
  const contexts: MutableContexts = {};
  return pipe(
    mergeMap(({method, params}) => {
      const handler = EVENT_HANDLERS[method];
      if (!handler) {
        return EMPTY;
      }
      // Handlers may return nothing (no change), a graph context snapshot,
      // or an observable of snapshots.
      const result = handler(helpers, contexts, params as any);
      if (typeof result !== 'object' || result === null) {
        return EMPTY;
      }
      return isObservable(result) ? result : of(result);
    }),
  );
}


================================================
FILE: src/devtools/WebAudioRealtimeData.ts
================================================
import Protocol from 'devtools-protocol';
import {bindCallback, concatMap, interval, Observable} from 'rxjs';
import {map, timeout} from 'rxjs/operators';

import {invariant} from '../utils/error';

import {chrome} from '../chrome';
import {WebAudioDebuggerMethod} from '../chrome/DebuggerWebAudioDomain';

import {Audion} from './Types';
import {bindChromeCallback} from '../utils/rxChrome';

/**
 * Error messages returned by WebAudio.getRealtimeData devtool protocol method.
 *
 * These strings are compared verbatim against protocol error messages, so
 * they must match the protocol's text exactly (including its misspellings).
 */
export enum RealtimeDataErrorMessage {
  /** Error returned when a BaseAudioContext with the given id cannot be found. */
  CANNOT_FIND = 'Cannot find BaseAudioContext with such id.',
  /**
   * Error returned when realtime data is requested from an
   * OfflineAudioContext. ("avaliable" matches the protocol's own spelling.)
   */
  REALTIME_ONLY = 'ContextRealtimeData is only avaliable for an AudioContext.',
}

/** Shape of a rejection reason carrying one of the known error messages. */
interface RealtimeDataReason<Message extends RealtimeDataErrorMessage> {
  message: Message;
}

// The tab this devtools instance inspects; all debugger commands target it.
const {tabId} = chrome.devtools.inspectedWindow;

// chrome.debugger.sendCommand wrapped as an Observable-returning function,
// typed for the WebAudio.getRealtimeData method.
const sendCommand = bindChromeCallback<
  [{tabId: string}, WebAudioDebuggerMethod.getRealtimeData, any?],
  [{realtimeData: Protocol.WebAudio.ContextRealtimeData}]
>(chrome.debugger.sendCommand, chrome.debugger);

// Placeholder realtime data used for a context until its first poll resolves.
export const INITIAL_CONTEXT_REALTIME_DATA = {
  callbackIntervalMean: 0,
  callbackIntervalVariance: 0,
  currentTime: 0,
  renderCapacity: 0,
} as Audion.ContextRealtimeData;

export class WebAudioRealtimeData {
  // How often to poll WebAudio.getRealtimeData, in milliseconds.
  private readonly intervalMS = 1000;
  // How long to wait for each poll response before erroring, in milliseconds.
  private readonly timeoutMS = 500;

  // Shared ticker driving every pollContext subscription.
  private readonly interval$ = interval(this.intervalMS);

  /**
   * Poll realtime data for one context on a fixed interval.
   *
   * Emits a ContextRealtimeData roughly once per intervalMS. Errors if a
   * poll does not respond within timeoutMS, if the protocol rejects the
   * request (e.g. unknown or offline context), or if no data is returned.
   *
   * @param contextId protocol id of the BaseAudioContext to poll
   * @return observable of realtime data snapshots
   */
  pollContext(contextId: string) {
    return this.interval$.pipe(
      concatMap(() =>
        sendCommand({tabId}, WebAudioDebuggerMethod.getRealtimeData, {
          contextId,
        }).pipe(
          timeout({first: this.timeoutMS}),
          map((result) => {
            invariant(
              result && result !== null,
              'ContextRealtimeData not returned for WebAudio context %0.',
              contextId,
            );
            return result.realtimeData;
          }),
        ),
      ),
    );
  }
}

/**
 * Helpers for interpreting rejection reasons from WebAudio.getRealtimeData.
 */
export const WebAudioRealtimeDataReason = {
  /**
   * Chrome may wrap a protocol error in an Error whose message is JSON;
   * unpack it when possible so the `.message` checks below see the
   * protocol's text. Non-JSON messages are returned unchanged.
   */
  parseReason(reason: any) {
    const looksWrapped = reason && reason.message && !reason.code;
    if (looksWrapped) {
      try {
        return JSON.parse(reason.message);
      } catch (e) {
        // Not JSON; fall through and return the reason untouched.
      }
    }
    return reason;
  },

  /** Render a reason as human-readable text. */
  toString(reason: any) {
    if (reason && reason.message) {
      return reason.message;
    }
    return reason;
  },

  /** True when the reason is the offline-context ("realtime only") error. */
  isRealtimeOnlyReason(
    reason: any,
  ): reason is RealtimeDataReason<RealtimeDataErrorMessage.REALTIME_ONLY> {
    return reason && reason.message === RealtimeDataErrorMessage.REALTIME_ONLY;
  },

  /** True when the reason says the context can no longer be found. */
  isCannotFindReason(
    reason: any,
  ): reason is RealtimeDataReason<RealtimeDataErrorMessage.CANNOT_FIND> {
    return reason && reason.message === RealtimeDataErrorMessage.CANNOT_FIND;
  },
};


================================================
FILE: src/devtools/deserializeGraphContext.ts
================================================
import * as graphlib from 'graphlib';
import {Audion} from './Types';

/**
 * A graph context whose `graph` field holds the plain JSON form produced by
 * graphlib's json.write rather than a live Graph instance.
 */
export interface SerializedGraphContext extends Audion.GraphContext {
  graph: any;
}

/**
 * Rebuild a live graphlib Graph from a serialized graph context. Contexts
 * with a null graph (destroyed contexts) pass through unchanged.
 */
export function deserializeGraphContext(
  graphContext: SerializedGraphContext,
): Audion.GraphContext {
  if (!graphContext.graph) {
    return graphContext;
  }
  return {
    ...graphContext,
    // TODO: dagre's graphlib typings are inaccurate, which is why we use
    // graphlib directly here. Revert to dagre's types once the issue is fixed:
    // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/47439
    graph: graphlib.json.read(graphContext.graph),
  };
}


================================================
FILE: src/devtools/layoutGraphContext.ts
================================================
import * as dagre from 'dagre';

import {Audion} from './Types';

/**
 * Run dagre's layout over the context's graph, assigning node positions in
 * place. Destroyed contexts (null context or graph) are returned untouched.
 */
export function layoutGraphContext(
  context: Audion.GraphContext,
): Audion.GraphContext {
  const isLive = Boolean(context.context && context.graph);
  if (isLive) {
    // TODO: dagre's graphlib typings are inaccurate, which is why we use
    // graphlib's types. Revert to dagre's types once the issue is fixed:
    // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/47439
    dagre.layout(context.graph as unknown as dagre.graphlib.Graph);
  }
  return context;
}


================================================
FILE: src/devtools/main.ts
================================================
import {merge} from 'rxjs';
import {
  map,
  scan,
  take,
  shareReplay,
  share,
  mergeMap,
  auditTime,
} from 'rxjs/operators';

import {Audion} from './Types';

import {DebuggerAttachEventController} from './DebuggerAttachEventController';
import {DevtoolsGraphPanel} from './DevtoolsGraphPanel';
import {serializeGraphContext} from './serializeGraphContext';
import {integrateWebAudioGraph} from './WebAudioGraphIntegrator';
import {WebAudioRealtimeData} from './WebAudioRealtimeData';
import {partitionMap} from './partitionMap';
import {DebuggerEventsObservable} from './DebuggerEvents';

// Owns attaching/detaching the chrome.debugger to the inspected tab.
const attachController = new DebuggerAttachEventController();

// Streams of Page-domain and WebAudio-domain debugger events.
const pageEvent$ = new DebuggerEventsObservable(attachController, {
  domain: 'page',
});
const webAudioEvents$ = new DebuggerEventsObservable(attachController, {
  domain: 'webAudio',
});
const webAudioRealtimeData = new WebAudioRealtimeData();

// Integrate all debugger events into per-context graph snapshots, throttle
// each context's updates, and serialize them for postMessage transport.
const serializedGraphContext$ = merge(
  pageEvent$,
  webAudioEvents$,
  attachController.debuggerEvent$,
).pipe(
  integrateWebAudioGraph(webAudioRealtimeData),
  // Split graph contexts into an observable for each unique graph context id.
  partitionMap({
    getPartitionId: ({id}) => id,
    isPartitionComplete: ({context}) => context === null,
  }),
  // For each partition, start a timer on the first value in that partition but
  // emit the last value during that timer when the timer completes.
  map(auditTime(16)),
  // Merge all the partitions together.
  mergeMap((source) => source),
  map(serializeGraphContext),
  share(),
);

const allGraphs$ = merge(serializedGraphContext$).pipe(
  // Persistently observe web audio events and integrate events into context
  // objects. Collect those into an object of all current graphs.
  scan<Audion.GraphContext, {[key: string]: Audion.GraphContext}>(
    (allGraphs, graphContext) => {
      if (graphContext.graph) {
        return {...allGraphs, [graphContext.id]: graphContext};
      }
      // A null graph marks a destroyed context; drop it from the map.
      const {[graphContext.id]: _, ...otherGraphs} = allGraphs;
      return otherGraphs;
    },
    {},
  ),
  shareReplay(),
);

// There must be at least one subscription to keep allGraphs$ up to date if
// panel is connected or otherwise.
allGraphs$.subscribe();

// When the panel is opened it'll connect to the devtools page, immediately send
// the current set of graphs.
const panel = new DevtoolsGraphPanel(
  merge(
    allGraphs$.pipe(
      map((allGraphs) => ({allGraphs})),
      take(1),
    ),
    serializedGraphContext$.pipe(map((graphContext) => ({graphContext}))),
  ),
);

// When the panel is first shown, grant attachController permission to attach to
// the debugger.
panel.onPanelShown$.pipe(take(1)).subscribe({
  next() {
    attachController.permission$.grantTemporary();
  },
});

// Respond to requests from the panel accordingly.
panel.requests$.subscribe({
  next(value) {
    if (value.type === Audion.DevtoolsRequestType.COLLECT_GARBAGE) {
      attachController.sendCommand('HeapProfiler.collectGarbage').subscribe();
    }
  },
});


================================================
FILE: src/devtools/partitionMap.ts
================================================
import {Observable, OperatorFunction, Subject} from 'rxjs';

/** Configuration for partitionMap. */
interface PartitionMapConfig<V> {
  /** Callback that returns id string of partition to push to. */
  getPartitionId: (value: V) => string;
  /** Callback that determines if as of that value the partition is complete. */
  isPartitionComplete: (value: V) => boolean;
}

/**
 * Split an input observable's values into an observable of observables,
 * one inner observable per partition id.
 *
 * A partition is opened (and emitted downstream) the first time its id is
 * seen, receives every subsequent value with that id, and is completed and
 * evicted when isPartitionComplete returns true for a value.
 *
 * @param config when to create partition observables and complete them
 * @returns an observable that pushes an observable for each created partition
 */
export function partitionMap<V>({
  getPartitionId,
  isPartitionComplete,
}: PartitionMapConfig<V>): OperatorFunction<V, Observable<V>> {
  return (source: Observable<V>) => {
    // Live partitions, keyed by partition id.
    const partitions = {} as {[key: string]: Subject<V>};
    return new Observable<Observable<V>>((subscriber) => {
      const handleValue = (value: V) => {
        const id = getPartitionId(value);
        // Evaluate completion before routing, matching evaluation order of
        // the callbacks relative to the partition push.
        const shouldComplete = isPartitionComplete(value);

        // First value for this id: open a partition and emit it downstream.
        let partition = partitions[id];
        if (!partition) {
          partition = partitions[id] = new Subject<V>();
          subscriber.next(partition);
        }

        // Route the value into its partition.
        partition.next(value);

        // A completing value closes and evicts the partition.
        if (shouldComplete) {
          partition.complete();
          delete partitions[id];
        }
      };

      return source.subscribe({
        next: handleValue,
        // When source completes, all partitions complete.
        complete: () => subscriber.complete(),
        // When source errors, all partitions error.
        error: (reason) => subscriber.error(reason),
      });
    });
  };
}


================================================
FILE: src/devtools/serializeGraphContext.js
================================================
import dagre from 'dagre';

/**
 * Serialize a graph context for message transport by converting its live
 * graphlib graph into a plain JSON-friendly object. Contexts with a null
 * graph (destroyed contexts) pass through unchanged.
 * @param {Audion.GraphContext} graphContext
 * @return {Audion.GraphContext}
 */
export function serializeGraphContext(graphContext) {
  if (!graphContext.graph) {
    return graphContext;
  }
  return {
    ...graphContext,
    graph: dagre.graphlib.json.write(graphContext.graph),
  };
}


================================================
FILE: src/devtools/setOptionsToGraphContext.ts
================================================
import * as dagre from 'dagre';

import {Audion} from './Types';

/**
 * Apply dagre layout options to a live context's graph. Destroyed contexts
 * (null context or graph) pass through unchanged.
 */
export function setOptionsToGraphContext([context, layoutOptions]: [
  Audion.GraphContext,
  dagre.GraphLabel,
]): Audion.GraphContext {
  const isLive = context.context && context.graph;
  if (isLive) {
    context.graph.setGraph(layoutOptions);
  }
  return context;
}


================================================
FILE: src/devtools.html
================================================
<html>
  <head>
    <title>DevTools: Audion Extension</title>
  </head>
  <body>
    <script src="audion-devtools.js"></script>
  </body>
</html>


================================================
FILE: src/extraSettingPage/options.html
================================================
<!DOCTYPE html>
<html>
<head>
    <title>Audion Addition Setting Options</title>
</head>
<body>
    <label>
        Click here to show more debug info:
        <input type="checkbox" id="showDebugInfo">
    </label>
    <script src="options.js"></script>
</body>
</html>

================================================
FILE: src/extraSettingPage/options.js
================================================
// prettier-ignore
/**
 * Initializes the options page by setting up event listeners and
 * restoring saved options.
 */
document.addEventListener(
  'DOMContentLoaded',
  /**
   * Wires up the options page: persists the debug-log checkbox state to
   * localStorage and restores the saved state on load.
   */
  function() {
    const checkbox = document.getElementById('showDebugInfo');

    /**
     * Persist the checkbox state to storage.
     */
    function saveOptions() {
      localStorage.setItem('showExtraDebugLog', checkbox.checked);
    }

    /**
     * Restore the checkbox state from storage.
     */
    function restoreOptions() {
      checkbox.checked =
          localStorage.getItem('showExtraDebugLog') === 'true';
    }

    checkbox.addEventListener('change', saveOptions);
    restoreOptions();
  },
);


================================================
FILE: src/panel/GraphSelector.ts
================================================
import {Observable, combineLatest, BehaviorSubject} from 'rxjs';
import {map, shareReplay, distinctUntilChanged} from 'rxjs/operators';

import {Audion} from '../devtools/Types';

// Map of graph context id to its latest GraphContext snapshot.
type GraphMap = {[key: string]: Audion.GraphContext};

// Reactive stream of the full graph map.
type GraphMapRX = Observable<GraphMap>;

// Placeholder emitted when no graph is selected or the selected id is unknown.
const EMPTY_GRAPH = {
  graph: {value: {width: 0, height: 0}, nodes: [], edges: []},
} as Audion.GraphContext;

/**
 * Control which graph is observed.
 */
export class GraphSelector {
  /** Ids of all currently tracked graphs. */
  options$: Observable<string[]>;
  /** Currently selected graph id ('' when none). */
  graphId$: Observable<string>;
  /** The selected graph, or EMPTY_GRAPH when the id is unknown. */
  graph$: Observable<Audion.GraphContext>;

  private _graphIdSubject: BehaviorSubject<string>;

  /** The currently selected graph id. */
  get graphId(): string {
    return this._graphIdSubject.value;
  }

  /**
   * Create a GraphSelector.
   * @param options
   */
  constructor({allGraphs$}: {allGraphs$: GraphMapRX}) {
    // Option list: the ids of every graph with a truthy context snapshot.
    this.options$ = allGraphs$.pipe(
      map((allGraphs) => {
        const ids: string[] = [];
        for (const [key, graphContext] of Object.entries(allGraphs)) {
          if (graphContext) {
            ids.push(key);
          }
        }
        return ids;
      }),
    );

    this._graphIdSubject = new BehaviorSubject('');
    this.graphId$ = this._graphIdSubject;

    // Re-resolve the selected graph whenever the id or the map changes.
    this.graph$ = combineLatest({
      id: this.graphId$,
      allGraphs: allGraphs$,
    }).pipe(
      map(({id, allGraphs}) => allGraphs[id] || EMPTY_GRAPH),
      distinctUntilChanged(),
      shareReplay(1),
    );
  }

  /**
   * Select the graph to observe.
   * @param graphId
   */
  select(graphId: string) {
    if (graphId !== this.graphId) {
      this._graphIdSubject.next(graphId);
    }
  }
}


================================================
FILE: src/panel/Observer.runtime.ts
================================================
import {Observable} from 'rxjs';
import {share} from 'rxjs/operators';

import {chrome} from '../chrome';

/**
 * Connect to chrome runtime through an observable.
 *
 * Subscribing opens a chrome.runtime port, forwards every value pushed by
 * `requests$` over that port, and emits every message the port receives.
 * Unsubscribing tears down the port and its listeners.
 *
 * @param requests$ observable of requests to send to devtools extension
 * context
 * @returns observable of messages received from devtools extension context
 */
export function connect<S, T>(requests$: Observable<S>): Observable<T> {
  return new Observable<T>((subscriber) => {
    const port = chrome.runtime.connect();

    // Forward outgoing requests to the devtools context.
    const requestSubscription = requests$.subscribe({
      next(value) {
        port.postMessage(value);
      },
    });

    // Relay messages from the devtools context to subscribers.
    const handleMessage: (arg0: any, arg1: Chrome.RuntimePort) => void = (
      message,
    ) => subscriber.next(message);

    // Surface a lost port as an error so downstream can react.
    const handleDisconnect = () =>
      subscriber.error(new Error('chrome.runtime disconnected'));

    port.onMessage.addListener(handleMessage);
    port.onDisconnect.addListener(handleDisconnect);

    // Teardown: stop forwarding, detach listeners, close the port.
    return () => {
      requestSubscription.unsubscribe();
      port.onMessage.removeListener(handleMessage);
      port.onDisconnect.removeListener(handleDisconnect);
      port.disconnect();
    };
  }).pipe(share());
}


================================================
FILE: src/panel/Types.ts
================================================
import * as PIXI from 'pixi.js';

/** @namespace AudionPanel */

/**
 * @typedef AudionPanel.Point
 * @property {number} x
 * @property {number} y
 */

/**
 * @typedef AudionPanel.Node
 * @property {AudionPanel.Point} position
 * @property {AudionPanel.Point} size
 */

/**
 * @typedef AudionPanel.Port
 * @property {AudionPanel.Node} node
 * @property {AudionPanel.Point} offset
 * @property {number} radius
 * @property {Array} edges
 */

export namespace AudionPanel {
  /** 2d coordinate or size in panel space. */
  export interface Point {
    x: number;
    y: number;
  }

  /** Renderable graph node with a position and a size. */
  export interface Node {
    // Position of the node.
    position: Point;
    // Size of the node: width in x, height in y.
    size: Point;

    // Redraw the display of a single port on this node.
    updatePortDisplay(portType: PortType, portIndex: number): void;
  }

  /** Kinds of ports a node can expose. */
  export enum PortType {
    INPUT = 'input',
    OUTPUT = 'output',
    PARAM = 'param',
  }

  /** Connection point on a node that edges attach to. */
  export interface Port {
    // Node this port belongs to.
    node: Node;
    // Offset of the port; presumably relative to the node's position —
    // confirm against the edge renderer.
    offset: Point;
    // Radius of the port.
    radius: number;
    // Edges connected to this port.
    edges: any[];

    // Update the display of the owning node.
    updateNodeDisplay(): void;
    // Draw this port's connection into the given graphics object.
    drawConnect(graphics: PIXI.Graphics): void;
  }
}


================================================
FILE: src/panel/components/WholeGraphButton.css
================================================
/* Small floating button pinned to the top-left corner of its container. */
.wholeGraphButton {
  position: absolute;
  top: 5px;
  left: 5px;
  cursor: pointer;
  opacity: 0.8;
  border-radius: 3px;
  width: 20px;
  height: 20px;
}


================================================
FILE: src/panel/components/WholeGraphButton.ts
================================================
import {fromEvent} from 'rxjs';

import style from './WholeGraphButton.css';
import wholeGraphButtonImage from './WholeGraphButton.svg';

/**
 * Render a button. Can be observed for when the button is clicked.
 */
export class WholeGraphButton {
  private readonly view = document.createElement('div');

  /** Emits once per click on the button. */
  readonly click$ = fromEvent(this.view, 'click');

  /** Create a WholeGraphButton. */
  constructor() {
    const {view} = this;
    view.className = style.wholeGraphButton;
    view.innerHTML = `<img src="${wholeGraphButtonImage}"
      alt="Resize to fit"
      title="Resize to fit" />`;
  }

  /** @returns the root element of this button */
  render() {
    return this.view;
  }
}


================================================
FILE: src/panel/components/collectGarbage.css
================================================
/* Lighter icon color when devtools uses its dark theme. */
:global(.-theme-with-dark-background) .collectIcon {
  --override-icon-mask-background-color: rgb(145 145 145);
}
/* Icon rendered by masking a solid color with the svg shape. */
.collectIcon {
  display: inline-block;
  -webkit-mask: url('./collectGarbage.svg') no-repeat center;
  mask: url('./collectGarbage.svg') no-repeat center;
  width: 28px;
  height: 24px;
  background-color: var(--override-icon-mask-background-color);
  --override-icon-mask-background-color: rgb(110 110 110);
}
/* Emphasize the icon while its toolbar button is hovered. */
:global(.toolbar-button):hover .collectIcon {
  background-color: var(--color-text-primary);
}


================================================
FILE: src/panel/components/collectGarbage.ts
================================================
import {fromEvent, merge, NEVER, Observable} from 'rxjs';
import {map, startWith, switchMap} from 'rxjs/operators';

import {Audion} from '../../devtools/Types';

import {setElementHTML} from './domUtils';
import style from './collectGarbage.css';

/**
 * Build the markup for the collect-garbage icon.
 * @returns html representation of the collect garbage icon
 */
function collectGarbageImageHTML(): string {
  const iconClass = style.collectIcon;
  return `<span class="${iconClass}"></span>`;
}

/**
 * Wire up the collect-garbage button: render its icon and translate clicks
 * into collect-garbage requests for the devtools context.
 * @param buttonElement$ observable of html elements to listen to events and
 * render an icon in
 * @returns observable of elements when they are modified or actions to be
 * acted on by the extension's devtools context
 */
export function renderCollectGarbage(
  buttonElement$: Observable<HTMLElement>,
): Observable<HTMLElement | Audion.DevtoolsCollectGarbageRequest> {
  // Each click becomes a request asking devtools to collect garbage.
  const collectGarbageAction$ = buttonElement$.pipe(
    switchMap((element) => fromEvent(element, 'click')),
    map(() => {
      const request = {
        type: 'collectGarbage',
      } as Audion.DevtoolsCollectGarbageRequest;
      return request;
    }),
  );

  // Observable that pushes the button icon once and never completes. If the
  // observable completed, setElementHTML would clean up and remove the html.
  const icon$ = NEVER.pipe(startWith(collectGarbageImageHTML()));

  return merge(
    setElementHTML(buttonElement$, icon$),
    collectGarbageAction$,
  );
}


================================================
FILE: src/panel/components/detailPanel.css
================================================
/* Horizontal padding on every direct child of the panel. */
.detailPanel > * {
  padding: 0 1rem;
}
/* Render all heading levels with normal weight. */
.detailPanel h1,
.detailPanel h2,
.detailPanel h3,
.detailPanel h4,
.detailPanel h5,
.detailPanel h6 {
  font-weight: normal;
}
/* Compact key/value details table. */
.detailPanel table {
  font-size: 12px;
}
/* Keys are de-emphasized and left-aligned. */
.detailPanel th {
  color: var(--color-text-secondary);
  font-weight: normal;
  text-align: left;
}
.detailPanel th,
.detailPanel td {
  padding: 0.2rem;
}


================================================
FILE: src/panel/components/detailPanel.ts
================================================
import {merge, NEVER, Observable} from 'rxjs';
import {distinctUntilChanged, map, startWith, switchMap} from 'rxjs/operators';

import {Audion} from '../../devtools/Types';
import {setElementHTML, toggleElementClassList} from './domUtils';
import style from './detailPanel.css';

// Human-readable names for the context types the panel knows about.
const contextTypeNameMap = {
  realtime: 'AudioContext',
  offline: 'OfflineAudioContext',
};

/**
 * Render a context's details as html.
 * @param context web audio context's context information
 * @returns html representation of context information
 */
function graphContextHTML(
  context: Audion.GraphContext['context'],
): string {
  // Fall back to the raw context type when it has no known display name.
  const typeName =
    contextTypeNameMap[context.contextType] || context.contextType;
  return `<h2>${typeName}</h2>
<p>${context.contextId.slice(-6)}</p>
<hr>
<table cellspacing="0" cellpadding="0">
<tr><th>State</th><td>${context.contextState}</td></tr>
<tr><th>Sample Rate</th><td>${context.sampleRate}</td></tr>
<tr><th>Callback Buffer Size</th><td>${context.callbackBufferSize}</td></tr>
<tr><th>Max Output Channels</th><td>${context.maxOutputChannelCount}</td></tr>
</table>
`;
}

/**
 * Render a node's base properties as html.
 * @param node web audio node's node information
 * @returns html representation of web audio node information
 */
function graphNodeBaseHTML(node: Audion.GraphNode['node']): string {
  return `<h2>${node.nodeType}</h2>
<p>${node.nodeId}</p>
<hr>
<table cellspacing="0" cellpadding="0">
<tr><th>Channel Count</th><td>${node.channelCount}</td></tr>
<tr><th>Channel Count Mode</th><td>${node.channelCountMode}</td></tr>
<tr><th>Channel Interpretation</th><td>${node.channelInterpretation}</td></tr>
<tr><th>Number of Inputs</th><td>${node.numberOfInputs}</td></tr>
<tr><th>Number of Outputs</th><td>${node.numberOfOutputs}</td></tr>
</table>
`;
}

/**
 * Render a single audio parameter's details as html.
 * @param param web audio node's single parameter information
 * @returns html representation of parameter information
 */
function graphParamHTML(
  param: Audion.GraphNode['params'][number],
): string {
  return `<h4>${param.paramType}</h4>
<p>${param.paramId}</p>
<hr>
<table cellspacing="0" cellpadding="0">
<tr><th>Automation Rate</th><td>${param.rate}</td></tr>
<tr><th>Default Value</th><td>${param.defaultValue}</td></tr>
<tr><th>Minimum Value</th><td>${param.minValue}</td></tr>
<tr><th>Maximum Value</th><td>${param.maxValue}</td></tr>
</table>
`;
}

/**
 * Render a node plus its parameters as html.
 * @param node web audio node
 * @returns html representation of a node's node and parameters information
 */
function graphNodeHTML({node, params}: Audion.GraphNode): string {
  // Only render the parameters section when the node has parameters.
  let paramsSection = '';
  if (params.length) {
    paramsSection = `<h3>Parameters:</h3>
${params.map(graphParamHTML).join('')}`;
  }
  return `${graphNodeBaseHTML(node)}
${paramsSection}
`;
}

/**
 * Render the panel that shows details for the selected node or context.
 *
 * Shows the selected node's details when a node is selected, otherwise the
 * selected context's details, otherwise a "(no recordings)" placeholder.
 * @param element$ observable of html element to render detail panel into
 * @param contextData$ observable of context data to render
 * @param nodeData$ observable of node data to render
 * @returns observable of html elements as they are modified
 */
export function renderDetailPanel(
  element$: Observable<HTMLElement>,
  contextData$: Observable<Audion.GraphContext>,
  nodeData$: Observable<Audion.GraphNode>,
): Observable<HTMLElement> {
  return merge(
    // Apply the panel's css class once; NEVER keeps the stream open so the
    // class is never cleaned up and removed.
    toggleElementClassList(
      element$,
      NEVER.pipe(startWith([style.detailPanel])),
    ),
    setElementHTML(
      element$,
      contextData$.pipe(
        // Re-render only when the selected context actually changes.
        distinctUntilChanged((previous, current) =>
          previous && previous.context && current && current.context
            ? previous.context.contextId === current.context.contextId
            : false,
        ),
        switchMap((graphContext) =>
          nodeData$.pipe(
            // Re-render only when the selected node actually changes.
            distinctUntilChanged((previous, current) =>
              previous && previous.node && current && current.node
                ? previous.node.nodeId === current.node.nodeId
                : false,
            ),
            // Prefer node details, then context details, then a placeholder.
            map((graphNode) =>
              graphNode && graphNode.node
                ? graphNodeHTML(graphNode)
                : graphContext && graphContext.context
                ? graphContextHTML(graphContext.context)
                : '(no recordings)',
            ),
          ),
        ),
      ),
    ),
  );
}


================================================
FILE: src/panel/components/domUtils.ts
================================================
import {defer, Observable, of} from 'rxjs';
import {finalize, map, scan, switchMap} from 'rxjs/operators';

/**
 * Create a factory that writes the latest value from a data observable onto
 * one property of the latest element from an element observable.
 * @param property html element property
 * @returns factory that modifies a latest element with the latest data
 */
export function setElementProperty<
  E extends HTMLElement,
  K extends keyof E,
  T extends E[K],
>(property: K) {
  return function (element$: Observable<E>, data$: Observable<T>) {
    return element$.pipe(
      switchMap((view) => {
        // Guarded write onto the current element.
        const write = (value) => {
          if (view) {
            view[property] = value;
          }
        };
        return data$.pipe(
          map((value) => {
            write(value);
            return view;
          }),
          // Clear the property when the element changes or the data stream
          // finalizes.
          finalize(() => write(null)),
        );
      }),
    );
  };
}

/**
 * Set that values can be added to and removed from.
 */
interface PropertySet<T> {
  add(value: T): any;
  remove(value: T): any;
}

/**
 * Description of a change to a PropertySet.
 */
interface PropertySetChange {
  /** Items to remove from the PropertySet. */
  deleteItems: string[];
  /** Items to add to the PropertySet. */
  addItems: string[];
  /** All items to remove if the element changes or finalizes. */
  allItems: string[];
}

/**
 * Create a factory that adds and removes the items contained in a observable
 * of array values to the latest element.
 *
 * Items present in the latest array are added to the element's property set
 * and items no longer present are removed. When the element changes or the
 * data stream finalizes, every currently applied item is removed again
 * (mirroring assignElementProperty's cleanup).
 * @param property html element property holding a PropertySet
 * @returns factory that adds and removes items on an elements property
 */
export function toggleElementPropertySet<
  E extends HTMLElement,
  K extends {
    [key in keyof E]: E[key] extends PropertySet<string> ? key : never;
  }[any],
  T extends string[],
>(property: K) {
  return function (element$: Observable<E>, data$: Observable<T>) {
    // Diff each array against the previous one so only changed items are
    // touched on the element.
    const valueDiff$ = data$.pipe(
      scan(
        ([previous], current) => {
          const allItems = current;
          const deleteItems = previous.filter(
            (value) => !current.includes(value),
          );
          const addItems = allItems.filter(
            (value) => !previous.includes(value),
          );

          return [current, {deleteItems, addItems, allItems}] as [
            T,
            PropertySetChange,
          ];
        },
        // Seed now includes allItems so the seed matches PropertySetChange.
        [[], {deleteItems: [], addItems: [], allItems: []}] as [
          T,
          PropertySetChange,
        ],
      ),
      map(([, change]) => change),
    );
    return element$.pipe(
      switchMap((view) => {
        // Items currently applied to this element; removed on cleanup.
        let finalizeItems: string[] = [];
        return valueDiff$.pipe(
          map((diff) => {
            if (view) {
              for (const value of diff.deleteItems) {
                (view[property] as PropertySet<string>).remove(value);
              }
              for (const value of diff.addItems) {
                (view[property] as PropertySet<string>).add(value);
              }
              finalizeItems = diff.allItems;
            }
            return view;
          }),
          // Previously an empty no-op, which leaked applied items when the
          // element changed or the stream finalized even though
          // PropertySetChange.allItems documents that cleanup. Remove
          // everything that was applied.
          finalize(() => {
            if (view) {
              for (const value of finalizeItems) {
                (view[property] as PropertySet<string>).remove(value);
              }
            }
          }),
        );
      }),
    );
  };
}

/**
 * Change to a html element property's map structure.
 */
interface PropertyMapChange {
  /** Keys to remove from the property's map. */
  deleteKeys: string[];
  /** Keys to change to a given value. */
  setKeys: [string, any][];
  /** All keys. Used to remove all keys when the element changes or finalizes. */
  allKeys: string[];
}

/**
 * Create a factory that assigns the keys of the latest object value onto a
 * map-like property (such as `style`) of the latest element.
 *
 * Keys removed from the data object are set to undefined on the element. On
 * element change or finalization, every previously assigned key is set to
 * undefined.
 * @param property html element property holding a map-like object
 * @returns factory that assigns latest data keys onto a latest element
 */
export function assignElementProperty<
  E extends HTMLElement,
  K extends keyof E,
  T extends {[key in keyof E[K]]?: E[K][key]},
>(property: K) {
  return function (element$: Observable<E>, data$: Observable<T>) {
    // Diff each object against the previous one so only changed keys are
    // written.
    const valueDiff$ = data$.pipe(
      scan(
        ([previous], current) => {
          const allKeys = Object.keys(current);
          const deleteKeys = Object.keys(previous).filter(
            (key) => !(key in current),
          );
          // Only keys whose values actually changed are re-assigned.
          const setKeys = allKeys
            .filter((key) => current[key] !== previous[key])
            .map((key) => [key, current[key]]);

          return [current, {deleteKeys, setKeys, allKeys}] as [
            T,
            PropertyMapChange,
          ];
        },
        [{}, {deleteKeys: [], setKeys: []}] as [T, PropertyMapChange],
      ),
      map(([, change]) => change),
    );
    return element$.pipe(
      switchMap((view) => {
        // Keys applied so far; cleared when the element changes or the
        // stream finalizes.
        let finalizeKeys = [];
        return valueDiff$.pipe(
          map((diff) => {
            if (view) {
              for (const key of diff.deleteKeys) {
                view[property][key] = undefined;
              }
              for (const [key, value] of diff.setKeys) {
                view[property][key] = value;
              }
              finalizeKeys = diff.allKeys;
            }
            return view;
          }),
          finalize(() => {
            if (view) {
              for (const key of finalizeKeys) {
                view[property][key] = undefined;
              }
            }
          }),
        );
      }),
    );
  };
}

/**
 * Set latest element's innerText property to latest data string value.
 */
export const setElementText = setElementProperty('innerText');

/**
 * Set latest element's innerHTML property to latest data string value.
 */
export const setElementHTML = setElementProperty('innerHTML');

/**
 * Set latest element's className property to latest data string value.
 */
export const setElementClassName = setElementProperty('className');

/**
 * Add and remove latest data string array to latest element's classList set
 * property.
 */
export const toggleElementClassList = toggleElementPropertySet('classList');

/**
 * Set and delete changed keys of latest data object to latest element's style
 * object map property.
 */
export const assignElementStyle = assignElementProperty('style');

/**
 * Look up an element lazily: the query runs on each subscription to the
 * returned observable, not when this function is called.
 * @param query css query selector to find an element for
 * @param dom document to query
 * @returns observable of a html element matching the query
 */
export function querySelector(
  query: string,
  dom: {querySelector(...args: any): any} = document,
): Observable<HTMLElement> {
  return defer(() => {
    const element = dom.querySelector(query);
    return of(element);
  });
}


================================================
FILE: src/panel/components/realtimeSummary.ts
================================================
import {map, Observable} from 'rxjs';

import {Audion} from '../../devtools/Types';
import {setElementHTML} from './domUtils';

/**
 * Format web audio context performance data in html.
 * @param realtimeData realtime performance data for a web audio context
 * @returns rendered html summary of performance data, or an empty string
 * when no data is available
 */
export function realtimeSummaryHTML(realtimeData: Audion.ContextRealtimeData) {
  if (!realtimeData) return '';
  const currentTime = realtimeData.currentTime.toFixed(3);
  // Mean of the audio callback interval, converted from seconds to
  // milliseconds.
  const callbackIntervalMean = (
    realtimeData.callbackIntervalMean * 1000
  ).toFixed(3);
  // Standard deviation (square root of the variance) of the callback
  // interval, in milliseconds.
  const callbackIntervalVariance = (
    Math.sqrt(realtimeData.callbackIntervalVariance) * 1000
  ).toFixed(3);
  // Render capacity expressed as a percentage.
  const renderCapacity = (realtimeData.renderCapacity * 100).toFixed(3);
  // The early return above guarantees realtimeData is truthy here, so the
  // former trailing `realtimeData ? … : ''` ternary was dead code.
  return `<span>Current Time: ${currentTime} s</span>&nbsp;
<span>&#10072;</span>&nbsp;
<span>Callback Interval: &mu; = ${callbackIntervalMean} ms &sigma; = ${callbackIntervalVariance} ms</span>&nbsp;
<span>&#10072;</span>&nbsp;
<span>Render Capacity: ${renderCapacity} %</span>`;
}

/**
 * Render a summary of web audio context performance.
 * @param element$ current html element to render summary into
 * @param data$ current performance data
 * @returns an element pushed to renderRealtimeSummary after its content is
 * modified
 */
export function renderRealtimeSummary(
  element$: Observable<HTMLElement>,
  data$: Observable<Audion.ContextRealtimeData>,
) {
  return setElementHTML(element$, data$.pipe(map(realtimeSummaryHTML)));
}


================================================
FILE: src/panel/components/selectGraph.css
================================================
/* A single graph entry in the dropdown list. */
.dropdownOption {
  display: flex;
  height: 2rem;
  align-items: center;
  cursor: pointer;
  padding: 0 0.2rem;
}

/* Highlight hovered options, and the toggle button while the dropdown is
   open. */
/* NOTE(review): selectGraph.ts also references style.dropdownOptionTitle,
   which is not defined in this file — confirm whether a rule is missing. */
.dropdownOption:hover,
.dropdownButtonActive {
  background: var(--color-background-elevation-2);
}


================================================
FILE: src/panel/components/selectGraph.ts
================================================
import {
  BehaviorSubject,
  combineLatest,
  fromEvent,
  merge,
  Observable,
  of,
} from 'rxjs';
import {
  distinctUntilChanged,
  filter,
  map,
  switchMap,
  tap,
} from 'rxjs/operators';

import {Audion} from '../../devtools/Types';
import {
  assignElementStyle,
  setElementClassName,
  setElementHTML,
  setElementText,
  toggleElementClassList,
} from './domUtils';
import style from './selectGraph.css';

/**
 * Title of the dropdown toggle button when no graphs are selected or available
 * to select.
 */
const NO_GRAPHS_AVAILABLE_TITLE = '(no recordings)';

/**
 * Render title for an audio graph with only the graphId.
 * @param graphId unique graph identifier
 * @returns rendered graph title
 */
function graphIdTitle(graphId: string) {
  const shortId = graphId.slice(-6);
  return `unknown (${shortId})`;
}

/**
 * Render title for an audio graph from its context type and id.
 * @param graph graph context to derive a title from
 * @returns rendered graph title
 */
function graphTitle(graph: Audion.GraphContext) {
  const shortId = graph.id.slice(-6);
  return `${graph.context.contextType} (${shortId})`;
}

/**
 * Create a map of graph IDs to rendered graph titles.
 * @param allGraphs map of graph IDs to graph contexts
 * @returns map of graph IDs to rendered graph titles
 */
function graphTitles(allGraphs: Audion.GraphContextsById): {
  [key: string]: string;
} {
  const titles: {[key: string]: string} = {};
  for (const [id, graph] of Object.entries(allGraphs)) {
    titles[id] = graphTitle(graph);
  }
  return titles;
}

/**
 * Render current graph title or some copy to indicate no graph is selected or
 * no graph is available.
 * @param param currently selected graph ID and ID to title map
 * @returns rendered button title text
 */
function buttonTitle([graphId, graphTitles]) {
  if (!graphId) {
    return NO_GRAPHS_AVAILABLE_TITLE;
  }
  // Fall back to an id-only title when the graph has no rendered title.
  return graphTitles[graphId] || graphIdTitle(graphId);
}

/**
 * Render html list of graph options to select from.
 * @param graphTitles graph ID to title map
 * @returns html list of graph titles to select from
 */
const dropdownListHTML = function (graphTitles: {
  [graphId: string]: string;
}): string {
  const options: string[] = [];
  for (const [graphId, title] of Object.entries(graphTitles)) {
    // NOTE(review): style.dropdownOptionTitle has no matching rule in
    // selectGraph.css — confirm whether one is missing.
    options.push(
      `<div class="${style.dropdownOption}" data-option="${graphId}"><div class="${style.dropdownOptionTitle}">${title}</div></div>`,
    );
  }
  return options.join('');
};

/**
 * Test if two maps of graph titles are equivalent.
 *
 * Entries are compared pairwise in insertion order, so maps with the same
 * entries in a different order are NOT considered equal. Used to reduce
 * further processing of graph title information like updating the dom with
 * new html for the new set of titles.
 *
 * @param previousTitles map of graph titles
 * @param currentTitles map of graph titles
 * @returns true if maps match
 */
function equalTitles(
  previousTitles: {[graphId: string]: string},
  currentTitles: {[graphId: string]: string},
) {
  const previousEntries = Object.entries(previousTitles);
  const currentEntries = Object.entries(currentTitles);
  if (previousEntries.length !== currentEntries.length) {
    return false;
  }
  for (let index = 0; index < previousEntries.length; index++) {
    const [previousKey, previousValue] = previousEntries[index];
    const [currentKey, currentValue] = currentEntries[index];
    if (previousKey !== currentKey || previousValue !== currentValue) {
      return false;
    }
  }
  return true;
}

/**
 * Render a widget displaying the current selected graph title. When clicked
 * show a list of currently available graphs to select from.
 *
 * @param titleElement$ current html element to render dropdown button
 * title into
 * @param dropdownListElement$ current html element to render dropdown
 * list into
 * @param buttonElement$ current html element that when clicked opens the
 * dropdown
 * @param graphId$ currently selected graph id
 * @param allGraphs$ current map of graph ids to graph contexts
 * @returns an element pushed to renderSelectGraph after its content is
 * modified
 */
export function renderSelectGraph(
  titleElement$: Observable<HTMLElement>,
  dropdownListElement$: Observable<HTMLElement>,
  buttonElement$: Observable<HTMLElement>,
  graphId$: Observable<string>,
  allGraphs$: Observable<Audion.GraphContextsById>,
) {
  const distinctGraphId$ = graphId$.pipe(distinctUntilChanged());
  // Recompute titles only when they actually change, to avoid redundant
  // dom updates downstream.
  const graphTitles$ = allGraphs$.pipe(
    map(graphTitles),
    distinctUntilChanged(equalTitles),
  );
  const graphIdAndTitles$ = combineLatest([distinctGraphId$, graphTitles$]);

  // Whether the dropdown list is currently open.
  const dropdownVisible$ = new BehaviorSubject(false);

  const body$ = of(document.body);
  const bodyClick$ = body$.pipe(
    switchMap((element) => fromEvent(element, 'click')),
  );

  // Toggle the dropdown when the button is clicked. The filter/map tail
  // discards every value: this stream exists only for its tap side effect.
  const openDropdownAction$ = buttonElement$.pipe(
    switchMap((element) => fromEvent(element, 'click')),
    tap(() => dropdownVisible$.next(!dropdownVisible$.value)),
    filter(() => false),
    map(() => {}),
  );
  // Close the dropdown when a click lands outside both the toggle button
  // and the dropdown list itself.
  const closeDropdownAction$ = combineLatest([
    buttonElement$,
    dropdownListElement$,
  ]).pipe(
    switchMap(([buttonElement, dropdownElement]) =>
      bodyClick$.pipe(
        filter(
          (ev) =>
            ev.target instanceof Element &&
            !(
              buttonElement.contains(ev.target) ||
              dropdownElement.contains(ev.target)
            ),
        ),
      ),
    ),
    tap(() => dropdownVisible$.next(false)),
    filter(() => false),
    map(() => {}),
  );

  const eventAction$ = merge(openDropdownAction$, closeDropdownAction$);

  const titleText$ = graphIdAndTitles$.pipe(map(buttonTitle));
  // Style the button as active while the dropdown is open.
  const buttonClassName$ = dropdownVisible$.pipe(
    map((visible) => (visible ? [style.dropdownButtonActive] : [])),
  );
  const dropdownListHTML$ = graphTitles$.pipe(map(dropdownListHTML));
  // Emit a selectGraph action when one of the dropdown options is clicked.
  const dropdownListIdSelected$ = dropdownListElement$.pipe(
    switchMap((element) => fromEvent(element, 'click')),
    map((clickEvent) => {
      let {target} = clickEvent;
      if (target instanceof HTMLElement) {
        // The click may land on a descendant; resolve the option element.
        const optionElement = target.closest('[data-option]');
        if (optionElement instanceof HTMLElement) {
          const graphId = optionElement.dataset['option'];
          if (graphId) {
            return {type: 'selectGraph', graphId};
          }
        }
      }
    }),
    filter(Boolean),
    tap(() => dropdownVisible$.next(false)),
  );
  // Hide the list with a css class while the dropdown is closed.
  const dropdownClassName$ = dropdownVisible$.pipe(
    map(
      (visible) => `web-audio-select-graph-dropdown ${visible ? '' : 'hidden'}`,
    ),
  );
  // Position the open dropdown just under the toggle button.
  const dropdownPositionStyle$ = buttonElement$.pipe(
    switchMap((buttonElement) =>
      dropdownVisible$.pipe(
        map((visible) => {
          const rect = buttonElement.getBoundingClientRect();
          return visible
            ? {
                top: `${rect.bottom}px`,
                left: `${rect.left}px`,
              }
            : {};
        }),
      ),
    ),
  );

  return merge(
    setElementText(titleElement$, titleText$),
    toggleElementClassList(buttonElement$, buttonClassName$),
    setElementHTML(dropdownListElement$, dropdownListHTML$),
    setElementClassName(dropdownListElement$, dropdownClassName$),
    assignElementStyle(dropdownListElement$, dropdownPositionStyle$),
    dropdownListIdSelected$,
    eventAction$,
  );
}


================================================
FILE: src/panel/graph/AudioEdgeArrowGraphics.ts
================================================
import * as PIXI from 'pixi.js';
import {GraphColor} from './graphStyle';

const ARROW_LENGTH = 16;
const ARROW_HEIGHT = 8;

const ARROW_ANGLE_ROUNDING = 32;

/**
 * Draws triangular arrow heads at the destination end of graph edges.
 *
 * Geometry is cached per rounded arrow angle so arrows pointing in similar
 * directions share one geometry instead of re-tessellating.
 */
export class EdgeArrowGraphics {
  // One slot per rounded angle step in [-PI, PI]; null until first built
  // (2 * ARROW_ANGLE_ROUNDING + 1 slots total).
  geometryCache = new Array(ARROW_ANGLE_ROUNDING * 2 + 1).fill(null);

  /**
   * Draw an arrow head whose tip is at `end`, oriented along the direction
   * from `end` toward `pointOnLine`.
   * @param pointOnLine a second point on the edge's line
   * @param end position of the arrow tip
   * @param graphics graphics to draw into
   */
  drawFromPoint(
    pointOnLine: PIXI.Point,
    end: PIXI.Point,
    graphics: PIXI.Graphics,
  ) {
    // Normalize (pointOnLine - end) into a unit direction vector.
    const arrowMagnitude = Math.hypot(
      pointOnLine.y - end.y,
      pointOnLine.x - end.x,
    );
    const arrowUnitX = (pointOnLine.x - end.x) / arrowMagnitude;
    const arrowUnitY = (pointOnLine.y - end.y) / arrowMagnitude;

    this.drawFromUnit(arrowUnitX, arrowUnitY, end, graphics);
  }

  /**
   * Draw a filled triangular arrow head from a unit direction vector.
   * @param arrowUnitX x of the unit vector pointing from the tip back along
   * the line
   * @param arrowUnitY y of the unit vector pointing from the tip back along
   * the line
   * @param end position of the arrow tip
   * @param graphics graphics to draw into
   */
  drawFromUnit(
    arrowUnitX: number,
    arrowUnitY: number,
    end: PIXI.Point,
    graphics: PIXI.Graphics,
  ) {
    graphics.beginFill(GraphColor.INPUT_OUTPUT);
    // Two base corners offset perpendicular to the direction, plus the tip.
    graphics.drawPolygon([
      new PIXI.Point(
        end.x + arrowUnitX * ARROW_LENGTH + arrowUnitY * ARROW_HEIGHT,
        end.y + arrowUnitY * ARROW_LENGTH - arrowUnitX * ARROW_HEIGHT,
      ),
      new PIXI.Point(
        end.x + arrowUnitX * ARROW_LENGTH - arrowUnitY * ARROW_HEIGHT,
        end.y + arrowUnitY * ARROW_LENGTH + arrowUnitX * ARROW_HEIGHT,
      ),
      new PIXI.Point(end.x, end.y),
    ]);
    graphics.endFill();
  }

  /**
   * Get (building and caching on first use) the geometry for an arrow at
   * the rounded angle of the line from `end` toward `pointOnLine`.
   * @param pointOnLine a second point on the edge's line
   * @param end position of the arrow tip
   * @returns cached geometry for the rounded angle
   */
  getGeometry(pointOnLine: PIXI.Point, end: PIXI.Point) {
    const magnitude = Math.hypot(pointOnLine.x - end.x, pointOnLine.y - end.y);
    const unitX = (pointOnLine.x - end.x) / magnitude;
    const unitY = (pointOnLine.y - end.y) / magnitude;
    // Round the angle to one of the cache's discrete steps.
    const angle = Math.atan2(unitY, unitX);
    const angleSliceIndex = Math.round(
      (angle / Math.PI) * ARROW_ANGLE_ROUNDING,
    );
    // Shift [-ARROW_ANGLE_ROUNDING, ARROW_ANGLE_ROUNDING] to a 0-based index.
    const cacheIndex = angleSliceIndex + ARROW_ANGLE_ROUNDING;
    if (this.geometryCache[cacheIndex] === null) {
      const graphics = new PIXI.Graphics();
      const angleRounded = (angleSliceIndex / ARROW_ANGLE_ROUNDING) * Math.PI;
      // NOTE(review): the cached shape is drawn with its tip 4 units from
      // the local origin along the rounded angle — presumably an intentional
      // offset; confirm against how createGraphics positions the result.
      this.drawFromUnit(
        Math.cos(angleRounded),
        Math.sin(angleRounded),
        new PIXI.Point(Math.cos(angleRounded) * 4, Math.sin(angleRounded) * 4),
        graphics,
      );
      this.geometryCache[cacheIndex] = graphics.geometry;
    }
    return this.geometryCache[cacheIndex];
  }

  /**
   * Create a graphics object reusing cached arrow geometry, positioned at
   * the arrow tip.
   * @param pointOnLine a second point on the edge's line
   * @param end position of the arrow tip
   * @returns graphics positioned at `end`
   */
  createGraphics(pointOnLine: PIXI.Point, end: PIXI.Point) {
    const graphics = new PIXI.Graphics(this.getGeometry(pointOnLine, end));
    graphics.position.set(end.x, end.y);
    return graphics;
  }
}


================================================
FILE: src/panel/graph/AudioEdgeCurvedLineGraphics.ts
================================================
import * as PIXI from 'pixi.js';

import {AudionPanel} from '../Types';

import {GraphColor} from './graphStyle';

const STEP_RATIO = 1 / 10;

const LINE_COEFF = createLineCoefficients();

interface LineCoefficients {
  ax: number;
  ay: number;
  bx: number;
  by: number;
  cx: number;
  cy: number;
  dx: number;
  dy: number;
}

/**
 * Draws the curved line portion of graph edges.
 *
 * Geometry is cached per integer x/y extent of the curve; extents larger
 * than 100 units on either axis are drawn fresh and not cached (presumably
 * to bound the cache's size).
 */
export class EdgeCurvedLineGraphics {
  // geometryCache[i][j]: geometry for a curve spanning i by j units.
  geometryCache: PIXI.GraphicsGeometry[][] = [];

  /**
   * Get the geometry for a curve from a to d, building and caching it when
   * its extent is small enough.
   * @param a start point of the curve
   * @param d end point of the curve
   * @returns a graphics geometry tracing the curve
   */
  getGeometry(a: PIXI.Point, d: PIXI.Point) {
    const i = Math.floor(Math.abs(d.x - a.x));
    const j = Math.floor(Math.abs(d.y - a.y));

    // Too large to cache: build a one-off geometry.
    if (i > 100 || j > 100) {
      const graphics = new PIXI.Graphics();
      // Interior control points at half the width, one and two thirds of
      // the height.
      this.drawCurvedLine(
        new PIXI.Point(),
        new PIXI.Point(i / 2, j / 3),
        new PIXI.Point(i / 2, (j * 2) / 3),
        new PIXI.Point(i, j),
        graphics,
        new PIXI.Point(),
      );
      return graphics.geometry;
    }

    if (!this.geometryCache[i]) {
      this.geometryCache[i] = [];
    }
    if (!this.geometryCache[i][j]) {
      const b0 = new PIXI.Point(i / 2, j / 3);
      const c0 = new PIXI.Point(i / 2, (j * 2) / 3);
      const d0 = new PIXI.Point(i, j);
      const graphics = new PIXI.Graphics();
      this.drawCurvedLine(
        new PIXI.Point(),
        b0,
        c0,
        d0,
        graphics,
        new PIXI.Point(),
      );
      this.geometryCache[i][j] = graphics.geometry;
    }

    return this.geometryCache[i][j];
  }

  /**
   * Create a graphics object for the curve from a to d, reusing cached
   * geometry. Cached geometry is built in the positive quadrant, so the
   * graphics is positioned at a and mirrored with a negative scale when d
   * lies left of or above a.
   * @param a start point of the curve
   * @param d end point of the curve
   * @returns positioned (and possibly mirrored) graphics for the curve
   */
  createGraphics(a: PIXI.Point, d: PIXI.Point) {
    const graphics = new PIXI.Graphics(this.getGeometry(a, d));
    graphics.position.set(a.x, a.y);
    const x = d.x - a.x;
    const y = d.y - a.y;
    // Mirror per axis to match the actual direction of the line.
    graphics.scale.set(
      x === 0 ? 1 : x / Math.abs(x),
      y === 0 ? 1 : y / Math.abs(y),
    );
    return graphics;
  }

  /**
   * Draw a curved line from a to d shaped by interior control points b and
   * c, approximated with short straight segments.
   * @param a start point
   * @param b first interior control point
   * @param c second interior control point
   * @param d end point
   * @param graphics graphics to draw into
   * @param pointOnLine scratch point; left holding the last interpolated
   * point on the line
   */
  drawCurvedLine(
    a: AudionPanel.Point,
    b: AudionPanel.Point,
    c: AudionPanel.Point,
    d: AudionPanel.Point,
    graphics: PIXI.Graphics,
    pointOnLine: AudionPanel.Point,
  ) {
    const lineCoeffs = buildLineCoefficients(a, b, c, d, LINE_COEFF);

    // Segment count scales with the straight-line distance from a to d.
    const lineMagnitudeEstimate = Math.hypot(a.y - d.y, a.x - d.x);
    const steps = Math.max(2, Math.ceil(lineMagnitudeEstimate * STEP_RATIO));

    graphics.lineStyle(2, GraphColor.INPUT_OUTPUT);

    graphics.moveTo(a.x, a.y);
    // NOTE(review): the loop stops at t = (steps - 1) / steps, so the drawn
    // line never quite reaches d — possibly intentional (an arrow head is
    // drawn at edge ends elsewhere); confirm.
    for (let i = 1; i < steps; i++) {
      interpolateCoefficients(lineCoeffs, i / steps, pointOnLine);
      graphics.lineTo(pointOnLine.x, pointOnLine.y);
    }
    graphics.lineStyle(0);
    graphics.closePath();
  }

  /**
   * Adjust a point along a line by amount radius: moves destination toward
   * end by radius units, in place.
   * @param end point defining the direction to move toward
   * @param destination point adjusted in place
   * @param radius distance to move destination toward end
   */
  adjustPoint(
    end: AudionPanel.Point,
    destination: AudionPanel.Point,
    radius: number,
  ) {
    const magnitude = Math.hypot(end.y - destination.y, end.x - destination.x);

    destination.x += ((end.x - destination.x) / magnitude) * radius;
    destination.y += ((end.y - destination.y) / magnitude) * radius;
  }
}

/**
 * Create a LineCoefficients object with every coefficient zeroed, ready to
 * be filled in place by buildLineCoefficients.
 * @return zeroed cubic coefficients
 */
function createLineCoefficients(): LineCoefficients {
  return {
    ax: 0,
    ay: 0,
    bx: 0,
    by: 0,
    cx: 0,
    cy: 0,
    dx: 0,
    dy: 0,
  };
}

/**
 * Expand a cubic Bezier curve defined by control points a, b, c, d into
 * per-axis polynomial coefficients of the form `a*t^3 + b*t^2 + c*t + d`,
 * so the curve can be sampled cheaply by interpolateCoefficients.
 * @param a start point
 * @param b first control point
 * @param c second control point
 * @param d end point
 * @param coeff record to fill; a fresh one is allocated when omitted
 * @return the filled coefficient record
 */
function buildLineCoefficients(
  a: AudionPanel.Point,
  b: AudionPanel.Point,
  c: AudionPanel.Point,
  d: AudionPanel.Point,
  coeff = createLineCoefficients(),
): LineCoefficients {
  coeff.ax = d.x - 3 * c.x + 3 * b.x - a.x;
  coeff.bx = 3 * c.x - 6 * b.x + 3 * a.x;
  coeff.cx = 3 * b.x - 3 * a.x;
  coeff.dx = a.x;

  coeff.ay = d.y - 3 * c.y + 3 * b.y - a.y;
  coeff.by = 3 * c.y - 6 * b.y + 3 * a.y;
  coeff.cy = 3 * b.y - 3 * a.y;
  coeff.dy = a.y;

  return coeff;
}

/**
 * Evaluate cubic polynomial coefficients at parameter t.
 * @param coeff cubic coefficients produced by buildLineCoefficients
 * @param t curve parameter, between 0 and 1 inclusive
 * @param destination point written in place; a new PIXI.Point when omitted
 * @return destination, for chaining
 */
function interpolateCoefficients(
  coeff: LineCoefficients,
  t: number,
  destination: AudionPanel.Point = new PIXI.Point(),
): AudionPanel.Point {
  const squared = t * t;
  const cubed = squared * t;
  destination.x =
    coeff.ax * cubed + coeff.bx * squared + coeff.cx * t + coeff.dx;
  destination.y =
    coeff.ay * cubed + coeff.by * squared + coeff.cy * t + coeff.dy;
  return destination;
}


================================================
FILE: src/panel/graph/AudioEdgeRender.ts
================================================
import * as PIXI from 'pixi.js';

import type {AudionPanel} from '../Types';

import {EdgeArrowGraphics} from './AudioEdgeArrowGraphics';
import {EdgeCurvedLineGraphics} from './AudioEdgeCurvedLineGraphics';

import {GraphColor} from './graphStyle';

// Arrowhead dimensions, in graph units (see drawArrow).
const ARROW_LENGTH = 12;
const ARROW_HEIGHT = 4;

// Curve samples per unit of straight-line edge length (see drawCurvedLine).
const STEP_RATIO = 1 / 10;

// Shared scratch record reused by drawCurvedLine to avoid a per-draw
// allocation of coefficient objects.
const LINE_COEFF = createLineCoefficients();

/**
 * Identity of an edge: source node id (v), destination node id (w), and a
 * name distinguishing parallel edges between the same pair of nodes.
 */
export interface AudioEdgeKey {
  v: string;
  w: string;
  name: string;
}

/**
 * Render a line between AudionNodes and their inputs, outputs, and parameters.
 */
export class AudioEdgeRender {
  key: AudioEdgeKey;
  source: AudionPanel.Port;
  destination: AudionPanel.Port;
  parent: PIXI.Container;
  graphics: PIXI.Graphics;
  container: PIXI.Container;

  /**
   * @param options edge identity plus its source and destination ports
   */
  constructor({
    key,
    source,
    destination,
  }: {
    key: AudioEdgeKey;
    source: AudionPanel.Port;
    destination: AudionPanel.Port;
  }) {
    this.key = key;
    this.source = source;
    this.destination = destination;
    this.parent = null;
    this.graphics = new PIXI.Graphics();
    this.container = new PIXI.Container();

    // Register this edge with both endpoint ports so port/node code can
    // find the edges attached to it.
    this.source.edges.push(this);
    this.destination.edges.push(this);
  }
  /**
   * Attach this edge's container to a PIXI parent container.
   * @param parent
   */
  setPIXIParent(parent: PIXI.Container) {
    this.parent = parent;
    parent.addChild(this.container);
  }
  /**
   * Remove the PIXI DisplayObject from the rendered hierarchy and detach
   * this edge from its endpoint ports.
   */
  remove() {
    this.container.parent.removeChild(this.container);

    // Guard the indexOf results: splice(-1, 1) would silently drop the
    // *last* edge in a port's list if this edge were already detached.
    const sourceIndex = this.source.edges.indexOf(this);
    if (sourceIndex >= 0) {
      this.source.edges.splice(sourceIndex, 1);
    }
    const destinationIndex = this.destination.edges.indexOf(this);
    if (destinationIndex >= 0) {
      this.destination.edges.splice(destinationIndex, 1);
    }
  }
  /**
   * Redraw this edge between the current positions of its two ports, using
   * cached arrow and curved-line graphics.
   * @param line layout points for the edge (currently unused; endpoints are
   *     derived from the ports' node positions instead)
   */
  draw(
    line: AudionPanel.Point[],
    {
      edgeArrowGraphics: arrowGraphics,
      edgeCurvedLineGraphics: curvedLineGraphics,
    }: {
      edgeArrowGraphics: EdgeArrowGraphics;
      edgeCurvedLineGraphics: EdgeCurvedLineGraphics;
    },
  ) {
    const {
      offset: start,
      node: {position: sourcePosition},
    } = this.source;
    const {
      offset: end,
      node: {position: destinationPosition},
    } = this.destination;
    // Port offsets are relative to their node; translate to graph space.
    const a = new PIXI.Point(
      sourcePosition.x + start.x,
      sourcePosition.y + start.y,
    );
    const d = new PIXI.Point(
      destinationPosition.x + end.x,
      destinationPosition.y + end.y,
    );
    this.container.removeChildren();
    this.container.addChild(arrowGraphics.createGraphics(a, d));
    this.container.addChild(curvedLineGraphics.createGraphics(a, d));
  }

  /**
   * Draw an arrowhead at `end`, oriented along the direction from
   * `pointOnLine` to `end`. Extends the graphics' current path.
   * @param pointOnLine point on the curve just before the end
   * @param end tip of the arrow
   * @param graphics target Graphics to draw into
   */
  drawArrow(
    pointOnLine: AudionPanel.Point,
    end: AudionPanel.Point,
    graphics: PIXI.Graphics,
  ) {
    const arrowMagnitude = Math.hypot(
      pointOnLine.y - end.y,
      pointOnLine.x - end.x,
    );
    // Unit vector pointing from the arrow tip back along the curve.
    const arrowUnitX = (pointOnLine.x - end.x) / arrowMagnitude;
    const arrowUnitY = (pointOnLine.y - end.y) / arrowMagnitude;

    graphics.beginFill(GraphColor.INPUT_OUTPUT);
    graphics.lineTo(
      end.x + arrowUnitX * ARROW_LENGTH + arrowUnitY * ARROW_HEIGHT,
      end.y + arrowUnitY * ARROW_LENGTH - arrowUnitX * ARROW_HEIGHT,
    );
    graphics.lineTo(
      end.x + arrowUnitX * ARROW_LENGTH - arrowUnitY * ARROW_HEIGHT,
      end.y + arrowUnitY * ARROW_LENGTH + arrowUnitX * ARROW_HEIGHT,
    );
    graphics.lineTo(end.x, end.y);
    graphics.endFill();
  }

  /**
   * Draw a quadratic curve from a to c, shaped by control point b, as a
   * series of short line segments.
   * @param a start point
   * @param b control point
   * @param c end point
   * @param graphics target Graphics to draw into
   * @param pointOnLine scratch point reused for each interpolated sample
   */
  drawCurvedLine(
    a: AudionPanel.Point,
    b: AudionPanel.Point,
    c: AudionPanel.Point,
    graphics: PIXI.Graphics,
    pointOnLine: AudionPanel.Point,
  ) {
    const lineCoeffs = lineCoefficients(a, b, c, LINE_COEFF);

    // Segment count scales with the straight-line distance from a to c.
    const lineMagnitudeEstimate = Math.hypot(a.y - c.y, a.x - c.x);
    const steps = Math.max(2, Math.ceil(lineMagnitudeEstimate * STEP_RATIO));

    graphics.lineStyle(2, GraphColor.INPUT_OUTPUT);

    graphics.moveTo(a.x, a.y);
    for (let i = 1; i < steps; i++) {
      interpolateCoefficients(lineCoeffs, i / steps, pointOnLine);
      graphics.lineTo(pointOnLine.x, pointOnLine.y);
    }
    graphics.lineTo(c.x, c.y);
  }

  /**
   * Move `destination` toward `end` by `radius` along the line between them.
   * Mutates `destination` in place.
   * @param end fixed point the adjustment moves toward
   * @param destination point adjusted in place
   * @param radius distance to move along the end-destination line
   */
  adjustPoint(
    end: AudionPanel.Point,
    destination: AudionPanel.Point,
    radius: number,
  ) {
    const magnitude = Math.hypot(end.y - destination.y, end.x - destination.x);

    // Guard against coincident points: dividing by a zero magnitude would
    // poison `destination` with NaN coordinates.
    if (magnitude === 0) {
      return;
    }

    destination.x += ((end.x - destination.x) / magnitude) * radius;
    destination.y += ((end.y - destination.y) / magnitude) * radius;
  }
}

/**
 * Allocate a zeroed coefficient record for a quadratic curve.
 * @return a LineCoefficients object with every field set to 0
 */
function createLineCoefficients(): LineCoefficients {
  const zeroed = {
    ax: 0,
    ay: 0,
    bx: 0,
    by: 0,
    cx: 0,
    cy: 0,
  };
  return zeroed;
}

/**
 * Expand a quadratic Bezier curve defined by points a, b, c into per-axis
 * polynomial coefficients of the form `a*t^2 + b*t + c`, for cheap sampling
 * by interpolateCoefficients.
 * @param a start point
 * @param b control point
 * @param c end point
 * @param coeff record to fill; a fresh one is allocated when omitted
 * @return the filled coefficient record
 */
function lineCoefficients(
  a: AudionPanel.Point,
  b: AudionPanel.Point,
  c: AudionPanel.Point,
  coeff = createLineCoefficients(),
): LineCoefficients {
  const abX = b.x - a.x;
  const abY = b.y - a.y;
  const bcX = c.x - b.x;
  const bcY = c.y - b.y;

  coeff.ax = bcX - abX;
  coeff.bx = 2 * abX;
  coeff.cx = a.x;

  coeff.ay = bcY - abY;
  coeff.by = 2 * abY;
  coeff.cy = a.y;

  return coeff;
}

/**
 * Evaluate quadratic polynomial coefficients at parameter t.
 * @param coeff quadratic coefficients produced by lineCoefficients
 * @param t number between 0 and 1 inclusive
 * @param destination point written in place; a new PIXI.Point when omitted
 * @return destination, for chaining
 */
function interpolateCoefficients(
  coeff: LineCoefficients,
  t: number,
  destination: AudionPanel.Point = new PIXI.Point(),
): AudionPanel.Point {
  destination.x = coeff.ax * t * t + coeff.bx * t + coeff.cx;
  destination.y = coeff.ay * t * t + coeff.by * t + coeff.cy;
  return destination;
}

/**
 * Per-axis quadratic polynomial coefficients:
 * x(t) = ax*t^2 + bx*t + cx; y(t) = ay*t^2 + by*t + cy.
 * @typedef LineCoefficients
 * @property {number} ax
 * @property {number} ay
 * @property {number} bx
 * @property {number} by
 * @property {number} cx
 * @property {number} cy
 */

interface LineCoefficients {
  ax: number;
  ay: number;
  bx: number;
  by: number;
  cx: number;
  cy: number;
}


================================================
FILE: src/panel/graph/AudioGraphRender.ts
================================================
/// <reference path="../../chrome/Types.js" />

import * as PIXI from 'pixi.js';
import {BehaviorSubject} from 'rxjs';

import {Audion} from '../../devtools/Types';

import {AudioEdgeKey, AudioEdgeRender} from './AudioEdgeRender';
import {AudioNodeRender} from './AudioNodeRender';
import {Camera} from './Camera';
import {GraphicsCache} from './GraphicsCache';

// Handle returned by requestAnimationFrame; used to cancel a pending frame.
type AnimationFrameId = ReturnType<typeof requestAnimationFrame>;

/**
 * Render a graph of nodes and edges.
 */
export class AudioGraphRender {
  // Node renders keyed by node id.
  nodeMap: Map<string, AudioNodeRender>;
  // Interned edge keys: v -> w -> name -> the canonical AudioEdgeKey object,
  // so structurally-equal edges share one key usable as a Map key.
  edgeIdMap: Map<string, Map<string, Map<string, AudioEdgeKey>>>;
  // Edge renders keyed by their interned AudioEdgeKey.
  edgeMap: Map<AudioEdgeKey, AudioEdgeRender>;

  camera: Camera;

  elementContainer: HTMLElement;
  pixiApplication: PIXI.Application<HTMLCanvasElement> | null;
  pixiView: HTMLCanvasElement | null;
  pixiNodeContainer: PIXI.Container | null;
  pixiEdgeContainer: PIXI.Container | null;

  // Pending requestAnimationFrame id, or null when no frame is scheduled.
  renderFrameId: AnimationFrameId | null;

  graphicsCache: GraphicsCache;

  // Currently selected graph node, or null when nothing is selected.
  selectedNode$: BehaviorSubject<Audion.GraphNode>;

  /**
   * Create an AudioGraphRender.
   * @param options
   */
  constructor({elementContainer}: {elementContainer: HTMLElement}) {
    this.nodeMap = new Map();
    this.edgeIdMap = new Map();
    this.edgeMap = new Map();

    this.camera = new Camera();

    this.elementContainer = elementContainer;
    this.pixiView = null;
    this.pixiApplication = null;
    this.pixiNodeContainer = null;
    this.pixiEdgeContainer = null;

    this.renderFrameId = null;

    this.graphicsCache = null;

    // Bound once so it can be passed to requestAnimationFrame repeatedly.
    this._render = this._render.bind(this);

    this.selectedNode$ = new BehaviorSubject<Audion.GraphNode>(null);
  }

  /** Initialize the PIXI application, containers, and camera wiring. */
  init() {
    const app = (this.pixiApplication = new PIXI.Application<HTMLCanvasElement>(
      {
        backgroundColor: 0xffffff,
        resizeTo: this.elementContainer,
        antialias: true,
        autoDensity: true,
        resolution: window.devicePixelRatio,
      },
    ));
    this.pixiView = app.view;

    this.graphicsCache = new GraphicsCache();

    const nodeContainer = (this.pixiNodeContainer = new PIXI.Container());
    app.stage.addChild(nodeContainer);

    const edgeContainer = (this.pixiEdgeContainer = new PIXI.Container());
    app.stage.addChild(edgeContainer);

    this.initEvents();

    // Re-render whenever the camera viewport changes, applying the viewport
    // as a stage transform (pan via -x/-y, zoom via 1/width, 1/height).
    this.camera.viewportObserver.observe((viewport) => {
      const {x, y, width, height} = this.camera.viewport;
      app.stage.setTransform(-x / width, -y / height, 1 / width, 1 / height);
      this.requestRender();
    });
  }

  /** Schedule a render on the next animation frame, coalescing requests. */
  requestRender() {
    if (this.renderFrameId === null) {
      this.renderFrameId = requestAnimationFrame(this._render);
    }
  }

  /** Animation-frame callback: sync camera screen size and render once. */
  _render() {
    this.renderFrameId = null;

    const {pixiApplication: app} = this;

    this.camera.setScreenSize(app.screen.width, app.screen.height);
    app.render();
  }

  /** Stop rendering, cancelling any scheduled frame. */
  stop() {
    if (this.renderFrameId !== null) {
      cancelAnimationFrame(this.renderFrameId);
      // Reset so a later requestRender() can schedule again; leaving the
      // stale id set would permanently block future renders.
      this.renderFrameId = null;
    }
  }

  /**
   * Annotate each graph node with its rendered size (needed by layout), or
   * tear down all renders when the message carries no graph.
   * @param message
   */
  updateGraphSizes(message: Audion.GraphContext): Audion.GraphContext {
    if (message.graph) {
      message.graph.nodes.forEach(({v: nodeId, value: node}) => {
        if (node) {
          const nodeRender = this.createNodeRender(
            nodeId,
            message.nodes[nodeId],
          );
          node.width = nodeRender.size.x;
          node.height = nodeRender.size.y;
        }
      });
    } else {
      // No graph: the context is gone, destroy every node and edge render.
      for (const nodeId of this.nodeMap.keys()) {
        this.destroyNodeRender(nodeId);
      }
      for (const edgeId of this.edgeMap.keys()) {
        this.destroyEdgeRender(edgeId);
      }
    }
    return message;
  }

  /**
   * Sync renders with a laid-out graph: create/position nodes and edges
   * present in the message, destroy renders no longer present.
   * @param message
   */
  update(message: Audion.GraphContext) {
    this.camera.setGraphSize(
      message.graph.value.width,
      message.graph.value.height,
    );

    // Diff against existing renders: anything left in this set after the
    // loop was not mentioned in the message and gets destroyed.
    const previousNodeRenders = new Set(this.nodeMap.values());
    for (let i = 0; i < message.graph.nodes.length; i++) {
      const nodeKeyValue = message.graph.nodes[i];
      const nodeId = nodeKeyValue.v;
      const node = nodeKeyValue.value;

      if (node) {
        const nodeRender = this.createNodeRender(nodeId, message.nodes[nodeId]);
        nodeRender.container.visible = true;
        // Layout positions are node centers; containers are anchored at the
        // top-left corner.
        nodeRender.position.set(
          node.x - nodeRender.size.x / 2,
          node.y - nodeRender.size.y / 2,
        );
        previousNodeRenders.delete(nodeRender);
      } else {
        this.destroyNodeRender(nodeId);
      }
    }
    for (const nodeRender of previousNodeRenders) {
      this.destroyNodeRender(nodeRender.id);
    }

    const previousEdgeRenders = new Set(this.edgeMap.values());
    for (let i = 0; i < message.graph.edges.length; i++) {
      const edgeKeyValue = message.graph.edges[i];
      const edge = edgeKeyValue.value;

      if (edge) {
        const edgeRender = this.createEdgeRender(edgeKeyValue, message);
        if (edgeRender) {
          edgeRender.draw(edge.points, this.graphicsCache);
        }
        // delete(undefined) is a harmless no-op when no render was created.
        previousEdgeRenders.delete(edgeRender);
      }
    }
    for (const edgeRender of previousEdgeRenders) {
      this.destroyEdgeRender(edgeRender.key);
    }

    this.requestRender();
  }

  /**
   * Hit-test a point given in viewport-relative coordinates (0..1).
   * @param viewportPoint
   * @return the node at that point, or null
   */
  getNodeAtViewportPoint(viewportPoint: {x: number; y: number}) {
    const screenPoint = new PIXI.Point(
      viewportPoint.x * this.camera.screen.width,
      viewportPoint.y * this.camera.screen.height,
    );
    return this.getNodeAtScreenPoint(screenPoint);
  }

  /**
   * Hit-test a point in screen coordinates against all node bounds.
   * @param screenPoint
   * @return the first node whose bounds contain the point, or null
   */
  getNodeAtScreenPoint(screenPoint: {x: number; y: number}) {
    for (const nodeRender of this.nodeMap.values()) {
      if (
        nodeRender.container.getBounds().contains(screenPoint.x, screenPoint.y)
      ) {
        return nodeRender.node;
      }
    }

    return null;
  }

  /** Initialize event handling: drag to pan, click to select, wheel to zoom. */
  initEvents() {
    const {pixiApplication: app} = this;

    app.stage.eventMode = 'dynamic';
    let lastPoint = null;
    // Drag with any button held pans the camera by the pointer delta.
    app.stage.addListener('mousemove', (e) => {
      if (lastPoint && e.buttons) {
        this.camera.move(lastPoint.x - e.globalX, lastPoint.y - e.globalY);
      }
      lastPoint = e.global.clone();
    });

    // Click selects the node under the pointer (or clears the selection)
    // and updates highlights accordingly.
    app.view.onclick = ({offsetX, offsetY}) => {
      const {clientWidth, clientHeight} = app.view;
      const viewportPoint = new PIXI.Point(
        offsetX / clientWidth,
        offsetY / clientHeight,
      );

      const lastSelectedNode = this.selectedNode$.value;
      const selectedNode = this.getNodeAtViewportPoint(viewportPoint);
      this.nodeMap.get(lastSelectedNode?.node?.nodeId)?.setHighlight(false);
      this.nodeMap.get(selectedNode?.node?.nodeId)?.setHighlight(true);
      this.requestRender();

      this.selectedNode$.next(selectedNode);
    };

    // Wheel zooms around the pointer position.
    app.view.onwheel = (e) => {
      this.camera.zoom(
        e.clientX - app.view.clientLeft,
        e.clientY - app.view.clientTop,
        e.deltaY / 1000,
      );
    };
  }

  /**
   * Create (or return the existing) rendering for an audio node.
   * @param nodeId
   * @param node
   * @returns the node render; undefined when the node has no nodeType
   */
  createNodeRender(nodeId: string, node: Audion.GraphNode): AudioNodeRender {
    let nodeRender = this.nodeMap.get(nodeId);
    if (!nodeRender) {
      if (node.node && node.node.nodeType) {
        nodeRender = new AudioNodeRender(nodeId).init(node, this.graphicsCache);
        nodeRender.setPixiParent(this.pixiNodeContainer);
        this.nodeMap.set(nodeId, nodeRender);
      }
    }
    return nodeRender;
  }

  /**
   * Destroy the rendering for an audio node, clearing the selection if the
   * destroyed node was selected.
   * @param nodeId
   */
  destroyNodeRender(nodeId: any) {
    const nodeRender = this.nodeMap.get(nodeId);
    if (nodeRender) {
      nodeRender.remove();
      this.nodeMap.delete(nodeId);

      if (nodeId === this.selectedNode$.value?.node?.nodeId) {
        this.selectedNode$.next(null);
      }
    }
  }

  /**
   * Order edge keys lexicographically by v, then w, then name.
   * @return -1, 0, or 1
   */
  compareEdgeKey(left: AudioEdgeKey, right: AudioEdgeKey) {
    if (left.v < right.v) {
      return -1;
    } else if (left.v > right.v) {
      return 1;
    }
    if (left.w < right.w) {
      return -1;
    } else if (left.w > right.w) {
      return 1;
    }
    if (left.name < right.name) {
      return -1;
    } else if (left.name > right.name) {
      return 1;
    }
    return 0;
  }

  /**
   * Intern an edge identity: equal (v, w, name) triples always map to the
   * same AudioEdgeKey object, so it can serve as a Map key.
   */
  createEdgeId({v, w, name}: Audion.GraphlibEdge) {
    if (!this.edgeIdMap.has(v)) {
      this.edgeIdMap.set(v, new Map());
    }
    const edgeIdVMap = this.edgeIdMap.get(v);
    if (!edgeIdVMap.has(w)) {
      edgeIdVMap.set(w, new Map());
    }
    const edgeIdVWMap = edgeIdVMap.get(w);
    if (!edgeIdVWMap.has(name)) {
      edgeIdVWMap.set(name, {v, w, name});
    }
    return edgeIdVWMap.get(name);
  }

  /**
   * Remove an interned edge key, pruning now-empty nested maps.
   */
  destroyEdgeId(edgeId: AudioEdgeKey) {
    if (this.edgeIdMap.has(edgeId.v)) {
      const edgeIdVMap = this.edgeIdMap.get(edgeId.v);
      if (edgeIdVMap.has(edgeId.w)) {
        const edgeIdVWMap = edgeIdVMap.get(edgeId.w);
        if (edgeIdVWMap.has(edgeId.name)) {
          edgeIdVWMap.delete(edgeId.name);
        }
        if (edgeIdVWMap.size === 0) {
          edgeIdVMap.delete(edgeId.w);
        }
      }
      if (edgeIdVMap.size === 0) {
        this.edgeIdMap.delete(edgeId.v);
      }
    }
  }

  /**
   * Create (or return the existing) rendering for an edge, resolving its
   * source output port and destination input/param port.
   * @param edge
   * @param context
   * @return the edge render; undefined when either endpoint is unavailable
   */
  createEdgeRender(
    edge: Audion.GraphlibEdge,
    context: Audion.GraphContext,
  ): AudioEdgeRender {
    const edgeId = this.createEdgeId(edge);
    let edgeRender = this.edgeMap.get(edgeId);
    if (!edgeRender) {
      const sourceData = context.nodes[edge.v];
      const destinationData = context.nodes[edge.w];
      if (sourceData && destinationData) {
        const sourceNode = this.nodeMap.get(sourceData.node.nodeId);
        const destinationNode = this.nodeMap.get(destinationData.node.nodeId);

        if (sourceNode && destinationNode) {
          const {sourceOutputIndex, destinationType} = edge.value;
          const sourceNodePort = sourceNode.output[sourceOutputIndex];
          // Node-type edges land on an input port; otherwise on a param port.
          const destinationNodePort =
            destinationType === Audion.GraphEdgeType.NODE
              ? destinationNode.input[edge.value.destinationInputIndex]
              : destinationNode.param[edge.value.destinationParamIndex];

          if (sourceNodePort && destinationNodePort) {
            edgeRender = new AudioEdgeRender({
              key: edgeId,
              source: sourceNodePort,
              destination: destinationNodePort,
            });
            edgeRender.setPIXIParent(this.pixiEdgeContainer);

            edgeRender.source.updateNodeDisplay();
            edgeRender.destination.updateNodeDisplay();

            this.edgeMap.set(edgeId, edgeRender);
          }
        }
      }
    }
    return edgeRender;
  }

  /**
   * Destroy the rendering for an edge and release its interned key.
   * @param edgeId
   */
  destroyEdgeRender(edgeId: AudioEdgeKey) {
    const edgeRender = this.edgeMap.get(edgeId);
    if (edgeRender) {
      edgeRender.remove();

      edgeRender.source.updateNodeDisplay();
      edgeRender.destination.updateNodeDisplay();

      this.edgeMap.delete(edgeId);

      this.destroyEdgeId(edgeId);
    }
  }
}


================================================
FILE: src/panel/graph/AudioGraphText.ts
================================================
import * as PIXI from 'pixi.js';

/**
 * A piece of graph text rendered once; its texture can back many sprites.
 */
export class AudioGraphText {
  bounds: PIXI.Rectangle;
  content: string;
  text: PIXI.Text;
  textStyle: PIXI.TextStyle;
  texture: PIXI.Texture;

  /**
   * Render `content` with `textStyle` and capture the resulting texture and
   * local bounds for reuse.
   * @param textStyle style to render the text with
   * @param content string to render
   */
  constructor(textStyle: PIXI.TextStyle, content: string) {
    this.textStyle = textStyle;
    this.content = content;
    this.text = new PIXI.Text(content, this.textStyle);
    this.bounds = this.text.getLocalBounds(new PIXI.Rectangle());
    this.texture = this.text.texture;
  }

  /** Create a new sprite backed by the shared text texture. */
  createSprite() {
    const sprite = new PIXI.Sprite(this.texture);
    return sprite;
  }
}


================================================
FILE: src/panel/graph/AudioGraphTextCacheGroup.ts
================================================
import * as PIXI from 'pixi.js';

import {AudioGraphText} from './AudioGraphText';
import {GraphTextStyle} from './graphStyle';

export class AudioGraphTextCache {
  textStyle: PIXI.TextStyle;

  cache: Map<string, AudioGraphText> = new Map();

  constructor({textStyle}: {textStyle: PIXI.TextStyle}) {
    this.textStyle = textStyle;
  }

  getText(content: string) {
    if (!this.cache.has(content)) {
      const newText = new AudioGraphText(this.textStyle, content);
      this.cache.set(content, newText);
    }
    return this.cache.get(content);
  }

  getTextBounds(content: string) {
  
Download .txt
gitextract_0gd149fy/

├── .babelrc
├── .editorconfig
├── .eslintrc.json
├── .github/
│   └── workflows/
│       └── nodejs-ci.yml
├── .gitignore
├── .husky/
│   ├── .gitignore
│   └── pre-commit
├── .jsdoc.json
├── .prettierrc
├── LICENSE
├── README.md
├── fixtures/
│   └── oscillatorGainParam.ts
├── package.json
├── simulations/
│   ├── updateGraphRender.html
│   ├── updateGraphRender.ts
│   └── webpack.config.js
├── src/
│   ├── .jest.config.json
│   ├── build/
│   │   ├── make-chrome-extension.js
│   │   └── manifest.json.mustache
│   ├── chrome/
│   │   ├── API.js
│   │   ├── Debugger.js
│   │   ├── DebuggerPageDomain.ts
│   │   ├── DebuggerWebAudioDomain.ts
│   │   ├── DevTools.js
│   │   ├── Runtime.js
│   │   ├── Types.js
│   │   └── index.js
│   ├── custom.d.ts
│   ├── devtools/
│   │   ├── DebuggerAttachEventController.ts
│   │   ├── DebuggerEvents.ts
│   │   ├── DevtoolsGraphPanel.test.js
│   │   ├── DevtoolsGraphPanel.ts
│   │   ├── Types.ts
│   │   ├── WebAudioEventObserver.test.js
│   │   ├── WebAudioEventObserver.ts
│   │   ├── WebAudioGraphIntegrator.test.js
│   │   ├── WebAudioGraphIntegrator.ts
│   │   ├── WebAudioRealtimeData.ts
│   │   ├── deserializeGraphContext.ts
│   │   ├── layoutGraphContext.ts
│   │   ├── main.ts
│   │   ├── partitionMap.ts
│   │   ├── serializeGraphContext.js
│   │   └── setOptionsToGraphContext.ts
│   ├── devtools.html
│   ├── extraSettingPage/
│   │   ├── options.html
│   │   └── options.js
│   ├── panel/
│   │   ├── GraphSelector.ts
│   │   ├── Observer.runtime.ts
│   │   ├── Types.ts
│   │   ├── components/
│   │   │   ├── WholeGraphButton.css
│   │   │   ├── WholeGraphButton.ts
│   │   │   ├── collectGarbage.css
│   │   │   ├── collectGarbage.ts
│   │   │   ├── detailPanel.css
│   │   │   ├── detailPanel.ts
│   │   │   ├── domUtils.ts
│   │   │   ├── realtimeSummary.ts
│   │   │   ├── selectGraph.css
│   │   │   └── selectGraph.ts
│   │   ├── graph/
│   │   │   ├── AudioEdgeArrowGraphics.ts
│   │   │   ├── AudioEdgeCurvedLineGraphics.ts
│   │   │   ├── AudioEdgeRender.ts
│   │   │   ├── AudioGraphRender.ts
│   │   │   ├── AudioGraphText.ts
│   │   │   ├── AudioGraphTextCacheGroup.ts
│   │   │   ├── AudioNodeBackground.ts
│   │   │   ├── AudioNodeBackgroundRenderCacheGroup.ts
│   │   │   ├── AudioNodePort.ts
│   │   │   ├── AudioNodeRender.ts
│   │   │   ├── AudioPortCacheGroup.ts
│   │   │   ├── Camera.js
│   │   │   ├── GraphicsCache.ts
│   │   │   ├── graphStyle.js
│   │   │   └── graphStyle.ts
│   │   ├── main.ts
│   │   ├── updateGraphRender.ts
│   │   ├── updateGraphSizes.ts
│   │   └── worker.ts
│   ├── panel.html
│   ├── utils/
│   │   ├── Observer.emitter.js
│   │   ├── Observer.test.js
│   │   ├── Observer.ts
│   │   ├── Types.ts
│   │   ├── dlog.js
│   │   ├── error.js
│   │   ├── error.test.js
│   │   ├── index.js
│   │   ├── mapThruWorker.ts
│   │   ├── math.js
│   │   ├── retry.js
│   │   ├── retry.test.js
│   │   ├── rxChrome.ts
│   │   └── rxInterop.ts
│   └── webpack.config.js
├── test/
│   ├── .jest-puppeteer.config.json
│   ├── .jest.config.json
│   ├── README.md
│   ├── browserLaunch.js
│   └── updateGraphRender.js
└── tsconfig.json
Download .txt
SYMBOL INDEX (335 symbols across 59 files)

FILE: fixtures/oscillatorGainParam.ts
  constant OSCILLATOR_GAIN_PARAM_EVENTS (line 44) | const OSCILLATOR_GAIN_PARAM_EVENTS: Audion.WebAudioEvent[] = [

FILE: simulations/updateGraphRender.ts
  function main (line 26) | function main() {

FILE: src/build/make-chrome-extension.js
  function main (line 24) | async function main() {
  function copyFiles (line 48) | async function copyFiles({src, dest, files, cwd = __dirname}) {
  function generateManifest (line 66) | async function generateManifest({
  function zipChromeExtension (line 91) | async function zipChromeExtension({
  function readdirRecursive (line 119) | async function readdirRecursive(dir) {
  function mkdir (line 146) | async function mkdir(dirpath) {
  function unlink (line 163) | async function unlink(filepath) {

FILE: src/chrome/DebuggerPageDomain.ts
  type PageDebuggerMethod (line 10) | enum PageDebuggerMethod {
  type PageDebuggerEvent (line 16) | enum PageDebuggerEvent {
  type PageDebuggerEventParams (line 29) | type PageDebuggerEventParams<Name extends PageDebuggerEvent> =

FILE: src/chrome/DebuggerWebAudioDomain.ts
  type WebAudioDebuggerMethod (line 10) | enum WebAudioDebuggerMethod {
  type WebAudioDebuggerEvent (line 17) | enum WebAudioDebuggerEvent {
  type WebAudioDebuggerEventParams (line 34) | type WebAudioDebuggerEventParams<Name extends WebAudioDebuggerEvent> =

FILE: src/chrome/index.js
  function noopChrome (line 23) | function noopChrome() {
  function getGlobal (line 71) | function getGlobal() {
  function getChrome (line 87) | function getChrome() {

FILE: src/devtools/DebuggerAttachEventController.ts
  type AttachPermission (line 48) | enum AttachPermission {
  type BinaryTransition (line 74) | enum BinaryTransition {
  type DebuggerAttachEventState (line 81) | interface DebuggerAttachEventState {
  type ChromeDebuggerAPIEventName (line 97) | enum ChromeDebuggerAPIEventName {
  type ChromeDebuggerAPIDetachEventParams (line 101) | interface ChromeDebuggerAPIDetachEventParams {
  type ChromeDebuggerAPIDetachEvent (line 105) | interface ChromeDebuggerAPIDetachEvent {
  type ChromeDebuggerAPIEvent (line 110) | type ChromeDebuggerAPIEvent = ChromeDebuggerAPIDetachEvent;
  type ChromeDebuggerAPIEventParams (line 112) | type ChromeDebuggerAPIEventParams = ChromeDebuggerAPIEvent['params'];
  class DebuggerAttachEventController (line 121) | class DebuggerAttachEventController {
    method constructor (line 144) | constructor() {
    method sendCommand (line 271) | sendCommand(method: string): Observable<never> {
  function activateEventWhileAttached (line 282) | function activateEventWhileAttached(
  function bindChromeCallback (line 319) | function bindChromeCallback<P extends any[]>(
  function fromChromeEvent (line 340) | function fromChromeEvent<A extends any[]>(
  class PermissionSubject (line 396) | class PermissionSubject extends BehaviorSubject<AttachPermission> {
    method constructor (line 397) | constructor() {
    method grantTemporary (line 404) | grantTemporary() {
    method reject (line 413) | reject() {
  type BinaryTransitionDescription (line 423) | interface BinaryTransitionDescription {
  class BinaryTransitionSubject (line 445) | class BinaryTransitionSubject extends BehaviorSubject<BinaryTransition> {
    method constructor (line 449) | constructor({
    method transition (line 481) | transition(description: BinaryTransitionDescription) {
    method activate (line 516) | activate() {
    method deactivate (line 523) | deactivate() {
  class CounterSubject (line 531) | class CounterSubject extends BehaviorSubject<number> {
    method increment (line 535) | increment() {
    method decrement (line 542) | decrement() {

FILE: src/devtools/DebuggerEvents.ts
  type DebuggerDomain (line 7) | type DebuggerDomain = 'page' | 'webAudio';
  type DebuggerEventsOptions (line 9) | interface DebuggerEventsOptions<D extends DebuggerDomain> {
  type DebuggerDomainEvent (line 13) | type DebuggerDomainEvent<D extends DebuggerDomain> = D extends 'page'
  class DebuggerEventsObservable (line 19) | class DebuggerEventsObservable<
    method constructor (line 22) | constructor(

FILE: src/devtools/DevtoolsGraphPanel.test.js
  function simulateShowPanel (line 272) | function simulateShowPanel(panel) {
  function simulateCreatePanel (line 282) | function simulateCreatePanel(panel = mockPanel()) {
  function simulateConnectPort (line 293) | function simulateConnectPort(port = mockPort()) {
  function mockEvent (line 301) | function mockEvent() {
  function mockPort (line 306) | function mockPort() {
  function mockPanel (line 317) | function mockPanel() {
  function subscribeWhen (line 327) | function subscribeWhen(subscribeNotifier, unsubscribeNotifier) {

FILE: src/devtools/DevtoolsGraphPanel.ts
  function fromChromeEvent (line 9) | function fromChromeEvent<T>(
  class DevtoolsGraphPanel (line 21) | class DevtoolsGraphPanel {
    method constructor (line 29) | constructor(graphs$: Observable<Audion.DevtoolsMessage>) {

FILE: src/devtools/Types.ts
  type ContextRealtimeData (line 25) | type ContextRealtimeData = Protocol.WebAudio.ContextRealtimeData;
  type GraphEdgeType (line 27) | enum GraphEdgeType {
  type GraphNodeEdge (line 32) | interface GraphNodeEdge {
  type GraphParamEdge (line 38) | interface GraphParamEdge {
  type GraphEdge (line 45) | type GraphEdge = GraphNodeEdge | GraphParamEdge;
  type GraphlibEdge (line 47) | interface GraphlibEdge<V = GraphEdge> {
  type GraphContext (line 54) | interface GraphContext {
  type GraphContextMessage (line 64) | interface GraphContextMessage {
  type GraphContextsById (line 68) | interface GraphContextsById {
  type AllGraphsMessage (line 72) | interface AllGraphsMessage {
  type DevtoolsMessage (line 76) | type DevtoolsMessage = GraphContextMessage | AllGraphsMessage;
  type DevtoolsRequestType (line 78) | enum DevtoolsRequestType {
  type DevtoolsCollectGarbageRequest (line 82) | interface DevtoolsCollectGarbageRequest {
  type DevtoolsRequest (line 86) | type DevtoolsRequest = DevtoolsCollectGarbageRequest;
  type DevtoolsObserver (line 88) | interface DevtoolsObserver extends Utils.Observer<DevtoolsMessage> {}
  type GraphNode (line 90) | interface GraphNode {
  type PageEvent (line 96) | type PageEvent<N extends PageDebuggerEvent = PageDebuggerEvent> = {
  type WebAudioEvent (line 101) | type WebAudioEvent<

FILE: src/devtools/WebAudioEventObserver.ts
  class WebAudioEventObservable (line 16) | class WebAudioEventObservable extends Observable<Audion.WebAudioEvent> {
    method constructor (line 29) | constructor(debuggerAttachController: DebuggerAttachEventController) {

FILE: src/devtools/WebAudioGraphIntegrator.test.js
  method pollContext (line 37) | pollContext() {
  method pollContext (line 597) | pollContext() {
  function mockSubscriber (line 727) | function mockSubscriber() {

FILE: src/devtools/WebAudioGraphIntegrator.ts
  type GraphContextDestroyReasonMessage (line 46) | enum GraphContextDestroyReasonMessage {
  type MutableContexts (line 51) | type MutableContexts = {
  type EventHelpers (line 59) | interface EventHelpers {
  type IntegratableEventName (line 63) | type IntegratableEventName =
  type IntegratableEvent (line 68) | type IntegratableEvent =
  type IntegratableEventMapping (line 73) | type IntegratableEventMapping = {
  type EventHandlers (line 83) | type EventHandlers =
  constant EVENT_HANDLERS (line 96) | const EVENT_HANDLERS: Partial<EventHandlers> = {
  function ensureContextsExist (line 726) | function ensureContextsExist(
  function removeAll (line 763) | function removeAll<T>(array: T[], fn: (value: T) => boolean) {
  function integrateWebAudioGraph (line 776) | function integrateWebAudioGraph(

FILE: src/devtools/WebAudioRealtimeData.ts
  type RealtimeDataErrorMessage (line 16) | enum RealtimeDataErrorMessage {
  type RealtimeDataReason (line 23) | interface RealtimeDataReason<Message extends RealtimeDataErrorMessage> {
  constant INITIAL_CONTEXT_REALTIME_DATA (line 34) | const INITIAL_CONTEXT_REALTIME_DATA = {
  class WebAudioRealtimeData (line 41) | class WebAudioRealtimeData {
    method pollContext (line 47) | pollContext(contextId: string) {
  method parseReason (line 69) | parseReason(reason: any) {
  method toString (line 78) | toString(reason: any) {
  method isRealtimeOnlyReason (line 82) | isRealtimeOnlyReason(
  method isCannotFindReason (line 88) | isCannotFindReason(

FILE: src/devtools/deserializeGraphContext.ts
  type SerializedGraphContext (line 4) | interface SerializedGraphContext extends Audion.GraphContext {
  function deserializeGraphContext (line 8) | function deserializeGraphContext(

FILE: src/devtools/layoutGraphContext.ts
  function layoutGraphContext (line 5) | function layoutGraphContext(

FILE: src/devtools/main.ts
  method next (line 87) | next() {
  method next (line 94) | next(value) {

FILE: src/devtools/partitionMap.ts
  type PartitionMapConfig (line 3) | interface PartitionMapConfig<V> {
  function partitionMap (line 16) | function partitionMap<V>({

FILE: src/devtools/serializeGraphContext.js
  function serializeGraphContext (line 7) | function serializeGraphContext(graphContext) {

FILE: src/devtools/setOptionsToGraphContext.ts
  function setOptionsToGraphContext (line 5) | function setOptionsToGraphContext([context, layoutOptions]: [

FILE: src/extraSettingPage/options.js
  function saveOptions (line 15) | function saveOptions() {
  function restoreOptions (line 23) | function restoreOptions() {

FILE: src/panel/GraphSelector.ts
  type GraphMap (line 6) | type GraphMap = {[key: string]: Audion.GraphContext};
  type GraphMapRX (line 8) | type GraphMapRX = Observable<GraphMap>;
  constant EMPTY_GRAPH (line 10) | const EMPTY_GRAPH = {
  class GraphSelector (line 17) | class GraphSelector {
    method graphId (line 24) | get graphId(): string {
    method constructor (line 32) | constructor({allGraphs$: allGraphs$}: {allGraphs$: GraphMapRX}) {
    method select (line 61) | select(graphId: string) {

FILE: src/panel/Observer.runtime.ts
  function connect (line 11) | function connect<S, T>(requests$: Observable<S>): Observable<T> {

FILE: src/panel/Types.ts
  type Point (line 26) | interface Point {
  type Node (line 31) | interface Node {
  type PortType (line 38) | enum PortType {
  type Port (line 44) | interface Port {

FILE: src/panel/components/WholeGraphButton.ts
  class WholeGraphButton (line 9) | class WholeGraphButton {
    method constructor (line 15) | constructor() {
    method render (line 23) | render() {

FILE: src/panel/components/collectGarbage.ts
  function collectGarbageImageHTML (line 12) | function collectGarbageImageHTML(): string {
  function renderCollectGarbage (line 22) | function renderCollectGarbage(

FILE: src/panel/components/detailPanel.ts
  function graphContextHTML (line 17) | function graphContextHTML({
  function graphNodeBaseHTML (line 41) | function graphNodeBaseHTML({
  function graphParamHTML (line 67) | function graphParamHTML({
  function graphNodeHTML (line 91) | function graphNodeHTML({node, params}: Audion.GraphNode): string {
  function renderDetailPanel (line 108) | function renderDetailPanel(

FILE: src/panel/components/domUtils.ts
  function setElementProperty (line 9) | function setElementProperty<
  type PropertySet (line 38) | interface PropertySet<T> {
  type PropertySetChange (line 46) | interface PropertySetChange {
  function toggleElementPropertySet (line 61) | function toggleElementPropertySet<
  type PropertyMapChange (line 113) | interface PropertyMapChange {
  function assignElementProperty (line 122) | function assignElementProperty<
  function querySelector (line 209) | function querySelector(

FILE: src/panel/components/realtimeSummary.ts
  function realtimeSummaryHTML (line 11) | function realtimeSummaryHTML(realtimeData: Audion.ContextRealtimeData) {
  function renderRealtimeSummary (line 36) | function renderRealtimeSummary(

FILE: src/panel/components/selectGraph.ts
  constant NO_GRAPHS_AVAILABLE_TITLE (line 31) | const NO_GRAPHS_AVAILABLE_TITLE = '(no recordings)';
  function graphIdTitle (line 38) | function graphIdTitle(graphId: string) {
  function graphTitle (line 47) | function graphTitle(graph: Audion.GraphContext) {
  function graphTitles (line 56) | function graphTitles(allGraphs: Audion.GraphContextsById): {
  function buttonTitle (line 73) | function buttonTitle([graphId, graphTitles]) {
  function equalTitles (line 105) | function equalTitles(
  function renderSelectGraph (line 133) | function renderSelectGraph(

FILE: src/panel/graph/AudioEdgeArrowGraphics.ts
  constant ARROW_LENGTH (line 4) | const ARROW_LENGTH = 16;
  constant ARROW_HEIGHT (line 5) | const ARROW_HEIGHT = 8;
  constant ARROW_ANGLE_ROUNDING (line 7) | const ARROW_ANGLE_ROUNDING = 32;
  class EdgeArrowGraphics (line 9) | class EdgeArrowGraphics {
    method drawFromPoint (line 12) | drawFromPoint(
    method drawFromUnit (line 27) | drawFromUnit(
    method getGeometry (line 48) | getGeometry(pointOnLine: PIXI.Point, end: PIXI.Point) {
    method createGraphics (line 71) | createGraphics(pointOnLine: PIXI.Point, end: PIXI.Point) {

FILE: src/panel/graph/AudioEdgeCurvedLineGraphics.ts
  constant STEP_RATIO (line 7) | const STEP_RATIO = 1 / 10;
  constant LINE_COEFF (line 9) | const LINE_COEFF = createLineCoefficients();
  type LineCoefficients (line 11) | interface LineCoefficients {
  class EdgeCurvedLineGraphics (line 22) | class EdgeCurvedLineGraphics {
    method getGeometry (line 25) | getGeometry(a: PIXI.Point, d: PIXI.Point) {
    method createGraphics (line 64) | createGraphics(a: PIXI.Point, d: PIXI.Point) {
    method drawCurvedLine (line 84) | drawCurvedLine(
    method adjustPoint (line 114) | adjustPoint(
  function createLineCoefficients (line 130) | function createLineCoefficients(): LineCoefficients {
  function buildLineCoefficients (line 143) | function buildLineCoefficients(
  function interpolateCoefficients (line 173) | function interpolateCoefficients(

FILE: src/panel/graph/AudioEdgeRender.ts
  constant ARROW_LENGTH (line 10) | const ARROW_LENGTH = 12;
  constant ARROW_HEIGHT (line 11) | const ARROW_HEIGHT = 4;
  constant STEP_RATIO (line 13) | const STEP_RATIO = 1 / 10;
  constant LINE_COEFF (line 15) | const LINE_COEFF = createLineCoefficients();
  type AudioEdgeKey (line 17) | interface AudioEdgeKey {
  class AudioEdgeRender (line 26) | class AudioEdgeRender {
    method constructor (line 37) | constructor({
    method setPIXIParent (line 59) | setPIXIParent(parent: PIXI.Container) {
    method remove (line 66) | remove() {
    method draw (line 75) | draw(
    method drawArrow (line 112) | drawArrow(
    method drawCurvedLine (line 145) | drawCurvedLine(
    method adjustPoint (line 173) | adjustPoint(
  function createLineCoefficients (line 189) | function createLineCoefficients(): LineCoefficients {
  function lineCoefficients (line 201) | function lineCoefficients(
  function interpolateCoefficients (line 232) | function interpolateCoefficients(
  type LineCoefficients (line 252) | interface LineCoefficients {

FILE: src/panel/graph/AudioGraphRender.ts
  type AnimationFrameId (line 13) | type AnimationFrameId = ReturnType<typeof requestAnimationFrame>;
  class AudioGraphRender (line 18) | class AudioGraphRender {
    method constructor (line 41) | constructor({elementContainer}: {elementContainer: HTMLElement}) {
    method init (line 64) | init() {
    method requestRender (line 94) | requestRender() {
    method _render (line 100) | _render() {
    method stop (line 110) | stop() {
    method updateGraphSizes (line 117) | updateGraphSizes(message: Audion.GraphContext): Audion.GraphContext {
    method update (line 143) | update(message: Audion.GraphContext) {
    method getNodeAtViewportPoint (line 191) | getNodeAtViewportPoint(viewportPoint: {x: number; y: number}) {
    method getNodeAtScreenPoint (line 199) | getNodeAtScreenPoint(screenPoint: {x: number; y: number}) {
    method initEvents (line 212) | initEvents() {
    method createNodeRender (line 255) | createNodeRender(nodeId: string, node: Audion.GraphNode): AudioNodeRen...
    method destroyNodeRender (line 271) | destroyNodeRender(nodeId: any) {
    method compareEdgeKey (line 283) | compareEdgeKey(left: AudioEdgeKey, right: AudioEdgeKey) {
    method createEdgeId (line 302) | createEdgeId({v, w, name}: Audion.GraphlibEdge) {
    method destroyEdgeId (line 317) | destroyEdgeId(edgeId: AudioEdgeKey) {
    method createEdgeRender (line 340) | createEdgeRender(
    method destroyEdgeRender (line 383) | destroyEdgeRender(edgeId: AudioEdgeKey) {

FILE: src/panel/graph/AudioGraphText.ts
  class AudioGraphText (line 3) | class AudioGraphText {
    method constructor (line 10) | constructor(textStyle: PIXI.TextStyle, content: string) {
    method createSprite (line 19) | createSprite() {

FILE: src/panel/graph/AudioGraphTextCacheGroup.ts
  class AudioGraphTextCache (line 6) | class AudioGraphTextCache {
    method constructor (line 11) | constructor({textStyle}: {textStyle: PIXI.TextStyle}) {
    method getText (line 15) | getText(content: string) {
    method getTextBounds (line 23) | getTextBounds(content: string) {
  class AudioGraphTextCacheGroup (line 28) | class AudioGraphTextCacheGroup {
    method constructor (line 32) | constructor() {

FILE: src/panel/graph/AudioNodeBackground.ts
  type AudioNodeBackgroundStyle (line 16) | interface AudioNodeBackgroundStyle {
  class AudioNodeTextMetrics (line 20) | class AudioNodeTextMetrics {
    method from (line 24) | static from(
  class AudioNodeMetrics (line 39) | class AudioNodeMetrics {
    method from (line 46) | static from(
  class AudioNodeBackground (line 60) | class AudioNodeBackground {
    method INPUT_GROUP_MARGIN (line 69) | static get INPUT_GROUP_MARGIN() {
    method INPUT_HEIGHT (line 74) | static get INPUT_HEIGHT() {
    method INPUT_RADIUS (line 79) | static get INPUT_RADIUS() {
    method PARAM_GROUP_MARGIN (line 84) | static get PARAM_GROUP_MARGIN() {
    method PARAM_HEIGHT (line 89) | static get PARAM_HEIGHT() {
    method PARAM_RADIUS (line 94) | static get PARAM_RADIUS() {
    method init (line 98) | init(metrics: AudioNodeMetrics) {
    method _getParamYStart (line 153) | private _getParamYStart({
    method _getSize (line 168) | private _getSize(
  class AudioNodeBackgroundRender (line 208) | class AudioNodeBackgroundRender {
    method constructor (line 215) | constructor(
    method draw (line 225) | draw(graphics: PIXI.Graphics) {
    method getGeometry (line 259) | getGeometry() {
    method createMesh (line 268) | createMesh() {

FILE: src/panel/graph/AudioNodeBackgroundRenderCacheGroup.ts
  class AudioNodeBackgroundCache (line 14) | class AudioNodeBackgroundCache {
    method constructor (line 19) | constructor(textCacheGroup: AudioGraphTextCacheGroup) {
    method getBackground (line 23) | getBackground(node: Audion.GraphNode) {
  class AudioNodeBackgroundRenderCache (line 33) | class AudioNodeBackgroundRenderCache {
    method constructor (line 43) | constructor({
    method getBackground (line 59) | getBackground(node: Audion.GraphNode) {
  class AudioNodeBackgroundRenderCacheGroup (line 73) | class AudioNodeBackgroundRenderCacheGroup {
    method constructor (line 80) | constructor({textCacheGroup}: {textCacheGroup: AudioGraphTextCacheGrou...

FILE: src/panel/graph/AudioNodePort.ts
  constant ZERO_POINT (line 6) | const ZERO_POINT = new PIXI.Point();
  type AudioNodePortType (line 8) | enum AudioNodePortType {
  class AudioNodePort (line 17) | class AudioNodePort {
    method INPUT_RADIUS (line 27) | static get INPUT_RADIUS() {
    method PARAM_RADIUS (line 32) | static get PARAM_RADIUS() {
    method constructor (line 40) | constructor({
    method updateNodeDisplay (line 64) | updateNodeDisplay() {
    method drawSocket (line 71) | drawSocket(
    method drawConnect (line 89) | drawConnect(graphics: PIXI.Graphics) {

FILE: src/panel/graph/AudioNodeRender.ts
  class AudioNodeRender (line 21) | class AudioNodeRender {
    method constructor (line 48) | constructor(id: string) {
    method INPUT_GROUP_MARGIN (line 68) | static get INPUT_GROUP_MARGIN() {
    method INPUT_HEIGHT (line 73) | static get INPUT_HEIGHT() {
    method INPUT_RADIUS (line 78) | static get INPUT_RADIUS() {
    method PARAM_GROUP_MARGIN (line 83) | static get PARAM_GROUP_MARGIN() {
    method PARAM_HEIGHT (line 88) | static get PARAM_HEIGHT() {
    method PARAM_RADIUS (line 93) | static get PARAM_RADIUS() {
    method init (line 101) | init(
    method setPixiParent (line 152) | setPixiParent(parent: PIXI.Container) {
    method remove (line 160) | remove() {
    method initSize (line 165) | initSize(textCacheGroup: AudioGraphTextCacheGroup) {
    method initPorts (line 212) | initPorts(portCacheGroup: AudioPortCacheGroup) {
    method setHighlight (line 303) | setHighlight(isHighlighted: boolean) {
    method updatePortDisplay (line 308) | updatePortDisplay(portType: AudionPanel.PortType, index: number) {
    method draw (line 324) | draw() {

FILE: src/panel/graph/AudioPortCacheGroup.ts
  class AudioPortCache (line 6) | class AudioPortCache {
    method constructor (line 11) | constructor(port: AudioNodePort) {
    method getGeometry (line 15) | getGeometry() {
    method createGraphics (line 24) | createGraphics(position = new PIXI.Point()) {
  class AudioPortCacheGroup (line 32) | class AudioPortCacheGroup {
    method constructor (line 36) | constructor() {

FILE: src/panel/graph/Camera.js
  constant MIN_ZOOM (line 6) | const MIN_ZOOM = 0.5;
  class Camera (line 11) | class Camera {
    method constructor (line 13) | constructor() {
    method update (line 27) | update() {}
    method move (line 33) | move(dx, dy) {
    method zoom (line 66) | zoom(screenX, screenY, zoomDelta) {
    method fitToScreen (line 105) | fitToScreen() {
    method setGraphSize (line 113) | setGraphSize(width, height) {
    method setScreenSize (line 124) | setScreenSize(width, height) {

FILE: src/panel/graph/GraphicsCache.ts
  class GraphicsCache (line 7) | class GraphicsCache {

FILE: src/panel/graph/graphStyle.ts
  type GraphColor (line 1) | enum GraphColor {

FILE: src/panel/main.ts
  function isHTMLElement (line 153) | function isHTMLElement(value: unknown): value is HTMLElement {

FILE: src/panel/updateGraphRender.ts
  function updateGraphRender (line 4) | function updateGraphRender(

FILE: src/panel/updateGraphSizes.ts
  function updateGraphSizes (line 5) | function updateGraphSizes(

FILE: src/panel/worker.ts
  type LayoutOptionsMessage (line 20) | interface LayoutOptionsMessage {
  type GraphContextMessage (line 24) | interface GraphContextMessage {
  type PanelMessage (line 28) | type PanelMessage = MessageEvent<LayoutOptionsMessage | GraphContextMess...

FILE: src/utils/Observer.emitter.js
  function observeMessageEvents (line 10) | function observeMessageEvents(emitter) {
  function postObservations (line 26) | function postObservations(observer, poster) {

FILE: src/utils/Observer.ts
  function noop (line 11) | function noop(...args: any) {}
  function makeCancelable (line 18) | function makeCancelable<T>(promise: Promise<T>): Utils.CancelablePromise...
  class Observer (line 36) | class Observer<T> implements Utils.Observer<T> {
    method constructor (line 41) | constructor(subscribe: Utils.SubscribeCallback<T>) {
    method transform (line 52) | static transform<T1, T2>(
    method filter (line 63) | static filter<T>(
    method reduce (line 76) | static reduce<T, R>(
    method throttle (line 90) | static throttle<T>(
    method onSubscribe (line 105) | static onSubscribe<T1, T2>(
    method props (line 112) | static props<T extends {[key: string]: any}>(
    method observe (line 138) | observe(
    method _onNext (line 152) | protected _onNext(message: T): void {
    method _onComplete (line 158) | protected _onComplete(): void {
    method _onError (line 164) | protected _onError(reason: any): void {
    method _subscribeToParent (line 173) | protected _subscribeToParent(): void {
    method _unsubscribeFromParent (line 186) | protected _unsubscribeFromParent(): void {
  class ThrottleObserver (line 199) | class ThrottleObserver<T> extends Observer<T> {
    method constructor (line 205) | constructor(
    method _flush (line 269) | private _flush() {
  class SubscribeImmediateObserver (line 285) | class SubscribeImmediateObserver<T1, T2> extends Observer<T1 | T2> {
    method constructor (line 291) | constructor(target: Utils.Observer<T1>, onSubscribe: () => T2) {
    method observe (line 299) | observe(

FILE: src/utils/Types.ts
  type Observer (line 102) | interface Observer<T> {
  type SubscribeCallback (line 116) | interface SubscribeCallback<T> {
  type SubscribeOnNext (line 124) | interface SubscribeOnNext<T> {
  type Cancelable (line 128) | interface Cancelable<T> {
  type CancelablePromise (line 133) | interface CancelablePromise<T> {
  type ThrottleObserverOptions (line 138) | interface ThrottleObserverOptions<T> {
  type RetryOptions (line 143) | interface RetryOptions {

FILE: src/utils/dlog.js
  function DLOG (line 10) | function DLOG(message, properties) {

FILE: src/utils/error.js
  class InvariantError (line 6) | class InvariantError extends Error {
    method constructor (line 12) | constructor(message, args) {
    method message (line 21) | get message() {
  function invariant (line 38) | function invariant(test, message, ...args) {

FILE: src/utils/mapThruWorker.ts
  function mapThruWorker (line 4) | function mapThruWorker<T2>(worker: Worker) {

FILE: src/utils/math.js
  function clamp (line 10) | function clamp(value, min, max) {
  function trunc (line 25) | function trunc(value, digits) {

FILE: src/utils/retry.js
  function retry (line 9) | async function retry(

FILE: src/utils/rxChrome.ts
  function bindChromeCallback (line 12) | function bindChromeCallback<P extends any[], R extends any[]>(

FILE: src/utils/rxInterop.ts
  function toRX (line 15) | function toRX<T>(observer: Utils.Observer<T>): Observable<T> {
  function toUtilsObserver (line 25) | function toUtilsObserver<T>(
Condensed preview — 101 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (295K chars).
[
  {
    "path": ".babelrc",
    "chars": 169,
    "preview": "{\n  \"plugins\": [[\n    \"@babel/plugin-transform-modules-commonjs\"\n  ], [\n    \"@babel/plugin-proposal-optional-chaining\"\n "
  },
  {
    "path": ".editorconfig",
    "chars": 230,
    "preview": "# EditorConfig is awesome: https://EditorConfig.org\n\n# top-most EditorConfig file\nroot = true\n\n[*]\nindent_style = space\n"
  },
  {
    "path": ".eslintrc.json",
    "chars": 407,
    "preview": "{\n  \"env\": {\n    \"browser\": true,\n    \"es2021\": true,\n    \"node\": true\n  },\n  \"extends\": [\"eslint:recommended\", \"google\""
  },
  {
    "path": ".github/workflows/nodejs-ci.yml",
    "chars": 379,
    "preview": "name: Node.js CI\n\non: [push, pull_request]\n\njobs:\n  build:\n    runs-on: ubuntu-latest\n\n    steps:\n      - uses: actions/"
  },
  {
    "path": ".gitignore",
    "chars": 115,
    "preview": ".DS_Store\n# dependencies\nnode_modules\n# build/test\n.eslintcache\ndocs\ncoverage\n/build\n/simulations/build\n!src/build\n"
  },
  {
    "path": ".husky/.gitignore",
    "chars": 2,
    "preview": "_\n"
  },
  {
    "path": ".husky/pre-commit",
    "chars": 58,
    "preview": "#!/bin/sh\n. \"$(dirname \"$0\")/_/husky.sh\"\n\nnpx lint-staged\n"
  },
  {
    "path": ".jsdoc.json",
    "chars": 391,
    "preview": "{\n  \"source\": {\n    \"include\": [\"./src/\"],\n    \"includePattern\": \".+\\\\.js(doc)?$\",\n    \"excludePattern\": \"(^|\\\\/|\\\\\\\\)_|"
  },
  {
    "path": ".prettierrc",
    "chars": 116,
    "preview": "{\n  \"tabWidth\": 2,\n  \"useTabs\": false,\n  \"trailingComma\": \"all\",\n  \"singleQuote\": true,\n  \"bracketSpacing\": false\n}\n"
  },
  {
    "path": "LICENSE",
    "chars": 11343,
    "preview": "\n                                 Apache License\n                           Version 2.0, January 2004\n                  "
  },
  {
    "path": "README.md",
    "chars": 3736,
    "preview": "# Audion: Web Audio Graph Visualizer\n\n[![Node.js CI](https://github.com/GoogleChrome/audion/actions/workflows/nodejs-ci."
  },
  {
    "path": "fixtures/oscillatorGainParam.ts",
    "chars": 12402,
    "preview": "/**\n * Event sequences that would be produced by an audio context with oscillator\n * and gain nodes connecting outputs t"
  },
  {
    "path": "package.json",
    "chars": 3280,
    "preview": "{\n  \"name\": \"audion\",\n  \"private\": true,\n  \"version\": \"3.0.9\",\n  \"description\": \"A Chrome DevTools extension traces Web "
  },
  {
    "path": "simulations/updateGraphRender.html",
    "chars": 100,
    "preview": "<div class=\"graph\" style=\"height: 100%\"></div>\n<script src=\"./build/updateGraphRender.js\"></script>\n"
  },
  {
    "path": "simulations/updateGraphRender.ts",
    "chars": 2177,
    "preview": "import {\n  auditTime,\n  EMPTY,\n  filter,\n  finalize,\n  from,\n  interval,\n  map,\n  pipe,\n  switchMap,\n  take,\n} from 'rxj"
  },
  {
    "path": "simulations/webpack.config.js",
    "chars": 295,
    "preview": "const {resolve} = require('path');\n\nconst srcConfig = require('../src/webpack.config');\n\nmodule.exports = (env, argv) =>"
  },
  {
    "path": "src/.jest.config.json",
    "chars": 165,
    "preview": "{\n  \"collectCoverage\": true,\n  \"injectGlobals\": false,\n  \"transform\": {\n    \"\\\\.[jt]sx?$\": \"babel-jest\"\n  },\n  \"coverage"
  },
  {
    "path": "src/build/make-chrome-extension.js",
    "chars": 3778,
    "preview": "/**\n * A nodejs script that copies files, writes a extension manifest, and zips it\n * all up.\n *\n * @namespace makeChrom"
  },
  {
    "path": "src/build/manifest.json.mustache",
    "chars": 299,
    "preview": "{\n  \"manifest_version\": 3,\n  \"name\": \"Audion\",\n  \"version\": \"{{version}}\",\n  \"description\": \"Web Audio DevTools Extensio"
  },
  {
    "path": "src/chrome/API.js",
    "chars": 366,
    "preview": "/// <reference path=\"./Debugger.js\" />\n/// <reference path=\"./DevTools.js\" />\n/// <reference path=\"./Runtime.js\" />\n\n/**"
  },
  {
    "path": "src/chrome/Debugger.js",
    "chars": 1587,
    "preview": "/// <reference path=\"Types.js\" />\n\n/**\n * [Chrome extension api][1] to the [Chrome Debugger Protocol][2]. Used by this\n "
  },
  {
    "path": "src/chrome/DebuggerPageDomain.ts",
    "chars": 1095,
    "preview": "/**\n * @file\n * Strings passed to `chrome.debugger.sendCommand` and received from\n * `chrome.debugger.onEvent` callbacks"
  },
  {
    "path": "src/chrome/DebuggerWebAudioDomain.ts",
    "chars": 1487,
    "preview": "/**\n * @file\n * Strings passed to `chrome.debugger.sendCommand` and received from\n * `chrome.debugger.onEvent` callbacks"
  },
  {
    "path": "src/chrome/DevTools.js",
    "chars": 1946,
    "preview": "/// <reference path=\"Types.js\" />\n\n/**\n * [Chrome extension api][1] to devtool inspector available to a extension's\n * d"
  },
  {
    "path": "src/chrome/Runtime.js",
    "chars": 1339,
    "preview": "/// <reference path=\"Types.js\" />\n\n/**\n * [Chrome extension api][1] about the extension the host platform and\n * communi"
  },
  {
    "path": "src/chrome/Types.js",
    "chars": 636,
    "preview": "/**\n * Types provided by the [chrome extension api][1].\n *\n * [1]: https://developer.chrome.com/docs/extensions/referenc"
  },
  {
    "path": "src/chrome/index.js",
    "chars": 2243,
    "preview": "/// <reference path=\"API.js\" />\n/// <reference path=\"Types.js\" />\n\n/**\n * Global chrome extension api instance.\n *\n * No"
  },
  {
    "path": "src/custom.d.ts",
    "chars": 151,
    "preview": "declare module '*.svg' {\n  const content: any;\n  export default content;\n}\n\ndeclare module '*.css' {\n  const content: an"
  },
  {
    "path": "src/devtools/DebuggerAttachEventController.ts",
    "chars": 17202,
    "preview": "import {\n  BehaviorSubject,\n  combineLatest,\n  concat,\n  defer,\n  EMPTY,\n  Observable,\n  of,\n  Subject,\n  Subscriber,\n} "
  },
  {
    "path": "src/devtools/DebuggerEvents.ts",
    "chars": 1430,
    "preview": "import {filter, map, Observable} from 'rxjs';\nimport {chrome} from '../chrome';\nimport {fromChromeEvent} from '../utils/"
  },
  {
    "path": "src/devtools/DevtoolsGraphPanel.test.js",
    "chars": 9158,
    "preview": "/// <reference path=\"../chrome/Types.js\" />\n/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n/// <reference "
  },
  {
    "path": "src/devtools/DevtoolsGraphPanel.ts",
    "chars": 1448,
    "preview": "/** DevTools panel that renders the Web Audio graph and more debugging information. */\n\nimport {chrome} from '../chrome'"
  },
  {
    "path": "src/devtools/Types.ts",
    "chars": 3333,
    "preview": "/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n\nimport {Protocol} from 'devtools-protocol/types/protocol';"
  },
  {
    "path": "src/devtools/WebAudioEventObserver.test.js",
    "chars": 3876,
    "preview": "/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n\nimport {beforeEach, describe, expect, it, jest} from '@jes"
  },
  {
    "path": "src/devtools/WebAudioEventObserver.ts",
    "chars": 2162,
    "preview": "import {chrome} from '../chrome';\nimport {Audion} from './Types';\n\nimport {Observable} from 'rxjs';\nimport {\n  CounterSu"
  },
  {
    "path": "src/devtools/WebAudioGraphIntegrator.test.js",
    "chars": 20093,
    "preview": "/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n\nimport {beforeEach, describe, expect, it, jest} from '@jes"
  },
  {
    "path": "src/devtools/WebAudioGraphIntegrator.ts",
    "chars": 20145,
    "preview": "import * as dagre from 'dagre';\nimport * as graphlib from 'graphlib';\nimport {ProtocolMapping} from 'devtools-protocol/t"
  },
  {
    "path": "src/devtools/WebAudioRealtimeData.ts",
    "chars": 2788,
    "preview": "import Protocol from 'devtools-protocol';\nimport {bindCallback, concatMap, interval, Observable} from 'rxjs';\nimport {ma"
  },
  {
    "path": "src/devtools/deserializeGraphContext.ts",
    "chars": 661,
    "preview": "import * as graphlib from 'graphlib';\nimport {Audion} from './Types';\n\nexport interface SerializedGraphContext extends A"
  },
  {
    "path": "src/devtools/layoutGraphContext.ts",
    "chars": 512,
    "preview": "import * as dagre from 'dagre';\n\nimport {Audion} from './Types';\n\nexport function layoutGraphContext(\n  context: Audion."
  },
  {
    "path": "src/devtools/main.ts",
    "chars": 3027,
    "preview": "import {merge} from 'rxjs';\nimport {\n  map,\n  scan,\n  take,\n  shareReplay,\n  share,\n  mergeMap,\n  auditTime,\n} from 'rxj"
  },
  {
    "path": "src/devtools/partitionMap.ts",
    "chars": 1887,
    "preview": "import {Observable, OperatorFunction, Subject} from 'rxjs';\n\ninterface PartitionMapConfig<V> {\n  /** Callback that retur"
  },
  {
    "path": "src/devtools/serializeGraphContext.js",
    "chars": 328,
    "preview": "import dagre from 'dagre';\n\n/**\n * @param {Audion.GraphContext} graphContext\n * @return {Audion.GraphContext}\n */\nexport"
  },
  {
    "path": "src/devtools/setOptionsToGraphContext.ts",
    "chars": 313,
    "preview": "import * as dagre from 'dagre';\n\nimport {Audion} from './Types';\n\nexport function setOptionsToGraphContext([context, lay"
  },
  {
    "path": "src/devtools.html",
    "chars": 146,
    "preview": "<html>\n  <head>\n    <title>DevTools: Audion Extension</title>\n  </head>\n  <body>\n    <script src=\"audion-devtools.js\"></"
  },
  {
    "path": "src/extraSettingPage/options.html",
    "chars": 270,
    "preview": "<!DOCTYPE html>\n<html>\n<head>\n    <title>Audion Addition Setting Options</title>\n</head>\n<body>\n    <label>\n        Clic"
  },
  {
    "path": "src/extraSettingPage/options.js",
    "chars": 903,
    "preview": "// prettier-ignore\n/**\n * Initializes the options page by setting up event listeners and\n * restoring saved options.\n */"
  },
  {
    "path": "src/panel/GraphSelector.ts",
    "chars": 1596,
    "preview": "import {Observable, combineLatest, BehaviorSubject} from 'rxjs';\nimport {map, shareReplay, distinctUntilChanged} from 'r"
  },
  {
    "path": "src/panel/Observer.runtime.ts",
    "chars": 1290,
    "preview": "import {Observable} from 'rxjs';\nimport {share} from 'rxjs/operators';\n\nimport {chrome} from '../chrome';\n\n/**\n * Connec"
  },
  {
    "path": "src/panel/Types.ts",
    "chars": 952,
    "preview": "import * as PIXI from 'pixi.js';\n\n/** @namespace AudionPanel */\n\n/**\n * @typedef AudionPanel.Point\n * @property {number}"
  },
  {
    "path": "src/panel/components/WholeGraphButton.css",
    "chars": 157,
    "preview": ".wholeGraphButton {\n  position: absolute;\n  top: 5px;\n  left: 5px;\n  cursor: pointer;\n  opacity: 0.8;\n  border-radius: 3"
  },
  {
    "path": "src/panel/components/WholeGraphButton.ts",
    "chars": 656,
    "preview": "import {fromEvent} from 'rxjs';\n\nimport style from './WholeGraphButton.css';\nimport wholeGraphButtonImage from './WholeG"
  },
  {
    "path": "src/panel/components/collectGarbage.css",
    "chars": 521,
    "preview": ":global(.-theme-with-dark-background) .collectIcon {\n  --override-icon-mask-background-color: rgb(145 145 145);\n}\n.colle"
  },
  {
    "path": "src/panel/components/collectGarbage.ts",
    "chars": 1417,
    "preview": "import {fromEvent, merge, NEVER, Observable} from 'rxjs';\nimport {map, startWith, switchMap} from 'rxjs/operators';\n\nimp"
  },
  {
    "path": "src/panel/components/detailPanel.css",
    "chars": 367,
    "preview": ".detailPanel > * {\n  padding: 0 1rem;\n}\n.detailPanel h1,\n.detailPanel h2,\n.detailPanel h3,\n.detailPanel h4,\n.detailPanel"
  },
  {
    "path": "src/panel/components/detailPanel.ts",
    "chars": 4190,
    "preview": "import {merge, NEVER, Observable} from 'rxjs';\nimport {distinctUntilChanged, map, startWith, switchMap} from 'rxjs/opera"
  },
  {
    "path": "src/panel/components/domUtils.ts",
    "chars": 6048,
    "preview": "import {defer, Observable, of} from 'rxjs';\nimport {finalize, map, scan, switchMap} from 'rxjs/operators';\n\n/**\n * Creat"
  },
  {
    "path": "src/panel/components/realtimeSummary.ts",
    "chars": 1574,
    "preview": "import {map, Observable} from 'rxjs';\n\nimport {Audion} from '../../devtools/Types';\nimport {setElementHTML} from './domU"
  },
  {
    "path": "src/panel/components/selectGraph.css",
    "chars": 217,
    "preview": ".dropdownOption {\n  display: flex;\n  height: 2rem;\n  align-items: center;\n  cursor: pointer;\n  padding: 0 0.2rem;\n}\n\n.dr"
  },
  {
    "path": "src/panel/components/selectGraph.ts",
    "chars": 7034,
    "preview": "import {\n  BehaviorSubject,\n  combineLatest,\n  fromEvent,\n  merge,\n  Observable,\n  of,\n} from 'rxjs';\nimport {\n  distinc"
  },
  {
    "path": "src/panel/graph/AudioEdgeArrowGraphics.ts",
    "chars": 2419,
    "preview": "import * as PIXI from 'pixi.js';\nimport {GraphColor} from './graphStyle';\n\nconst ARROW_LENGTH = 16;\nconst ARROW_HEIGHT ="
  },
  {
    "path": "src/panel/graph/AudioEdgeCurvedLineGraphics.ts",
    "chars": 4440,
    "preview": "import * as PIXI from 'pixi.js';\n\nimport {AudionPanel} from '../Types';\n\nimport {GraphColor} from './graphStyle';\n\nconst"
  },
  {
    "path": "src/panel/graph/AudioEdgeRender.ts",
    "chars": 6122,
    "preview": "import * as PIXI from 'pixi.js';\n\nimport type {AudionPanel} from '../Types';\n\nimport {EdgeArrowGraphics} from './AudioEd"
  },
  {
    "path": "src/panel/graph/AudioGraphRender.ts",
    "chars": 10936,
    "preview": "/// <reference path=\"../../chrome/Types.js\" />\n\nimport * as PIXI from 'pixi.js';\nimport {BehaviorSubject} from 'rxjs';\n\n"
  },
  {
    "path": "src/panel/graph/AudioGraphText.ts",
    "chars": 536,
    "preview": "import * as PIXI from 'pixi.js';\n\nexport class AudioGraphText {\n  bounds: PIXI.Rectangle;\n  content: string;\n  text: PIX"
  },
  {
    "path": "src/panel/graph/AudioGraphTextCacheGroup.ts",
    "chars": 1004,
    "preview": "import * as PIXI from 'pixi.js';\n\nimport {AudioGraphText} from './AudioGraphText';\nimport {GraphTextStyle} from './graph"
  },
  {
    "path": "src/panel/graph/AudioNodeBackground.ts",
    "chars": 7303,
    "preview": "import * as PIXI from 'pixi.js';\n\nimport {Audion} from '../../devtools/Types';\n\nimport {AudionPanel} from '../Types';\n\ni"
  },
  {
    "path": "src/panel/graph/AudioNodeBackgroundRenderCacheGroup.ts",
    "chars": 2632,
    "preview": "import * as PIXI from 'pixi.js';\nimport {MeshMaterial} from 'pixi.js';\n\nimport {Audion} from '../../devtools/Types';\n\nim"
  },
  {
    "path": "src/panel/graph/AudioNodePort.ts",
    "chars": 1844,
    "preview": "import * as PIXI from 'pixi.js';\n\nimport {AudionPanel} from '../Types';\nimport {GraphPortStyle} from './graphStyle';\n\nco"
  },
  {
    "path": "src/panel/graph/AudioNodeRender.ts",
    "chars": 9331,
    "preview": "import * as PIXI from 'pixi.js';\n\nimport {Audion} from '../../devtools/Types';\n\nimport {\n  GraphColor,\n  colorFromNodeTy"
  },
  {
    "path": "src/panel/graph/AudioPortCacheGroup.ts",
    "chars": 1461,
    "preview": "import * as PIXI from 'pixi.js';\nimport {AudionPanel} from '../Types';\nimport {AudioNodePort} from './AudioNodePort';\nim"
  },
  {
    "path": "src/panel/graph/Camera.js",
    "chars": 3079,
    "preview": "import {Rectangle} from '@pixi/math';\n\nimport {Observer} from '../../utils/Observer';\nimport {trunc, clamp} from '../../"
  },
  {
    "path": "src/panel/graph/GraphicsCache.ts",
    "chars": 831,
    "preview": "import {EdgeArrowGraphics} from './AudioEdgeArrowGraphics';\nimport {EdgeCurvedLineGraphics} from './AudioEdgeCurvedLineG"
  },
  {
    "path": "src/panel/graph/graphStyle.js",
    "chars": 2598,
    "preview": "/** @enum {number} */\nexport const Color = {\n  PROCESSOR: 0x64b5f6,\n  MEDIA: 0xba68c8,\n  SOURCE: 0x81c784,\n  DESTINATION"
  },
  {
    "path": "src/panel/graph/graphStyle.ts",
    "chars": 2765,
    "preview": "export enum GraphColor {\n  PROCESSOR = 0x64b5f6,\n  MEDIA = 0xba68c8,\n  SOURCE = 0x81c784,\n  DESTINATION = 0x90a4ad,\n  AN"
  },
  {
    "path": "src/panel/main.ts",
    "chars": 4797,
    "preview": "/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n\n// This module disable's pixi.js use of new Function to op"
  },
  {
    "path": "src/panel/updateGraphRender.ts",
    "chars": 275,
    "preview": "import {Audion} from '../devtools/Types';\nimport {AudioGraphRender} from './graph/AudioGraphRender';\n\nexport function up"
  },
  {
    "path": "src/panel/updateGraphSizes.ts",
    "chars": 315,
    "preview": "import {Audion} from '../devtools/Types';\n\nimport {AudioGraphRender} from './graph/AudioGraphRender';\n\nexport function u"
  },
  {
    "path": "src/panel/worker.ts",
    "chars": 1592,
    "preview": "import * as dagre from 'dagre';\nimport {fromEvent, Observable} from 'rxjs';\nimport {\n  auditTime,\n  distinctUntilChanged"
  },
  {
    "path": "src/panel.html",
    "chars": 15254,
    "preview": "<html>\n  <head>\n    <style>\n      body {\n        cursor: default;\n        font-family: '.SFNSDisplay-Regular', 'Helvetic"
  },
  {
    "path": "src/utils/Observer.emitter.js",
    "chars": 730,
    "preview": "/// <reference path=\"Types.ts\" />\n\nimport {Observer} from './Observer';\n\n/**\n * @param {Utils.DataEmitter<T>} emitter\n *"
  },
  {
    "path": "src/utils/Observer.test.js",
    "chars": 9032,
    "preview": "import {describe, expect, it, jest} from '@jest/globals';\n\nimport {InvariantError} from './error';\nimport {Observer} fro"
  },
  {
    "path": "src/utils/Observer.ts",
    "chars": 7524,
    "preview": "import {Utils} from './Types';\n\nimport {invariant} from './error';\n\n/* istanbul ignore next */\n/**\n * Do nothing.\n * @pa"
  },
  {
    "path": "src/utils/Types.ts",
    "chars": 3339,
    "preview": "/** @namespace Utils */\n\n/**\n * An abstraction of the observer idiom.\n *\n * @typedef Utils.Observer\n * @property {Utils."
  },
  {
    "path": "src/utils/dlog.js",
    "chars": 1728,
    "preview": "import {getTimestampAsString} from '../devtools/WebAudioGraphIntegrator';\n\n// prettier-ignore\n/**\n * Send console loggin"
  },
  {
    "path": "src/utils/error.js",
    "chars": 825,
    "preview": "/**\n * An error caused by a falsifiable assumption shown to be false.\n * @memberof Utils\n * @alias InvariantError\n */\nex"
  },
  {
    "path": "src/utils/error.test.js",
    "chars": 698,
    "preview": "import {describe, expect, it} from '@jest/globals';\n\nimport {invariant, InvariantError} from './error';\n\ndescribe('invar"
  },
  {
    "path": "src/utils/index.js",
    "chars": 34,
    "preview": "/// <reference path=\"Types.ts\" />\n"
  },
  {
    "path": "src/utils/mapThruWorker.ts",
    "chars": 641,
    "preview": "import {fromEvent, Observable, Subscription} from 'rxjs';\nimport {map} from 'rxjs/operators';\n\nexport function mapThruWo"
  },
  {
    "path": "src/utils/math.js",
    "chars": 754,
    "preview": "import {invariant} from './error';\n\n/**\n * Clamp a value between two extremes.\n * @param {number} value\n * @param {numbe"
  },
  {
    "path": "src/utils/retry.js",
    "chars": 435,
    "preview": "/**\n * @param {function(): PromiseLike<T> | T} fn\n * @param {Utils.RetryOptions} options\n * @return {Promise<T>}\n * @tem"
  },
  {
    "path": "src/utils/retry.test.js",
    "chars": 1535,
    "preview": "import {describe, expect, it, jest} from '@jest/globals';\n\nimport {retry} from './retry';\n\ndescribe('retry', () => {\n  i"
  },
  {
    "path": "src/utils/rxChrome.ts",
    "chars": 1633,
    "preview": "import {fromEventPattern, Observable} from 'rxjs';\nimport {chrome} from '../chrome';\nimport {ChromeDebuggerAPIEvent} fro"
  },
  {
    "path": "src/utils/rxInterop.ts",
    "chars": 968,
    "preview": "import {Observable} from 'rxjs';\n\nimport {Observer} from './Observer';\nimport {Utils} from './Types';\n\n/**\n * Wrap a `Ut"
  },
  {
    "path": "src/webpack.config.js",
    "chars": 1150,
    "preview": "const {resolve} = require('path');\nconst CopyPlugin = require('copy-webpack-plugin');\n\nmodule.exports = (env, argv) => ("
  },
  {
    "path": "test/.jest-puppeteer.config.json",
    "chars": 197,
    "preview": "{\n  \"launch\": {\n    \"headless\": false,\n    \"devtools\": true,\n    \"args\": [\n      \"--no-sandbox\",\n      \"--disable-extens"
  },
  {
    "path": "test/.jest.config.json",
    "chars": 151,
    "preview": "{\n  \"preset\": \"jest-puppeteer\",\n  \"injectGlobals\": false,\n  \"transform\": {\n    \"\\\\.[jt]sx?$\": \"babel-jest\"\n  },\n  \"testM"
  },
  {
    "path": "test/README.md",
    "chars": 34,
    "preview": "A directory of integration tests.\n"
  },
  {
    "path": "test/browserLaunch.js",
    "chars": 293,
    "preview": "/* global browser */\n\nimport {it} from '@jest/globals';\n\nit('browser launches with extension', async () => {\n  const bro"
  },
  {
    "path": "test/updateGraphRender.js",
    "chars": 543,
    "preview": "import {resolve} from 'path';\n\nimport {expect, it} from '@jest/globals';\nimport {from, fromEvent, lastValueFrom, takeUnt"
  },
  {
    "path": "tsconfig.json",
    "chars": 207,
    "preview": "{\n  \"compilerOptions\": {\n    \"allowJs\": true,\n    \"target\": \"es2020\",\n    \"moduleResolution\": \"node\",\n    \"allowSyntheti"
  }
]

About this extraction

This page contains the full source code of the google/audion GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 101 files (271.5 KB), approximately 71.7k tokens, and a symbol index with 335 extracted functions, classes, methods, constants, and types. You can use it with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input, and you can either copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub repository-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!