[
  {
    "path": ".babelrc",
    "content": "{\n  \"plugins\": [[\n    \"@babel/plugin-transform-modules-commonjs\"\n  ], [\n    \"@babel/plugin-proposal-optional-chaining\"\n  ]],\n  \"presets\": [\"@babel/preset-typescript\"]\n}\n"
  },
  {
    "path": ".editorconfig",
    "content": "# EditorConfig is awesome: https://EditorConfig.org\n\n# top-most EditorConfig file\nroot = true\n\n[*]\nindent_style = space\nindent_size = 2\nend_of_line = lf\ncharset = utf-8\ntrim_trailing_whitespace = true\ninsert_final_newline = false\n"
  },
  {
    "path": ".eslintrc.json",
    "content": "{\n  \"env\": {\n    \"browser\": true,\n    \"es2021\": true,\n    \"node\": true\n  },\n  \"extends\": [\"eslint:recommended\", \"google\"],\n  \"parserOptions\": {\n    \"ecmaVersion\": 12,\n    \"sourceType\": \"module\"\n  },\n  \"rules\": {\n    // Indent files with prettier\n    \"indent\": [\"off\"],\n    // Allow triple slash comments\n    \"spaced-comment\": [\"error\", \"always\", {\"markers\": [\"/\"]}],\n    \"operator-linebreak\": [\"off\"]\n  }\n}\n"
  },
  {
    "path": ".github/workflows/nodejs-ci.yml",
    "content": "name: Node.js CI\n\non: [push, pull_request]\n\njobs:\n  build:\n    runs-on: ubuntu-latest\n\n    steps:\n      - uses: actions/checkout@v3\n      - name: Use Node.js \"18.x\"\n        uses: actions/setup-node@v3\n        with:\n          node-version: '18.x'\n      - run: npm ci\n      - name: Run npm test with xvfb\n        uses: coactions/setup-xvfb@v1\n        with:\n          run: npm test\n"
  },
  {
    "path": ".gitignore",
    "content": ".DS_Store\n# dependencies\nnode_modules\n# build/test\n.eslintcache\ndocs\ncoverage\n/build\n/simulations/build\n!src/build\n"
  },
  {
    "path": ".husky/.gitignore",
    "content": "_\n"
  },
  {
    "path": ".husky/pre-commit",
    "content": "#!/bin/sh\n. \"$(dirname \"$0\")/_/husky.sh\"\n\nnpx lint-staged\n"
  },
  {
    "path": ".jsdoc.json",
    "content": "{\n  \"source\": {\n    \"include\": [\"./src/\"],\n    \"includePattern\": \".+\\\\.js(doc)?$\",\n    \"excludePattern\": \"(^|\\\\/|\\\\\\\\)_|\\\\.test\\\\.js$\"\n  },\n  \"opts\": {\n    \"encoding\": \"utf8\",\n    \"recurse\": true,\n    \"private\": false,\n    \"lenient\": true,\n    \"destination\": \"./docs\",\n    \"template\": \"./node_modules/@pixi/jsdoc-template\",\n    \"readme\": \"README.md\"\n  },\n  \"plugins\": [\"plugins/markdown\"]\n}\n"
  },
  {
    "path": ".prettierrc",
    "content": "{\n  \"tabWidth\": 2,\n  \"useTabs\": false,\n  \"trailingComma\": \"all\",\n  \"singleQuote\": true,\n  \"bracketSpacing\": false\n}\n"
  },
  {
    "path": "LICENSE",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. 
For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. 
Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. (Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2021 Google, Inc.\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "# Audion: Web Audio Graph Visualizer\n\n[![Node.js CI](https://github.com/GoogleChrome/audion/actions/workflows/nodejs-ci.yml/badge.svg)](https://github.com/GoogleChrome/audion/actions/workflows/nodejs-ci.yml)\n\nAudion is a Chrome extension that adds a panel to DevTools. This panel\nvisualizes the audio graph (programmed with Web Audio API) in real-time.\nSoon you will be able to install the extension from Chrome Web Store page.\n\n![Google Doodle Hiphop](https://raw.githubusercontent.com/GoogleChrome/audion/main/images/hiphop-doodle.png)\n\n## Usage\n\n1. [Install the extension](https://chrome.google.com/webstore/detail/audion/cmhomipkklckpomafalojobppmmidlgl)\n   from Chrome Web Store.\n   1. Alternatively, you can clone this repository and build the extension\n      locally. Follow\n      [this instruction](https://developer.chrome.com/docs/extensions/mv3/faq/#faq-dev-01)\n      to load the local build.\n1. [Open Chrome Developer Tools](https://developer.chrome.com/docs/devtools/open/).\n   You should be able to find “Web Audio” panel in the top. Select the panel.\n1. Visit or reload a page that uses Web Audio API. If the page is loaded before\n   opening Developer Tools, you need to reload the page for the extension to\n   work correctly.\n1. You can pan and zoom with the mouse and wheel. Click the “autofit” button to\n   fit the graph within the panel.\n\n## Development\n\n### Build and test the extension\n\n1. Install NodeJS 14 or later.\n1. Install dependencies with `npm ci` or `npm install`.\n1. Run `npm test` to build and test the extension.\n\n#### Install the development copy of the extension\n\n1. Open `chrome://extensions` in Chrome.\n1. Turn on `Developer mode` if it is not already active.\n1. Load an unpacked extension with the `Load unpacked` button. In the file\n   modal that opens, select the `audion` directory inside of the `build`\n   directory under the copy of this repository.\n\n#### Use and make changes to the extension\n\n1. Open the added `Web Audio` panel in an inspector window with a page that\n   uses Web Audio API.\n1. Make changes to the extension and rebuild with `npm test` or `npm run build`.\n1. Open `chrome://extensions`, click `Update` to reload the rebuilt extension.\n   Close and reopen any tab and inspector to get the rebuilt extension's panel.\n\n### Use extra debugging information\n\n1. Open the extension option panel and check \"Click here to show more debug\n   info\".\n2. Right click the visualizer panel and click \"Inspect\" to the extension's\n   DevTools panel, and see the console for the extra debugging information.\n\n## Acknowledgments\n\nSpecial thanks to [Chi Zeng](https://github.com/chihuahua) (Google),\n[Gaoping Huang](https://github.com/gaopinghuang0),\n[Michael \"Z\" Goddard](https://github.com/mzgoddard)\n([Bocoup](https://bocoup.com/)) and\n[Tenghui Zhang](https://github.com/TenghuiZhang) for their contribution on this\nproject.\n\n## Contribution\n\nIf you have found an error in this library, please file an issue at:\nhttps://github.com/GoogleChrome/audion/issues.\n\nPatches are encouraged, and may be submitted by forking this project and\nsubmitting a pull request through GitHub. See CONTRIBUTING for more detail.\n\n## License\n\nCopyright 2021 Google Inc. All Rights Reserved.\n\nLicensed under the Apache License, Version 2.0 (the \"License\"); you may not use\nthis file except in compliance with the License. 
You may obtain a copy of the\nLicense at\n\nhttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software distributed\nunder the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR\nCONDITIONS OF ANY KIND, either express or implied. See the License for the\nspecific language governing permissions and limitations under the License.\n"
  },
  {
    "path": "fixtures/oscillatorGainParam.ts",
    "content": "/**\n * Event sequences that would be produced by an audio context with oscillator\n * and gain nodes connecting outputs to params.\n *\n * @file\n */\n\nimport {WebAudioDebuggerEvent} from '../src/chrome/DebuggerWebAudioDomain';\nimport {Audion} from '../src/devtools/Types';\n\n/**\n * A sequence of events produced by WebAudioEventObservable from a context\n * connect some oscillator and gain nodes, especially connecting an output to\n * another gain node's gain param.\n *\n * @example\n *   // unit and integration tests can replace\n *   new WebAudioEventObservable()\n *   // with something like\n *   from(OSCILLATOR_GAIN_PARAM_EVENTS)\n *   // or something over time such as\n *   interval(50).pipe(map((_, i) =>\n *     OSCILLATOR_GAIN_PARAM_EVENTS[i]))\n *\n * @example\n *   // context that creates this sequence from\n *   // WebAudioEventObservable\n *   const audioContext = new AudioContext();\n *   const delayNode = new DelayNode(audioContext,\n *     {delayTime: delayTime});\n *   const inputNode = new GainNode(audioContext);\n *   const outputNode = new GainNode(audioContext);\n *   const depthNode = new GainNode(audioContext,\n *     {gain: width});\n *   const oscillatorNode = new OscillatorNode(audioContext,\n *     {type: \"sine\", frequency: speed});\n *   inputNode.connect(delayNode);\n *   delayNode.connect(outputNode);\n *   oscillatorNode.connect(depthNode);\n *   depthNode.connect(delayNode.delayTime);\n *\n * @see https://github.com/GoogleChrome/audion/issues/117\n */\nexport const OSCILLATOR_GAIN_PARAM_EVENTS: Audion.WebAudioEvent[] = [\n  {\n    method: WebAudioDebuggerEvent.contextCreated,\n    params: {\n      context: {\n        callbackBufferSize: 256,\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        contextState: 'suspended',\n        contextType: 'realtime',\n        maxOutputChannelCount: 2,\n        sampleRate: 48000,\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioNodeCreated,\n    params: {\n      node: {\n        channelCount: 2,\n        channelCountMode: 'explicit',\n        channelInterpretation: 'speakers',\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        nodeId: '57a4d84b-6165-495e-9ad7-2ad82497d423',\n        nodeType: 'AudioDestination',\n        numberOfInputs: 1,\n        numberOfOutputs: 0,\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioListenerCreated,\n    params: {\n      listener: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        listenerId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: '63a77a6c-1779-42df-bedc-c68c5171722f',\n        paramType: 'positionX',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: 'e15f2c0e-f466-4d2a-92a2-c3fe23e591f5',\n        paramType: 'positionY',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: 
WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: 'bbabbcc8-91eb-4014-9351-43e1742644e9',\n        paramType: 'positionZ',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: '4e3f5c2d-6b59-4a69-ab4f-da62db30e7db',\n        paramType: 'forwardX',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: 'd2425aaa-dc91-4e60-ba57-22be7b26f941',\n        paramType: 'forwardY',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: -1,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: '1842fc18-6b51-402b-97f1-c56d4681866a',\n        paramType: 'forwardZ',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: '872a56b9-ed99-47ea-9957-bda9307fac5b',\n        paramType: 'upX',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 1,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: '4acf61c7-363f-44af-9857-c5e8c8ea5629',\n        paramType: 'upY',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28',\n        paramId: '4b818074-5b96-42c3-b2e6-fcdd350e37bb',\n        paramType: 'upZ',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioNodeCreated,\n    params: {\n      node: {\n        channelCount: 2,\n        channelCountMode: 'max',\n        channelInterpretation: 'speakers',\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        nodeId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76',\n        nodeType: 'Delay',\n        numberOfInputs: 1,\n        
numberOfOutputs: 1,\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 1,\n        minValue: 0,\n        nodeId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76',\n        paramId: 'a88ea483-fc15-4c2b-ab0c-597af8e069b9',\n        paramType: 'delayTime',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioNodeCreated,\n    params: {\n      node: {\n        channelCount: 2,\n        channelCountMode: 'max',\n        channelInterpretation: 'speakers',\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        nodeId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f',\n        nodeType: 'Gain',\n        numberOfInputs: 1,\n        numberOfOutputs: 1,\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 1,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f',\n        paramId: '03e13b59-a58f-4883-8479-d7a048ebe80a',\n        paramType: 'gain',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioNodeCreated,\n    params: {\n      node: {\n        channelCount: 2,\n        channelCountMode: 'max',\n        channelInterpretation: 'speakers',\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        nodeId: '78b78fae-b32e-4993-a2b4-7523c08e16c0',\n        nodeType: 'Gain',\n        numberOfInputs: 1,\n        numberOfOutputs: 1,\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 1,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: '78b78fae-b32e-4993-a2b4-7523c08e16c0',\n        paramId: 'b6ea1b98-2dda-43d0-8a52-49492fcafdde',\n        paramType: 'gain',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioNodeCreated,\n    params: {\n      node: {\n        channelCount: 2,\n        channelCountMode: 'max',\n        channelInterpretation: 'speakers',\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        nodeId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f',\n        nodeType: 'Gain',\n        numberOfInputs: 1,\n        numberOfOutputs: 1,\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 1,\n        maxValue: 3.4028234663852886e38,\n        minValue: -3.4028234663852886e38,\n        nodeId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f',\n        paramId: '38ec329f-650c-4c35-805c-32c559b47ea7',\n        paramType: 'gain',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioNodeCreated,\n    params: {\n      node: {\n        channelCount: 2,\n        channelCountMode: 'max',\n        channelInterpretation: 'speakers',\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf',\n        nodeType: 'Oscillator',\n        numberOfInputs: 0,\n        numberOfOutputs: 1,\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n 
   params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 0,\n        maxValue: 153600,\n        minValue: -153600,\n        nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf',\n        paramId: '0b2b73d2-bc98-423b-a19c-1a0651e06d20',\n        paramType: 'detune',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.audioParamCreated,\n    params: {\n      param: {\n        contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n        defaultValue: 440,\n        maxValue: 24000,\n        minValue: -24000,\n        nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf',\n        paramId: '42dddc62-c058-473e-9f48-a678a708c001',\n        paramType: 'frequency',\n        rate: 'a-rate',\n      },\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.nodesConnected,\n    params: {\n      contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n      destinationId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76',\n      destinationInputIndex: 0,\n      sourceId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f',\n      sourceOutputIndex: 0,\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.nodesConnected,\n    params: {\n      contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n      destinationId: '78b78fae-b32e-4993-a2b4-7523c08e16c0',\n      destinationInputIndex: 0,\n      sourceId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76',\n      sourceOutputIndex: 0,\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.nodesConnected,\n    params: {\n      contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n      destinationId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f',\n      destinationInputIndex: 0,\n      sourceId: '59200b98-60e1-43cf-88f6-d0a33d5643cf',\n      sourceOutputIndex: 0,\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.nodeParamConnected,\n    params: {\n      contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62',\n      destinationId: 'a88ea483-fc15-4c2b-ab0c-597af8e069b9',\n      sourceId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f',\n      sourceOutputIndex: 0,\n    },\n  },\n  {\n    method: WebAudioDebuggerEvent.contextWillBeDestroyed,\n    params: {contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62'},\n  },\n];\n"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"audion\",\n  \"private\": true,\n  \"version\": \"3.0.9\",\n  \"description\": \"A Chrome DevTools extension traces Web Audio API calls and visualizes in the DevTools.\",\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"git+https://github.com/GoogleChrome/audion.git\"\n  },\n  \"keywords\": [],\n  \"author\": \"\",\n  \"license\": \"Apache-2.0\",\n  \"bugs\": {\n    \"url\": \"https://github.com/GoogleChrome/audion/issues\"\n  },\n  \"homepage\": \"https://github.com/GoogleChrome/audion#readme\",\n  \"main\": \"index.js\",\n  \"engines\": {\n    \"node\": \"18\"\n  },\n  \"dependencies\": {\n    \"@pixi/unsafe-eval\": \"^7.2.4\",\n    \"dagre\": \"^0.8.5\",\n    \"pixi.js\": \"^7.2.4\",\n    \"rxjs\": \"^7.8.1\",\n    \"taffydb\": \"^2.7.3\"\n  },\n  \"devDependencies\": {\n    \"@babel/core\": \"^7.14.6\",\n    \"@babel/plugin-proposal-optional-chaining\": \"^7.16.7\",\n    \"@babel/plugin-transform-modules-commonjs\": \"^7.14.5\",\n    \"@babel/preset-typescript\": \"^7.16.7\",\n    \"@pixi/jsdoc-template\": \"^2.6.0\",\n    \"@types/dagre\": \"^0.7.46\",\n    \"@types/graphlib\": \"^2.1.8\",\n    \"babel-jest\": \"^29.5.0\",\n    \"copy-webpack-plugin\": \"^11.0.0\",\n    \"css-loader\": \"^6.6.0\",\n    \"devtools-protocol\": \"^0.0.924232\",\n    \"eslint\": \"^8.40.0\",\n    \"eslint-config-google\": \"^0.14.0\",\n    \"file-loader\": \"^6.2.0\",\n    \"husky\": \">=6\",\n    \"jest\": \"^27.0.6\",\n    \"jest-puppeteer\": \"^5.0.4\",\n    \"jsdoc\": \"^4.0.2\",\n    \"lint-staged\": \">=10\",\n    \"mustache\": \"^4.2.0\",\n    \"pinst\": \">=2\",\n    \"prettier\": \"^2.3.2\",\n    \"puppeteer\": \"^9.1.1\",\n    \"raw-loader\": \"^4.0.2\",\n    \"rimraf\": \"^3.0.2\",\n    \"source-map-loader\": \"^3.0.0\",\n    \"style-loader\": \"^3.2.1\",\n    \"ts-loader\": \"^9.2.6\",\n    \"typescript\": \"^4.4.3\",\n    \"webpack\": \"^5.44.0\",\n    \"webpack-cli\": \"^4.7.2\",\n    \"yazl\": \"^2.5.1\"\n  },\n  \"scripts\": {\n    \"build:chrome-extension\": \"node src/build/make-chrome-extension.js\",\n    \"build:clean\": \"rimraf build\",\n    \"build:webpack\": \"webpack --mode production --config src/webpack.config.js\",\n    \"build\": \"npm run build:clean && npm run build:webpack && npm run build:chrome-extension\",\n    \"clean\": \"rimraf build docs src/coverage simulations/build\",\n    \"dev\": \"webpack --mode development --config src/webpack.config.js && npm run build:chrome-extension\",\n    \"postinstall\": \"husky install\",\n    \"postpublish\": \"pinst --enable\",\n    \"prepublishOnly\": \"pinst --disable\",\n    \"test:integration:build\": \"npm run test:integration:clean && npm run test:integration:webpack\",\n    \"test:integration:clean\": \"rimraf simulations/build\",\n    \"test:integration:webpack\": \"webpack --mode development --config simulations/webpack.config.js\",\n    \"test:integration:run\": \"JEST_PUPPETEER_CONFIG=test/.jest-puppeteer.config.json jest --config test/.jest.config.json\",\n    \"test:integration\": \"npm run build && npm run test:integration:build && npm run test:integration:run\",\n    \"test:jsdoc\": \"jsdoc -c .jsdoc.json\",\n    \"test:lint:eslint\": \"eslint src/**/*.js\",\n    \"test:lint:prettier\": \"prettier --check src/**/*.{js,ts}\",\n    \"test:lint\": \"npm run test:lint:eslint && npm run test:lint:prettier\",\n    \"test:unit\": \"jest --config src/.jest.config.json\",\n    \"test\": \"npm run test:lint && npm run test:jsdoc && npm run test:unit && npm run test:integration\"\n  
},\n  \"lint-staged\": {\n    \"*.{js}\": \"eslint --cache --fix\",\n    \"*.{js,ts,json,css,md}\": \"prettier --write\"\n  }\n}\n"
  },
  {
    "path": "simulations/updateGraphRender.html",
    "content": "<div class=\"graph\" style=\"height: 100%\"></div>\n<script src=\"./build/updateGraphRender.js\"></script>\n"
  },
  {
    "path": "simulations/updateGraphRender.ts",
    "content": "import {\n  auditTime,\n  EMPTY,\n  filter,\n  finalize,\n  from,\n  interval,\n  map,\n  pipe,\n  switchMap,\n  take,\n} from 'rxjs';\n\nimport {layoutGraphContext} from '../src/devtools/layoutGraphContext';\nimport {deserializeGraphContext} from '../src/devtools/deserializeGraphContext';\nimport {serializeGraphContext} from '../src/devtools/serializeGraphContext';\nimport {WebAudioRealtimeData} from '../src/devtools/WebAudioRealtimeData';\nimport {integrateWebAudioGraph} from '../src/devtools/WebAudioGraphIntegrator';\n\nimport {updateGraphRender} from '../src/panel/updateGraphRender';\nimport {AudioGraphRender} from '../src/panel/graph/AudioGraphRender';\n\nimport {OSCILLATOR_GAIN_PARAM_EVENTS} from '../fixtures/oscillatorGainParam';\nimport {updateGraphSizes} from '../src/panel/updateGraphSizes';\n\nfunction main() {\n  const graphContainer = document.querySelector('.graph') as HTMLElement;\n  const graphRender = new AudioGraphRender({\n    elementContainer: graphContainer,\n  });\n  graphRender.init();\n  graphContainer.appendChild(graphRender.pixiView);\n\n  const simulation = () =>\n    pipe(\n      integrateWebAudioGraph({\n        pollContext() {\n          return EMPTY;\n        },\n      } as unknown as WebAudioRealtimeData),\n      auditTime(1),\n      map(serializeGraphContext),\n      filter((graphContext) => graphContext.graph !== null),\n      map(updateGraphSizes(graphRender)),\n      map(deserializeGraphContext),\n      map(layoutGraphContext),\n      map(serializeGraphContext),\n      map(updateGraphRender(graphRender)),\n    );\n\n  interval(50)\n    .pipe(\n      take(OSCILLATOR_GAIN_PARAM_EVENTS.length),\n      switchMap((_, i) =>\n        from(\n          OSCILLATOR_GAIN_PARAM_EVENTS.slice(-1).concat(\n            OSCILLATOR_GAIN_PARAM_EVENTS.slice(\n              0,\n              i % (OSCILLATOR_GAIN_PARAM_EVENTS.length - 1),\n            ),\n            OSCILLATOR_GAIN_PARAM_EVENTS.slice(\n              (i + 1) % (OSCILLATOR_GAIN_PARAM_EVENTS.length - 1),\n              OSCILLATOR_GAIN_PARAM_EVENTS.length - 1,\n            ),\n          ),\n        ),\n      ),\n      simulation(),\n      finalize(() => graphContainer.classList.add('complete')),\n    )\n    .subscribe();\n}\n\nmain();\n"
  },
  {
    "path": "simulations/webpack.config.js",
    "content": "const {resolve} = require('path');\n\nconst srcConfig = require('../src/webpack.config');\n\nmodule.exports = (env, argv) => ({\n  ...srcConfig(env, argv),\n  entry: {\n    updateGraphRender: resolve(__dirname, './updateGraphRender'),\n  },\n  output: {\n    path: resolve(__dirname, './build'),\n  },\n});\n"
  },
  {
    "path": "src/.jest.config.json",
    "content": "{\n  \"collectCoverage\": true,\n  \"injectGlobals\": false,\n  \"transform\": {\n    \"\\\\.[jt]sx?$\": \"babel-jest\"\n  },\n  \"coveragePathIgnorePatterns\": [\"<rootDir>/chrome/\"]\n}\n"
  },
  {
    "path": "src/build/make-chrome-extension.js",
    "content": "/**\n * A nodejs script that copies files, writes a extension manifest, and zips it\n * all up.\n *\n * @namespace makeChromeExtension\n */\n\nconst fs = require('fs').promises;\nconst {createWriteStream} = require('fs');\nconst path = require('path');\n\nconst mustache = require('mustache');\nconst {ZipFile} = require('yazl');\n\nmain();\n\n/**\n * Copy files, generate extension manifest, and zip the unpacked extension.\n *\n * Calls other methods in this script.\n *\n * @memberof makeChromeExtension\n */\nasync function main() {\n  await Promise.all([\n    copyFiles({\n      src: '..',\n      dest: '../../build/audion',\n      files: ['panel.html', 'devtools.html'],\n    }),\n    generateManifest({\n      view: {version: require('../../package.json').version},\n      dest: '../../build/audion',\n    }),\n  ]);\n  await zipChromeExtension({\n    src: '../../build',\n    dir: 'audion',\n  });\n}\n\n/**\n * Copy file paths from a src directory to a dest directory.\n *\n * @param {object} options\n * @memberof makeChromeExtension\n */\nasync function copyFiles({src, dest, files, cwd = __dirname}) {\n  await Promise.all(\n    files.map(async (file) => {\n      await mkdir(path.resolve(cwd, dest, path.dirname(file)));\n      await fs.copyFile(\n        path.resolve(cwd, src, file),\n        path.resolve(cwd, dest, file),\n      );\n    }),\n  );\n}\n\n/**\n * Generate a extension manifest from a template file.\n *\n * @param {object} options\n * @memberof makeChromeExtension\n */\nasync function generateManifest({\n  view,\n  dest,\n  file = 'manifest.json',\n  cwd = __dirname,\n}) {\n  await mkdir(path.resolve(cwd, dest, path.dirname(file)));\n  await fs.writeFile(\n    path.resolve(cwd, dest, file),\n    mustache.render(\n      await fs.readFile(\n        path.resolve(__dirname, 'manifest.json.mustache'),\n        'utf8',\n      ),\n      view,\n    ),\n  );\n}\n\n/**\n * Zip the unpacked chrome extension.\n *\n * @param {object} options\n * @memberof makeChromeExtension\n */\nasync function zipChromeExtension({\n  src,\n  cwd = __dirname,\n  dir,\n  file = `${dir}.zip`,\n}) {\n  await unlink(path.resolve(cwd, src, file));\n  const files = await readdirRecursive(path.resolve(cwd, src, dir));\n\n  const output = createWriteStream(path.resolve(cwd, src, file));\n  const zip = new ZipFile();\n  const zipDone = new Promise((resolve, reject) =>\n    zip.outputStream.pipe(output).on('close', resolve).on('error', reject),\n  );\n  for (const file of files) {\n    zip.addFile(path.resolve(cwd, src, dir, file), file);\n  }\n  zip.end();\n\n  await zipDone;\n}\n\n/**\n * Read entry names in a directory recursively.\n * @param {string} dir directory to recursively read\n * @return {Promise<Array<string>>} array of paths relative to `dir`\n * @memberof makeChromeExtension\n */\nasync function readdirRecursive(dir) {\n  return (\n    await Promise.all(\n      (\n        await fs.readdir(dir)\n      ).map(async (file) => {\n        try {\n          return (await readdirRecursive(path.resolve(dir, file))).map(\n            (subfile) => path.join(file, subfile),\n          );\n        } catch (err) {\n          if (err.code === 'ENOTDIR') {\n            return file;\n          }\n          throw err;\n        }\n      }),\n    )\n  ).flat();\n}\n\n/**\n * Create a directory if it does not already exist.\n *\n * @param {string} dirpath directory to create\n * @memberof makeChromeExtension\n */\nasync function mkdir(dirpath) {\n  try {\n    await fs.mkdir(dirpath, {recursive: true});\n  } 
catch (err) {\n    if (err.code === 'EEXIST') {\n      return;\n    }\n    throw err;\n  }\n}\n\n/**\n * Unlink a file from the filesystem if it exists.\n *\n * @param {string} filepath file to unlink\n * @memberof makeChromeExtension\n */\nasync function unlink(filepath) {\n  try {\n    await fs.unlink(filepath);\n  } catch (err) {\n    if (err.code === 'ENOENT') {\n      return;\n    }\n    throw err;\n  }\n}\n"
  },
  {
    "path": "src/build/manifest.json.mustache",
    "content": "{\n  \"manifest_version\": 3,\n  \"name\": \"Audion\",\n  \"version\": \"{{version}}\",\n  \"description\": \"Web Audio DevTools Extension (graph visualizer)\",\n  \"devtools_page\": \"devtools.html\",\n  \"options_ui\": {\n      \"page\": \"options.html\",\n      \"open_in_tab\": false\n  },\n  \"permissions\": [\n    \"debugger\"\n  ]\n}\n"
  },
  {
    "path": "src/chrome/API.js",
    "content": "/// <reference path=\"./Debugger.js\" />\n/// <reference path=\"./DevTools.js\" />\n/// <reference path=\"./Runtime.js\" />\n\n/**\n * Top level chrome extension API type. Contains references of each accessible\n * extension api.\n *\n * @typedef Chrome.API\n * @property {Chrome.Debugger} debugger\n * @property {Chrome.DevTools} devtools\n * @property {Chrome.Runtime} runtime\n */\n"
  },
  {
    "path": "src/chrome/Debugger.js",
    "content": "/// <reference path=\"Types.js\" />\n\n/**\n * [Chrome extension api][1] to the [Chrome Debugger Protocol][2]. Used by this\n * extension to access the [Web Audio domain][3].\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/debugger/\n * [2]: https://chromedevtools.github.io/devtools-protocol/\n * [3]: ChromeDebuggerWebAudioDomain.html\n *\n * @typedef Chrome.Debugger\n * @property {function(\n *   Chrome.DebuggerDebuggee, string, function(): void\n * ): void} attach\n * @property {function(Chrome.DebuggerDebuggee, function(): void): void} detach\n * @property {Chrome.Event<function(object, string): void>} onDetach\n * @property {Chrome.Event<Chrome.DebuggerOnEventListener>} onEvent\n * @property {Chrome.DebuggerSendCommand} sendCommand\n * @see https://developer.chrome.com/docs/extensions/reference/debugger/\n * @see https://chromedevtools.github.io/devtools-protocol/\n */\n\n/**\n * @callback Chrome.DebuggerSendCommand\n * @param {Chrome.DebuggerDebuggee} target\n * @param {string} method\n * @param {*} [commandParams]\n * @param {*} [callback]\n */\n\n/**\n * A debuggee identifier.\n *\n * Either tabId or extensionId must be specified.\n *\n * @typedef Chrome.DebuggerDebuggee\n * @property {string} [extensionId]\n * @property {string} [tabId]\n * @property {string} [targetId]\n * @see https://developer.chrome.com/docs/extensions/reference/debugger/#type-Debuggee\n */\n\n/**\n * Arguments passed to Debugger onEvent listeners.\n *\n * @callback Chrome.DebuggerOnEventListener\n * @param {Chrome.DebuggerDebuggee} source\n * @param {string} method\n * @param {*} [params]\n * @return {void}\n */\n"
  },
  {
    "path": "src/chrome/DebuggerPageDomain.ts",
    "content": "/**\n * @file\n * Strings passed to `chrome.debugger.sendCommand` and received from\n * `chrome.debugger.onEvent` callbacks.\n */\n\nimport {ProtocolMapping} from 'devtools-protocol/types/protocol-mapping';\n\n/** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#methods */\nexport enum PageDebuggerMethod {\n  disable = 'Page.disable',\n  enable = 'Page.enable',\n}\n\n/** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#events */\nexport enum PageDebuggerEvent {\n  domContentEventFired = 'Page.domContentEventFired',\n  frameAttached = 'Page.frameAttached',\n  frameDetached = 'Page.frameDetached',\n  frameNavigated = 'Page.frameNavigated',\n  frameRequestedNavigation = 'Page.frameRequestedNavigation',\n  frameStartedLoading = 'Page.frameStartedLoading',\n  frameStoppedLoading = 'Page.frameStoppedLoading',\n  lifecycleEvent = 'Page.lifecycleEvent',\n  loadEventFired = 'Page.loadEventFired',\n}\n\n/** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#types */\nexport type PageDebuggerEventParams<Name extends PageDebuggerEvent> =\n  ProtocolMapping.Events[Name];\n"
  },
  {
    "path": "src/chrome/DebuggerWebAudioDomain.ts",
    "content": "/**\n * @file\n * Strings passed to `chrome.debugger.sendCommand` and received from\n * `chrome.debugger.onEvent` callbacks.\n */\n\nimport {ProtocolMapping} from 'devtools-protocol/types/protocol-mapping';\n\n/** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#methods */\nexport enum WebAudioDebuggerMethod {\n  disable = 'WebAudio.disable',\n  enable = 'WebAudio.enable',\n  getRealtimeData = 'WebAudio.getRealtimeData',\n}\n\n/** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#events */\nexport enum WebAudioDebuggerEvent {\n  audioListenerCreated = 'WebAudio.audioListenerCreated',\n  audioListenerWillBeDestroyed = 'WebAudio.audioListenerWillBeDestroyed',\n  audioNodeCreated = 'WebAudio.audioNodeCreated',\n  audioNodeWillBeDestroyed = 'WebAudio.audioNodeWillBeDestroyed',\n  audioParamCreated = 'WebAudio.audioParamCreated',\n  audioParamWillBeDestroyed = 'WebAudio.audioParamWillBeDestroyed',\n  contextChanged = 'WebAudio.contextChanged',\n  contextCreated = 'WebAudio.contextCreated',\n  contextWillBeDestroyed = 'WebAudio.contextWillBeDestroyed',\n  nodeParamConnected = 'WebAudio.nodeParamConnected',\n  nodeParamDisconnected = 'WebAudio.nodeParamDisconnected',\n  nodesConnected = 'WebAudio.nodesConnected',\n  nodesDisconnected = 'WebAudio.nodesDisconnected',\n}\n\n/** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#types */\nexport type WebAudioDebuggerEventParams<Name extends WebAudioDebuggerEvent> =\n  ProtocolMapping.Events[Name];\n"
  },
  {
    "path": "src/chrome/DevTools.js",
    "content": "/// <reference path=\"Types.js\" />\n\n/**\n * [Chrome extension api][1] to devtool inspector available to a extension's\n * devtools page specified by the extension manifest's `\"devtools_page\"`.\n *\n * [1]: https://developer.chrome.com/docs/extensions/mv3/devtools/\n *\n * @typedef Chrome.DevTools\n * @property {Chrome.DevToolsInspectedWindow} inspectedWindow\n * @property {Chrome.DevtoolsNetwork} network\n * @property {Chrome.DevToolsPanels} panels\n */\n\n/**\n * [Extension api][1] for the tab inspected by this `\"devtools_page\"` instance.\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_inspectedWindow/\n *\n * @typedef Chrome.DevToolsInspectedWindow\n * @property {string} tabId\n */\n\n/**\n * @typedef Chrome.DevtoolsNetwork\n * @property {Chrome.Event<function(string): void>} onNavigated\n */\n\n/**\n * [Extension api][1] to manage panels this extension adds.\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/\n *\n * @typedef Chrome.DevToolsPanels\n * @property {Chrome.DevToolsPanelsCreateFunction} create\n * @property {'default' | 'dark'} themeName\n */\n\n/**\n * [`chrome.devtools.panels.create(...)`][1]\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/#method-create\n *\n * @callback Chrome.DevToolsPanelsCreateFunction\n * @param {string} title\n * @param {string} icon\n * @param {string} pageUrl\n * @param {Chrome.DevToolsPanelsCreateCallback} onPanelCreated\n * @return {void}\n */\n\n/**\n * @callback Chrome.DevToolsPanelsCreateCallback\n * @param {Chrome.DevToolsPanel} panel\n * @return {void}\n */\n\n/**\n * [Panel][1] created by [`chrome.devtools.panels.create`][2].\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/#type-ExtensionPanel\n * [2]: Chrome.html#.DevToolsPanelsCreateFunction\n *\n * @typedef Chrome.DevToolsPanel\n * @property {Chrome.Event<function(): void>} onHidden\n * @property {Chrome.Event<function(): void>} onShown\n */\n"
  },
  {
    "path": "src/chrome/Runtime.js",
    "content": "/// <reference path=\"Types.js\" />\n\n/**\n * [Chrome extension api][1] about the extension the host platform and\n * communication betwen different extension contexts.\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/\n *\n * @typedef Chrome.Runtime\n * @property {function(): Chrome.RuntimePort} connect\n * @property {function(string): string} getURL\n * @property {Chrome.RuntimeError} lastError\n * @property {Chrome.Event<Chrome.RuntimeOnConnectCallback>} onConnect\n */\n\n/**\n * @typedef Chrome.RuntimeError\n * @property {string} [message]\n * @see https://developer.chrome.com/docs/extensions/reference/runtime/#property-lastError\n */\n\n/**\n * Callback passed to [`chrome.runtime.onConnect`][1].\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/#event-onConnect\n *\n * @callback Chrome.RuntimeOnConnectCallback\n * @param {Chrome.RuntimePort} port\n * @return {void}\n */\n\n/**\n * [Port][1] to another chrome extension runtime context.\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/#type-Port\n *\n * @typedef Chrome.RuntimePort\n * @property {function(): void} disconnect\n * @property {Chrome.Event<function(Chrome.RuntimePort): void>} onDisconnect\n * @property {Chrome.Event<function(*, Chrome.RuntimePort): void>} onMessage\n * @property {function(*): void} postMessage\n */\n"
  },
  {
    "path": "src/chrome/Types.js",
    "content": "/**\n * Types provided by the [chrome extension api][1].\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/\n *\n * @namespace Chrome\n */\n\n/**\n * Generic [event emitter][1] in chrome extension types.\n *\n * [1]: https://developer.chrome.com/docs/extensions/reference/events/#type-Event\n *\n * @typedef Chrome.Event\n * @property {Chrome.EventCallback<T>} addListener\n * @property {Chrome.EventCallback<T>} removeListener\n * @template {function} T\n */\n\n/**\n * Function taking an event listener passed to a {@link Chrome.Event} instance.\n *\n * @callback Chrome.EventCallback\n * @param {T} callback\n * @template {function} T\n */\n"
  },
  {
    "path": "src/chrome/index.js",
    "content": "/// <reference path=\"API.js\" />\n/// <reference path=\"Types.js\" />\n\n/**\n * Global chrome extension api instance.\n *\n * Normally available on the global context `chrome` identifier. Use this export\n * to assist in testing use of the chrome extension api from inside this\n * extension.\n *\n * @type {Chrome.API}\n * @memberof Chrome\n * @alias chrome\n */\nexport const chrome = getChrome();\n\n/**\n * Return a no-operation implementation of Chrome.API. Used in testing.\n *\n * @return {Chrome.API}\n * @memberof Chrome\n */\nfunction noopChrome() {\n  /**\n   * @return {Chrome.Event<*>}\n   */\n  function noopEvent() {\n    return {addListener() {}, removeListener() {}};\n  }\n  return {\n    debugger: {\n      attach() {},\n      detach() {},\n      onDetach: noopEvent(),\n      onEvent: noopEvent(),\n      sendCommand() {},\n    },\n    devtools: {\n      inspectedWindow: {tabId: 'tab'},\n      network: {onNavigated: noopEvent()},\n      panels: {create() {}},\n    },\n    runtime: {\n      connect() {\n        return {\n          onDisconnect: noopEvent(),\n          onMessage: noopEvent(),\n          disconnect() {},\n          postMessage(message) {},\n        };\n      },\n      getURL(url) {\n        return url;\n      },\n      /**\n       * If a called chrome api method errored, lastError is set to that error\n       * while the provided callback is run. Otherwise lastError is not set.\n       */\n      lastError: undefined,\n      onConnect: noopEvent(),\n    },\n  };\n}\n\n/**\n * Return the global scope.\n *\n * @return {*}\n * @memberof Chrome\n */\nfunction getGlobal() {\n  if (typeof window === 'object') return window;\n  if (typeof self === 'object') return self;\n  if (typeof globalThis === 'object') return globalThis;\n  if (typeof global === 'object') return global;\n  if (typeof process === 'object') return process;\n  throw new Error('Cannot find global object');\n}\n\n/**\n * Return a {@link Chrome.API} instance. Return a copy from\n * {@link Chrome.noopChrome} if running under a unit test environment.\n *\n * @return {Chrome.API}\n * @memberof Chrome\n */\nfunction getChrome() {\n  const g = getGlobal();\n  if (\n    'chrome' in g &&\n    typeof g.chrome === 'object' &&\n    typeof g.chrome.devtools === 'object'\n  ) {\n    return g.chrome;\n  }\n  return noopChrome();\n}\n"
  },
  {
    "path": "src/custom.d.ts",
    "content": "declare module '*.svg' {\n  const content: any;\n  export default content;\n}\n\ndeclare module '*.css' {\n  const content: any;\n  export default content;\n}\n"
  },
  {
    "path": "src/devtools/DebuggerAttachEventController.ts",
    "content": "import {\n  BehaviorSubject,\n  combineLatest,\n  concat,\n  defer,\n  EMPTY,\n  Observable,\n  of,\n  Subject,\n  Subscriber,\n} from 'rxjs';\nimport {\n  catchError,\n  delay,\n  distinctUntilChanged,\n  exhaustMap,\n  filter,\n  finalize,\n  map,\n  share,\n  take,\n} from 'rxjs/operators';\n\nimport {chrome} from '../chrome';\nimport {PageDebuggerMethod} from '../chrome/DebuggerPageDomain';\nimport {WebAudioDebuggerMethod} from '../chrome/DebuggerWebAudioDomain';\n\n/**\n * Permission value in regards to calling `chrome.debugger.attach`.\n *\n * When the extension calls `chrome.debugger.attach` a notification will display\n * in devtools that the extension is debugging the tab. Attaching when the user\n * does not expect it and then see this notification is not desired. The user\n * needs to grant permission for the extension the privilege to attach, or\n * reject prior permission.\n *\n * Permission could be implied when the extension's panel is opened.\n *\n * Permission should be rejected when the debugging notification is canceled or\n * dismissed.\n *\n * Permission could be granted more explicitly by a panel component when the\n * panel is visible but the extension does not have permission.\n *\n * WebAudioEventObserver will be instructed with rules like the above by other\n * functions outside of this file.\n */\nenum AttachPermission {\n  /**\n   * Initial value.\n   *\n   * When WebAudioEventObserver is created, it does not know if permission has\n   * been granted or not and should treat this as **not having** permission.\n   */\n  UNKNOWN,\n\n  /**\n   * Permission has been granted by a user action. WebAudioEventObserver may\n   * attach to `chrome.debugger`.\n   */\n  TEMPORARY,\n\n  /**\n   * Permission has been rejected. WebAudioEventObserver must not attach to\n   * `chrome.debugger`.\n   */\n  REJECTED,\n}\n\n/**\n * Value used to indicate if the `chrome.debugger` attachment and\n * receiving `chrome.debugger.onEvent` events are \"active\".\n */\nenum BinaryTransition {\n  DEACTIVATING = 'deactivating',\n  IS_INACTIVE = 'isInactive',\n  ACTIVATING = 'activating',\n  IS_ACTIVE = 'isActive',\n}\n\nexport interface DebuggerAttachEventState {\n  permission: AttachPermission;\n  attachInterest: number;\n  attachState: BinaryTransition;\n  pageEventInterest: number;\n  pageEventState: BinaryTransition;\n  webAudioEventInterest: number;\n  webAudioEventState: BinaryTransition;\n}\n\n/** Chrome Devtools Protocol version to attach to. */\nconst debuggerVersion = '1.3';\n\n/** Chrome tab to attach the debugger to. */\nconst {tabId} = chrome.devtools.inspectedWindow;\n\nexport enum ChromeDebuggerAPIEventName {\n  detached = 'ChromeDebuggerAPI.detached',\n}\n\nexport interface ChromeDebuggerAPIDetachEventParams {\n  reason: 'canceled_by_user' | 'target_closed';\n}\n\nexport interface ChromeDebuggerAPIDetachEvent {\n  method: ChromeDebuggerAPIEventName.detached;\n  params: ChromeDebuggerAPIDetachEventParams;\n}\n\nexport type ChromeDebuggerAPIEvent = ChromeDebuggerAPIDetachEvent;\n\nexport type ChromeDebuggerAPIEventParams = ChromeDebuggerAPIEvent['params'];\n\n/**\n * Control attachment to chrome.debugger depending on if the user has given\n * permission and how many parts of the extension need attachment.\n *\n * @memberof Audion\n * @alias DebuggerAttachEventController\n */\nexport class DebuggerAttachEventController {\n  /** Does user permit extension to use `chrome.debugger`. 
*/\n  permission$: PermissionSubject;\n  /** How many subscriptions want to attach to `chrome.debugger`. */\n  attachInterest$: CounterSubject;\n  attachState$: Observable<BinaryTransition>;\n  /**\n   * How many subscriptions want to receive page events through\n   * `chrome.debugger.onEvent`.\n   */\n  pageEventInterest$: CounterSubject;\n  pageEventState$: Observable<BinaryTransition>;\n  /**\n   * How many subscriptions want to receive web audio events through\n   * `chrome.debugger.onEvent`.\n   */\n  webAudioEventInterest$: CounterSubject;\n  webAudioEventState$: Observable<BinaryTransition>;\n\n  combinedState$: Observable<DebuggerAttachEventState>;\n\n  debuggerEvent$: Observable<ChromeDebuggerAPIEvent>;\n\n  constructor() {\n    // Create an interface of subjects to track changes in state with the\n    // `chrome.debugger` api.\n    const debuggerSubject = {\n      // Does the extension have permission from the user to use `chrome.debugger` api.\n      permission: new PermissionSubject(),\n      // How many entities want to attach to the debugger to call `sendCommand`\n      // or listen to `onEvent`.\n      attachInterest: new CounterSubject(0),\n      // attachState must be IS_ACTIVE for `chrome.debugger.sendCommand` to be used.\n      attachState: new BinaryTransitionSubject({\n        initialState: BinaryTransition.IS_INACTIVE,\n        activateAction: () => attach({tabId}, debuggerVersion),\n        deactivateAction: () => detach({tabId}),\n      }),\n      // How many entities want to listen to page events through `onEvent`.\n      pageEventInterest: new CounterSubject(0),\n      // pageEventState must be IS_ACTIVE for `onEvent` to receive page events.\n      pageEventState: new BinaryTransitionSubject({\n        initialState: BinaryTransition.IS_INACTIVE,\n        activateAction: () => sendCommand({tabId}, PageDebuggerMethod.enable),\n        deactivateAction: () =>\n          sendCommand({tabId}, PageDebuggerMethod.disable),\n      }),\n      // How many entities want to listen to web audio events through `onEvent`.\n      webAudioEventInterest: new CounterSubject(0),\n      // webAudioEventState must be IS_ACTIVE for `onEvent` to receive events.\n      webAudioEventState: new BinaryTransitionSubject({\n        initialState: BinaryTransition.IS_INACTIVE,\n        activateAction: () =>\n          sendCommand({tabId}, WebAudioDebuggerMethod.enable),\n        deactivateAction: () =>\n          sendCommand({tabId}, WebAudioDebuggerMethod.disable),\n      }),\n    };\n    this.permission$ = debuggerSubject.permission;\n    this.attachInterest$ = debuggerSubject.attachInterest;\n    this.attachState$ = debuggerSubject.attachState;\n    this.pageEventInterest$ = debuggerSubject.pageEventInterest;\n    this.pageEventState$ = debuggerSubject.pageEventState;\n    this.webAudioEventInterest$ = debuggerSubject.webAudioEventInterest;\n    this.webAudioEventState$ = debuggerSubject.webAudioEventState;\n\n    // Observable of changes to state derived from debuggerSubject.\n    const debuggerState$ = (this.combinedState$ =\n      // Push objects mapping each key in debuggerSubject to the latest value\n      // pushed by that member.\n      combineLatest(debuggerSubject).pipe(\n        // Filter out combined state that is not different from the last value.\n        distinctUntilChanged(\n          (previous, current) =>\n            previous.permission === current.permission &&\n            previous.attachInterest === current.attachInterest &&\n            previous.attachState === current.attachState 
&&\n            previous.pageEventInterest === current.pageEventInterest &&\n            previous.pageEventState === current.pageEventState &&\n            previous.webAudioEventInterest === current.webAudioEventInterest &&\n            previous.webAudioEventState === current.webAudioEventState,\n        ),\n        // Make one subscription debuggerSubject once for many subscribers.\n        share(),\n      ));\n\n    // The following subscriptions govern debuggerSubject.\n\n    // Govern attachment to `chrome.debugger`.\n    debuggerState$.subscribe({\n      next: (state) => {\n        // When debugger state has permission to attach to `chrome.debugger` and\n        // something wants to use `chrome.debugger`, activate the attachment.\n        // Otherwise deactivate the attachment.\n        if (\n          state.permission === AttachPermission.TEMPORARY &&\n          state.attachInterest > 0\n        ) {\n          debuggerSubject.attachState.activate();\n        } else {\n          debuggerSubject.attachState.deactivate();\n        }\n      },\n    });\n\n    this.debuggerEvent$ = onDebuggerDetach$.pipe(\n      map(([, reason]) => {\n        return {\n          method: ChromeDebuggerAPIEventName.detached,\n          params: {reason},\n        } as ChromeDebuggerAPIDetachEvent;\n      }),\n    );\n\n    // Govern permission rejection and externally induced detachment.\n    onDebuggerDetach$.subscribe({\n      next([, reason]) {\n        if (reason === 'canceled_by_user') {\n          // Reject permission to use `chrome.debugger` in this extension. We\n          // understand this event to be an explicit rejection from the\n          // extension's user.\n          debuggerSubject.permission.reject();\n        }\n\n        // Immediately go to the inactive state. Detachment was initiated\n        // outside the extension and does not need to be requested.\n        debuggerSubject.attachState.next(BinaryTransition.IS_INACTIVE);\n      },\n    });\n\n    // Govern receiving events through `chrome.debugger.onEvent`.\n    debuggerState$.subscribe(\n      activateEventWhileAttached(\n        debuggerSubject.pageEventState,\n        ({pageEventInterest}) => pageEventInterest > 0,\n      ),\n    );\n    debuggerState$.subscribe(\n      activateEventWhileAttached(\n        debuggerSubject.webAudioEventState,\n        ({webAudioEventInterest}) => webAudioEventInterest > 0,\n      ),\n    );\n  }\n\n  /**\n   * Attach to the debugger if not already, and call chrome.debugger.sendCommand.\n   * @param method Chrome devtools protocol method like 'HeapProfiler.collectGarbage'.\n   * @returns observable that completes once done without pushing any values\n   */\n  sendCommand(method: string): Observable<never> {\n    this.attachInterest$.increment();\n    return this.attachState$.pipe(\n      filter((state) => state === BinaryTransition.IS_ACTIVE),\n      take(1),\n      exhaustMap(() => sendCommand({tabId}, method)),\n      finalize(() => this.attachInterest$.decrement()),\n    );\n  }\n}\n\nfunction activateEventWhileAttached(\n  eventState: BinaryTransitionSubject,\n  interestExists: (state: DebuggerAttachEventState) => boolean,\n): Partial<Subscriber<DebuggerAttachEventState>> {\n  return {\n    next(state) {\n      if (\n        state.attachState === BinaryTransition.IS_ACTIVE &&\n        interestExists(state)\n      ) {\n        // Start receiving events. 
function activateEventWhileAttached(\n  eventState: BinaryTransitionSubject,\n  interestExists: (state: DebuggerAttachEventState) => boolean,\n): Partial<Subscriber<DebuggerAttachEventState>> {\n  return {\n    next(state) {\n      if (\n        state.attachState === BinaryTransition.IS_ACTIVE &&\n        interestExists(state)\n      ) {\n        // Start receiving events. The attachment is active and some entities\n        // are listening for events.\n        eventState.activate();\n      } else {\n        if (state.attachState === BinaryTransition.IS_ACTIVE) {\n          // Stop receiving events. The attachment is still active but no\n          // entities are listening for events.\n          eventState.deactivate();\n        } else {\n          // \"Skip\" deactivation of receiving events and immediately go to the\n          // inactive state. The process of detachment, whether requested by\n          // the extension or initiated otherwise, has implicitly stopped\n          // reception of events.\n          eventState.next(BinaryTransition.IS_INACTIVE);\n        }\n      }\n    },\n  };\n}\n\n/**\n * Create a function that returns an observable that completes when the api\n * calls back.\n * @param method `chrome` api method whose last argument is a callback\n * @param thisArg `this` inside of the method\n * @returns observable that completes when the method is done\n */\nfunction bindChromeCallback<P extends any[]>(\n  method: (...args: [...params: P, callback: () => void]) => void,\n  thisArg = null,\n) {\n  return (...args: P) =>\n    new Observable<never>((subscriber) => {\n      method.call(thisArg, ...args, () => {\n        if (chrome.runtime.lastError) {\n          subscriber.error(chrome.runtime.lastError);\n        } else {\n          subscriber.complete();\n        }\n      });\n    });\n}\n\n/**\n * Return an observable that pushes events from a `chrome` api event.\n * @param event `chrome` api event\n * @returns observable of `chrome` api events\n */\nfunction fromChromeEvent<A extends any[]>(\n  event: Chrome.Event<(...args: A) => any>,\n) {\n  return new Observable<A>((subscriber) => {\n    const listener = (...args: A) => {\n      subscriber.next(args);\n    };\n    event.addListener(listener);\n    return () => {\n      event.removeListener(listener);\n    };\n  });\n}\n\n/**\n * Call `chrome.debugger.attach`.\n *\n * @see\n * https://developer.chrome.com/docs/extensions/reference/debugger/#method-attach\n */\nconst attach = bindChromeCallback(chrome.debugger.attach, chrome.debugger);\n\n/**\n * Call `chrome.debugger.detach`.\n *\n * @see\n * https://developer.chrome.com/docs/extensions/reference/debugger/#method-detach\n */\nconst detach = bindChromeCallback(chrome.debugger.detach, chrome.debugger);\n\n/**\n * Call `chrome.debugger.sendCommand`.\n *\n * @see\n * https://developer.chrome.com/docs/extensions/reference/debugger/#method-sendCommand\n */\nconst sendCommand = bindChromeCallback(\n  chrome.debugger.sendCommand as (\n    target: Chrome.DebuggerDebuggee,\n    method: string,\n    params?,\n    callback?,\n  ) => void,\n  chrome.debugger,\n);\n\n/**\n * Observable of `chrome.debugger.onDetach` events.\n */\nconst onDebuggerDetach$ = fromChromeEvent<\n  [target: Chrome.DebuggerDebuggee, reason: string]\n>(chrome.debugger.onDetach);\n\n/**\n * Store whether the user allows the extension to use `chrome.debugger` api.\n */\nexport class PermissionSubject extends BehaviorSubject<AttachPermission> {\n  constructor() {\n    super(AttachPermission.UNKNOWN);\n  }\n\n  /**\n   * Permit use of `chrome.debugger`.\n   */\n  grantTemporary() {\n    if (this.value === AttachPermission.UNKNOWN) {\n      this.next(AttachPermission.TEMPORARY);\n    }\n  }\n\n  /**\n   * Reject use of `chrome.debugger`.\n   */\n  reject() {\n    if (this.value !== AttachPermission.REJECTED) {\n      this.next(AttachPermission.REJECTED);\n    }\n  }\n}\n\n
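// Usage sketch (illustrative):\n//\n//   const permission$ = new PermissionSubject();\n//   permission$.grantTemporary(); // UNKNOWN -> TEMPORARY\n//   permission$.reject();         // any non-REJECTED value -> REJECTED\n//   permission$.grantTemporary(); // no-op: rejection is not reversible\n\n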
/**\n * Description of a transition in BinaryTransitionSubject.\n */\ninterface BinaryTransitionDescription {\n  /** The state the Subject must start in to perform this transition. */\n  beginningState: BinaryTransition;\n  /** The state the Subject is in while performing this transition. */\n  intermediateState: BinaryTransition;\n  /** The state the Subject is in after the action succeeds. */\n  successState: BinaryTransition;\n  /** The state the Subject is in after the action fails. */\n  errorState: BinaryTransition;\n  /**\n   * Delegate that does some work to modify other application state to the\n   * desired state.\n   */\n  action: () => Observable<void>;\n}\n\n/**\n * Control a transition between inactive and active state. To perform a\n * transition the subject enters an intermediate state and calls a delegate to\n * do some action. After the action completes successfully the subject enters\n * the desired state.\n */\nclass BinaryTransitionSubject extends BehaviorSubject<BinaryTransition> {\n  private readonly activateTransition: BinaryTransitionDescription;\n  private readonly deactivateTransition: BinaryTransitionDescription;\n\n  constructor({\n    initialState,\n    activateAction,\n    deactivateAction,\n  }: {\n    initialState: BinaryTransition;\n    activateAction: () => Observable<void>;\n    deactivateAction: () => Observable<void>;\n  }) {\n    super(initialState);\n    this.activateTransition = {\n      beginningState: BinaryTransition.IS_INACTIVE,\n      intermediateState: BinaryTransition.ACTIVATING,\n      successState: BinaryTransition.IS_ACTIVE,\n      errorState: BinaryTransition.IS_INACTIVE,\n      action: activateAction,\n    };\n    this.deactivateTransition = {\n      beginningState: BinaryTransition.IS_ACTIVE,\n      intermediateState: BinaryTransition.DEACTIVATING,\n      successState: BinaryTransition.IS_INACTIVE,\n      errorState: BinaryTransition.IS_INACTIVE,\n      action: deactivateAction,\n    };\n  }\n\n
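  // State flow for activateTransition (per the descriptions above):\n  //   IS_INACTIVE -> ACTIVATING -> IS_ACTIVE on action success,\n  //   IS_INACTIVE -> ACTIVATING -> IS_INACTIVE on action error.\n  // deactivateTransition runs the mirror image through DEACTIVATING.\n\n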
  /**\n   * Transition to a desired state.\n   *\n   * If the subject's value is beginningState, set it to intermediateState;\n   * once the action completes successfully, set it to successState.\n   * @param description transition to perform\n   */\n  transition(description: BinaryTransitionDescription) {\n    if (this.value === description.beginningState) {\n      concat(\n        of(description.intermediateState),\n        description.action(),\n        defer(() =>\n          this.value === description.intermediateState\n            ? of(description.successState)\n            : EMPTY,\n        ),\n      )\n        .pipe(\n          catchError((err) => {\n            console.error(err);\n            if (\n              err.message.startsWith('Another debugger is already attached')\n            ) {\n              return this.value === description.intermediateState\n                ? of(description.successState)\n                : EMPTY;\n            }\n            return of(\n              this.value === description.intermediateState\n                ? description.errorState\n                : description.beginningState,\n            );\n          }),\n        )\n        .subscribe({next: this.next.bind(this)});\n    }\n  }\n\n  /**\n   * If subject is inactive, transition to active.\n   */\n  activate() {\n    this.transition(this.activateTransition);\n  }\n\n  /**\n   * If subject is active, transition to inactive.\n   */\n  deactivate() {\n    this.transition(this.deactivateTransition);\n  }\n}\n\n/**\n * Subject counting some discrete value.\n */\nexport class CounterSubject extends BehaviorSubject<number> {\n  /**\n   * Increase value by 1.\n   */\n  increment() {\n    this.next(this.value + 1);\n  }\n\n  /**\n   * Decrease value by 1.\n   */\n  decrement() {\n    this.next(this.value - 1);\n  }\n}\n"
  },
  {
    "path": "src/devtools/DebuggerEvents.ts",
    "content": "import {filter, map, Observable} from 'rxjs';\nimport {chrome} from '../chrome';\nimport {fromChromeEvent} from '../utils/rxChrome';\nimport {DebuggerAttachEventController} from './DebuggerAttachEventController';\nimport {Audion} from './Types';\n\ntype DebuggerDomain = 'page' | 'webAudio';\n\ninterface DebuggerEventsOptions<D extends DebuggerDomain> {\n  domain: D;\n}\n\ntype DebuggerDomainEvent<D extends DebuggerDomain> = D extends 'page'\n  ? Audion.PageEvent\n  : D extends 'webAudio'\n  ? Audion.WebAudioEvent\n  : never;\n\nexport class DebuggerEventsObservable<\n  D extends DebuggerDomain,\n> extends Observable<DebuggerDomainEvent<D>> {\n  constructor(\n    public attachController: DebuggerAttachEventController,\n    public options: DebuggerEventsOptions<D>,\n  ) {\n    super((subscriber) => {\n      attachController.attachInterest$.increment();\n      attachController[options.domain + 'EventInterest$'].increment();\n      const subscription = fromChromeEvent(chrome.debugger.onEvent)\n        .pipe(\n          map(([debuggeeId, method, params]) => ({method, params})),\n          filter(({method}) =>\n            method.toLowerCase().startsWith(options.domain.toLowerCase()),\n          ),\n        )\n        .subscribe(subscriber);\n      subscription.add(() => {\n        attachController.attachInterest$.decrement();\n        attachController[options.domain + 'EventInterest$'].decrement();\n      });\n      return subscription;\n    });\n  }\n}\n"
  },
  {
    "path": "src/devtools/DevtoolsGraphPanel.test.js",
    "content": "/// <reference path=\"../chrome/Types.js\" />\n/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n/// <reference path=\"../utils/Types.ts\" />\n/// <reference path=\"Types.ts\" />\n\nimport {beforeEach, describe, expect, it, jest} from '@jest/globals';\n\nimport dagre from 'dagre';\nimport {BehaviorSubject, Observable, partition, Subject} from 'rxjs';\nimport {map} from 'rxjs/operators';\n\nimport {chrome} from '../chrome';\n\nimport {DevtoolsGraphPanel} from './DevtoolsGraphPanel';\nimport {serializeGraphContext} from './serializeGraphContext';\n\njest.mock('../chrome');\n\n/**\n * @type {Object<*, Audion.GraphContext>}\n */\nconst mockGraphs = {\n  0: {\n    id: 'context0000',\n    /** @type {ChromeDebuggerWebAudio.BaseAudioContext} */\n    context: {\n      contextId: 'context0000',\n      contextType: 'realtime',\n      contextState: 'running',\n      sampleRate: 48000,\n      maxOutputChannelCount: 2,\n      callbackBufferSize: 1000,\n    },\n    graph: new dagre.graphlib.Graph(),\n    nodes: {},\n  },\n  1: {\n    id: 'context0000',\n    /** @type {ChromeDebuggerWebAudio.BaseAudioContext} */\n    context: {\n      contextId: 'context0000',\n      contextType: 'realtime',\n      contextState: 'suspended',\n      sampleRate: 48000,\n      maxOutputChannelCount: 2,\n      callbackBufferSize: 1000,\n    },\n    graph: new dagre.graphlib.Graph(),\n    nodes: {},\n  },\n  2: {\n    id: 'context0000',\n    context: null,\n    graph: null,\n    nodes: null,\n  },\n};\ndescribe('DevtoolsGraphPanel', () => {\n  let nextGraph = (graph) => {};\n  /** @type {Subject<Audion.GraphContext>} */\n  let subject;\n  /** @type {Chrome.RuntimePort} */\n  let port;\n\n  beforeEach(() => {\n    jest.resetAllMocks();\n\n    subject = new Subject();\n    nextGraph = (value) => subject.next(value);\n\n    /** @type {BehaviorSubject<boolean>} */\n    const gate = new BehaviorSubject();\n    const [gateOpen, gateClose] = partition(gate, Boolean).map(map(() => {}));\n\n    const panel = new DevtoolsGraphPanel(\n      subject.pipe(\n        map(serializeGraphContext),\n        map((graphContext) => ({graphContext})),\n        subscribeWhen(gateOpen, gateClose),\n      ),\n    );\n\n    panel.onPanelShown$.pipe(map(() => true)).subscribe(gate);\n\n    port = mockPort();\n  });\n\n  it('creates a panel with chrome.devtools', () => {\n    expect(chrome.devtools.panels.create).toBeCalled();\n    simulateCreatePanel();\n  });\n\n  it('subscribes to debugger events only after panel is shown', () => {\n    expect(subject.observed).toBe(false);\n\n    const panel = simulateCreatePanel();\n    simulateConnectPort(port);\n\n    expect(subject.observed).toBe(false);\n\n    // Send onShown event to panel creation callback.\n    simulateShowPanel(panel);\n\n    expect(subject.observed).toBe(true);\n  });\n\n  it('posts graphs when connected', () => {\n    // Send onShown event to panel creation callback.\n    const panel = simulateCreatePanel();\n    simulateConnectPort(port);\n    simulateShowPanel(panel);\n\n    nextGraph(mockGraphs[0]);\n    nextGraph(mockGraphs[1]);\n    expect(port.postMessage).toBeCalledTimes(2);\n    expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"graphContext\": Object {\n      \"context\": Object {\n        \"callbackBufferSize\": 1000,\n        \"contextId\": \"context0000\",\n        \"contextState\": \"running\",\n        \"contextType\": \"realtime\",\n        \"maxOutputChannelCount\": 2,\n        \"sampleRate\": 48000,\n     
 },\n      \"graph\": Object {\n        \"edges\": Array [],\n        \"nodes\": Array [],\n        \"options\": Object {\n          \"compound\": false,\n          \"directed\": true,\n          \"multigraph\": false,\n        },\n      },\n      \"id\": \"context0000\",\n      \"nodes\": Object {},\n    },\n  },\n]\n`);\n    expect(port.postMessage.mock.calls[1]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"graphContext\": Object {\n      \"context\": Object {\n        \"callbackBufferSize\": 1000,\n        \"contextId\": \"context0000\",\n        \"contextState\": \"suspended\",\n        \"contextType\": \"realtime\",\n        \"maxOutputChannelCount\": 2,\n        \"sampleRate\": 48000,\n      },\n      \"graph\": Object {\n        \"edges\": Array [],\n        \"nodes\": Array [],\n        \"options\": Object {\n          \"compound\": false,\n          \"directed\": true,\n          \"multigraph\": false,\n        },\n      },\n      \"id\": \"context0000\",\n      \"nodes\": Object {},\n    },\n  },\n]\n`);\n  });\n\n  it('posts null graph when context is destroyed', () => {\n    // Send onShown event to panel creation callback.\n    const panel = simulateCreatePanel();\n    simulateConnectPort(port);\n    simulateShowPanel(panel);\n\n    nextGraph(mockGraphs[0]);\n    nextGraph(mockGraphs[2]);\n    expect(port.postMessage).toBeCalledTimes(2);\n    expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"graphContext\": Object {\n      \"context\": Object {\n        \"callbackBufferSize\": 1000,\n        \"contextId\": \"context0000\",\n        \"contextState\": \"running\",\n        \"contextType\": \"realtime\",\n        \"maxOutputChannelCount\": 2,\n        \"sampleRate\": 48000,\n      },\n      \"graph\": Object {\n        \"edges\": Array [],\n        \"nodes\": Array [],\n        \"options\": Object {\n          \"compound\": false,\n          \"directed\": true,\n          \"multigraph\": false,\n        },\n      },\n      \"id\": \"context0000\",\n      \"nodes\": Object {},\n    },\n  },\n]\n`);\n    expect(port.postMessage.mock.calls[1]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"graphContext\": Object {\n      \"context\": null,\n      \"graph\": null,\n      \"id\": \"context0000\",\n      \"nodes\": null,\n    },\n  },\n]\n`);\n  });\n\n  it('stops posting graphs once disconnected', () => {\n    const panel = simulateCreatePanel();\n    simulateConnectPort(port);\n    simulateShowPanel(panel);\n\n    nextGraph(mockGraphs[0]);\n\n    if (jest.isMockFunction(port.onDisconnect.addListener)) {\n      /** @type {function} */ (\n        port.onDisconnect.addListener.mock.calls[0][0]\n      )();\n    }\n\n    nextGraph(mockGraphs[1]);\n\n    expect(port.postMessage).toBeCalledTimes(1);\n    expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"graphContext\": Object {\n      \"context\": Object {\n        \"callbackBufferSize\": 1000,\n        \"contextId\": \"context0000\",\n        \"contextState\": \"running\",\n        \"contextType\": \"realtime\",\n        \"maxOutputChannelCount\": 2,\n        \"sampleRate\": 48000,\n      },\n      \"graph\": Object {\n        \"edges\": Array [],\n        \"nodes\": Array [],\n        \"options\": Object {\n          \"compound\": false,\n          \"directed\": true,\n          \"multigraph\": false,\n        },\n      },\n      \"id\": \"context0000\",\n      \"nodes\": Object {},\n    },\n  },\n]\n`);\n  });\n});\n\n/**\n * Simulate chrome api 
as if the devtools panel was shown.\n * @param {Chrome.DevToolsPanel} panel panel to simulate showing\n */\nfunction simulateShowPanel(panel) {\n  const panelOnShownCallback = panel.onShown.addListener.mock.calls[0][0];\n  panelOnShownCallback();\n}\n\n/**\n * Simulate chrome api as if the devtools panel was created.\n * @param {Chrome.DevToolsPanel} [panel] panel to simulate creating\n * @return {Chrome.DevToolsPanel} created mock panel\n */\nfunction simulateCreatePanel(panel = mockPanel()) {\n  const panelCreateCallback = chrome.devtools.panels.create.mock.calls[0][3];\n  panelCreateCallback(panel);\n  return panel;\n}\n\n/**\n * Simulate chrome api as if a runtime port was connected.\n * @param {Chrome.RuntimePort} [port] port to simulate connecting\n * @return {Chrome.RuntimePort} connected port\n */\nfunction simulateConnectPort(port = mockPort()) {\n  const runtimeOnConnectCallback =\n    chrome.runtime.onConnect.addListener.mock.calls[0][0];\n  runtimeOnConnectCallback(port);\n  return port;\n}\n\n/** @return {Chrome.Event<*>} */\nfunction mockEvent() {\n  return {addListener: jest.fn(), removeListener: jest.fn()};\n}\n\n/** @return {Chrome.RuntimePort} */\nfunction mockPort() {\n  return {\n    onDisconnect: mockEvent(),\n    onMessage: mockEvent(),\n    postMessage: jest.fn(),\n  };\n}\n\n/**\n * @return {Chrome.DevToolsPanel} mock version of a devtools panel\n */\nfunction mockPanel() {\n  return {onHidden: mockEvent(), onShown: mockEvent()};\n}\n\n/**\n * Defer subscription to a source until subscribeNotifier fires, and\n * unsubscribe when unsubscribeNotifier fires.\n * @param {Observable<void>} subscribeNotifier\n * @param {Observable<void>} unsubscribeNotifier\n * @return {function(Observable<T>): Observable<T>}\n * @template T\n */\nfunction subscribeWhen(subscribeNotifier, unsubscribeNotifier) {\n  return (source) => {\n    return new Observable((subscriber) => {\n      let subscription = null;\n      let subscribe = () => {\n        const oldSubscribe = subscribe;\n        subscribe = () => {};\n        subscription = source.subscribe(subscriber);\n        unsubscribe = () => {\n          unsubscribe = () => {};\n          subscription.unsubscribe();\n          subscription = null;\n          subscribe = oldSubscribe;\n        };\n      };\n      let unsubscribe = () => {};\n      const onSubscription = subscribeNotifier.subscribe({\n        next() {\n          subscribe();\n        },\n      });\n      const offSubscription = unsubscribeNotifier.subscribe({\n        next() {\n          unsubscribe();\n        },\n      });\n      return () => {\n        onSubscription.unsubscribe();\n        offSubscription.unsubscribe();\n        unsubscribe();\n      };\n    });\n  };\n}\n"
  },
  {
    "path": "src/devtools/DevtoolsGraphPanel.ts",
    "content": "/** DevTools panel that renders the Web Audio graph and more debugging information. */\n\nimport {chrome} from '../chrome';\nimport {Audion} from './Types';\n\nimport {fromEventPattern, Observable, Subject} from 'rxjs';\nimport {map, takeUntil} from 'rxjs/operators';\n\nfunction fromChromeEvent<T>(\n  event: Chrome.Event<(msg: T) => void>,\n): Observable<T> {\n  return fromEventPattern(\n    (handler) => event.addListener(handler),\n    (handler) => event.removeListener(handler),\n  );\n}\n\n/**\n * Manage a devtools panel rendering a graph of a web audio context.\n */\nexport class DevtoolsGraphPanel {\n  requests$: Observable<Audion.DevtoolsRequest>;\n\n  onPanelShown$: Observable<void>;\n\n  /**\n   * Create a DevtoolsGraphPanel.\n   */\n  constructor(graphs$: Observable<Audion.DevtoolsMessage>) {\n    const requests$ = (this.requests$ = new Subject());\n\n    const onPanelShown$ = (this.onPanelShown$ = new Subject<void>());\n    chrome.devtools.panels.create('Web Audio', '', 'panel.html', (panel) => {\n      fromChromeEvent(panel.onShown).subscribe(onPanelShown$);\n    });\n\n    fromChromeEvent(chrome.runtime.onConnect).subscribe({\n      next(port) {\n        fromChromeEvent(port.onMessage)\n          .pipe(map(([message]) => message))\n          .subscribe(requests$);\n\n        graphs$.pipe(takeUntil(fromChromeEvent(port.onDisconnect))).subscribe({\n          next(graphs) {\n            port.postMessage(graphs);\n          },\n        });\n      },\n    });\n  }\n}\n"
  },
  {
    "path": "src/devtools/Types.ts",
    "content": "/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n\nimport {Protocol} from 'devtools-protocol/types/protocol';\nimport {\n  PageDebuggerEvent,\n  PageDebuggerEventParams,\n} from '../chrome/DebuggerPageDomain';\n\nimport {\n  WebAudioDebuggerEvent,\n  WebAudioDebuggerEventParams,\n} from '../chrome/DebuggerWebAudioDomain';\n\nimport {Utils} from '../utils/Types';\n\n/** @namespace Audion */\n\n/**\n * @typedef Audion.WebAudioEvent\n * @property {Method} method\n * @property {Params} params\n */\n\nexport namespace Audion {\n  export type ContextRealtimeData = Protocol.WebAudio.ContextRealtimeData;\n\n  export enum GraphEdgeType {\n    NODE = 'node',\n    PARAM = 'param',\n  }\n\n  export interface GraphNodeEdge {\n    sourceOutputIndex: number;\n    destinationType: GraphEdgeType.NODE;\n    destinationInputIndex: number;\n  }\n\n  export interface GraphParamEdge {\n    sourceOutputIndex: number;\n    destinationType: GraphEdgeType.PARAM;\n    destinationParamId: string;\n    destinationParamIndex: number;\n  }\n\n  export type GraphEdge = GraphNodeEdge | GraphParamEdge;\n\n  export interface GraphlibEdge<V = GraphEdge> {\n    v: string;\n    w: string;\n    name: string;\n    value: V;\n  }\n\n  export interface GraphContext {\n    id: Protocol.WebAudio.GraphObjectId;\n    eventCount: number;\n    context: Protocol.WebAudio.BaseAudioContext;\n    realtimeData: ContextRealtimeData;\n    nodes: {[key: string]: GraphNode};\n    params: {[key: string]: Protocol.WebAudio.AudioParam};\n    graph: any;\n  }\n\n  export interface GraphContextMessage {\n    graphContext: Audion.GraphContext;\n  }\n\n  export interface GraphContextsById {\n    [key: string]: Audion.GraphContext;\n  }\n\n  export interface AllGraphsMessage {\n    allGraphs: GraphContextsById;\n  }\n\n  export type DevtoolsMessage = GraphContextMessage | AllGraphsMessage;\n\n  export enum DevtoolsRequestType {\n    COLLECT_GARBAGE = 'collectGarbage',\n  }\n\n  export interface DevtoolsCollectGarbageRequest {\n    type: DevtoolsRequestType.COLLECT_GARBAGE;\n  }\n\n  export type DevtoolsRequest = DevtoolsCollectGarbageRequest;\n\n  export interface DevtoolsObserver extends Utils.Observer<DevtoolsMessage> {}\n\n  export interface GraphNode {\n    node: Protocol.WebAudio.AudioNode;\n    params: Protocol.WebAudio.AudioParam[];\n    edges: Protocol.WebAudio.NodesConnectedEvent[];\n  }\n\n  export type PageEvent<N extends PageDebuggerEvent = PageDebuggerEvent> = {\n    method: N;\n    params: PageDebuggerEventParams<N>[0];\n  };\n\n  export type WebAudioEvent<\n    N extends WebAudioDebuggerEvent = WebAudioDebuggerEvent,\n  > = {\n    method: N;\n    params: WebAudioDebuggerEventParams<N>[0];\n  };\n}\n\n/**\n * @typedef Audion.GraphContext\n * @property {ChromeDebuggerWebAudioDomain.GraphObjectId} id\n * @property {ChromeDebuggerWebAudioDomain.BaseAudioContext} context\n * @property {Object<string, Audion.GraphNode>} nodes\n * @property {Object<string, ChromeDebuggerWebAudioDomain.AudioParam>} params\n * @property {object} graph\n */\n\n/**\n * @typedef Audion.GraphContextMessage\n * @property {Audion.GraphContext} graphContext\n */\n\n/**\n * @typedef Audion.AllGraphsMessage\n * @property {Object<string, Audion.GraphContext>} allGraphs\n */\n\n/**\n * @typedef {Audion.GraphContextMessage\n *   | Audion.AllGraphsMessage\n *   } Audion.DevtoolsMessage\n */\n\n/**\n * @typedef {Utils.Observer<Audion.DevtoolsMessage>} Audion.DevtoolsObserver\n */\n"
  },
  {
    "path": "src/devtools/WebAudioEventObserver.test.js",
    "content": "/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n\nimport {beforeEach, describe, expect, it, jest} from '@jest/globals';\n\nimport {chrome} from '../chrome';\nimport {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain';\n\nimport {DebuggerAttachEventController} from './DebuggerAttachEventController';\nimport {WebAudioEventObservable} from './WebAudioEventObserver';\n\njest.mock('../chrome');\n\ndescribe('WebAudioEventObserver', () => {\n  let webAudioEvents$;\n\n  beforeEach(() => {\n    jest.clearAllMocks();\n\n    const attachController = new DebuggerAttachEventController();\n    attachController.permission$.grantTemporary();\n    webAudioEvents$ = new WebAudioEventObservable(attachController);\n  });\n\n  it('attaches to chrome.debugger', () => {\n    const sub = webAudioEvents$.subscribe();\n\n    expect(chrome.debugger.attach).toBeCalled();\n    if (jest.isMockFunction(chrome.debugger.attach)) {\n      /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])();\n    }\n    expect(chrome.debugger.sendCommand).toBeCalled();\n    expect(chrome.debugger.onDetach.addListener).toBeCalled();\n    expect(chrome.debugger.onEvent.addListener).toBeCalled();\n\n    sub.unsubscribe();\n  });\n\n  it('does not reattach when user triggers detach', () => {\n    const sub = webAudioEvents$.subscribe();\n\n    if (jest.isMockFunction(chrome.debugger.attach)) {\n      /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])();\n    }\n    expect(chrome.debugger.attach).toBeCalledTimes(1);\n    if (\n      jest.isMockFunction(chrome.debugger.onDetach.addListener) &&\n      chrome.debugger.onDetach.addListener.mock.calls.length > 0\n    ) {\n      /** @type {function} */ (\n        chrome.debugger.onDetach.addListener.mock.calls[0][0]\n      )({tabId: 'tab'}, 'canceled_by_user');\n    }\n    expect(chrome.debugger.attach).toBeCalledTimes(1);\n\n    sub.unsubscribe();\n  });\n\n  it('detachs from chrome.debugger on unsubscribe', () => {\n    const sub = webAudioEvents$.subscribe();\n\n    if (jest.isMockFunction(chrome.debugger.attach)) {\n      /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])();\n    }\n    expect(chrome.debugger.sendCommand).toBeCalledTimes(1);\n    if (jest.isMockFunction(chrome.debugger.sendCommand)) {\n      /** @type {function} */ (chrome.debugger.sendCommand.mock.calls[0][2])();\n    }\n    sub.unsubscribe();\n    expect(chrome.debugger.detach).toBeCalled();\n    if (jest.isMockFunction(chrome.debugger.sendCommand)) {\n      /** @type {function} */ (chrome.debugger.sendCommand.mock.calls[1][2])();\n    }\n    expect(chrome.debugger.sendCommand).toBeCalledTimes(2);\n    if (jest.isMockFunction(chrome.debugger.detach)) {\n      /** @type {function} */ (chrome.debugger.detach.mock.calls[0][1])();\n    }\n    expect(chrome.debugger.onDetach.removeListener).toBeCalled();\n    expect(chrome.debugger.onEvent.removeListener).toBeCalled();\n  });\n\n  it('forwards to WebAudio debugger protocol events', () => {\n    const nextMock = jest.fn();\n    const sub = webAudioEvents$.subscribe(nextMock);\n    if (jest.isMockFunction(chrome.debugger.attach)) {\n      /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])();\n    }\n    /** @type {ChromeDebuggerWebAudioDomain.ContextCreatedEvent} */\n    const contextCreated = {\n      context: {\n        contextId: '0',\n        contextType: 'realtime',\n        contextState: 'running',\n        sampleRate: 48000,\n        callbackBufferSize: 1000,\n        
/** @type {ChromeDebuggerWebAudioDomain.ContextCreatedEvent} */\n    const contextCreated = {\n      context: {\n        contextId: '0',\n        contextType: 'realtime',\n        contextState: 'running',\n        sampleRate: 48000,\n        callbackBufferSize: 1000,\n        maxOutputChannelCount: 2,\n      },\n    };\n    if (jest.isMockFunction(chrome.debugger.onEvent.addListener)) {\n      /** @type {function} */ (\n        chrome.debugger.onEvent.addListener.mock.calls[0][0]\n      )('tab', WebAudioDebuggerEvent.contextCreated, contextCreated);\n    }\n    expect(nextMock).toBeCalledWith({\n      method: WebAudioDebuggerEvent.contextCreated,\n      params: contextCreated,\n    });\n\n    sub.unsubscribe();\n  });\n});\n"
  },
  {
    "path": "src/devtools/WebAudioEventObserver.ts",
    "content": "import {chrome} from '../chrome';\nimport {Audion} from './Types';\n\nimport {Observable} from 'rxjs';\nimport {\n  CounterSubject,\n  DebuggerAttachEventController,\n  PermissionSubject,\n} from './DebuggerAttachEventController';\nimport {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain';\n\n/**\n * @memberof Audion\n * @alias WebAudioEventObserver\n */\nexport class WebAudioEventObservable extends Observable<Audion.WebAudioEvent> {\n  debuggerAttachController: DebuggerAttachEventController;\n\n  /** Does user permit extension to use `chrome.debugger`. */\n  permission$: PermissionSubject;\n  /** How many subscriptions want to attach to `chrome.debugger`. */\n  attachInterest$: CounterSubject;\n  /**\n   * How many subscriptions want to receive events through\n   * `chrome.debugger.onEvent`.\n   */\n  webAudioEventInterest$: CounterSubject;\n\n  constructor(debuggerAttachController: DebuggerAttachEventController) {\n    super((subscriber) => {\n      this.debuggerAttachController = debuggerAttachController;\n      this.permission$ = debuggerAttachController.permission$;\n      this.attachInterest$ = debuggerAttachController.attachInterest$;\n      this.webAudioEventInterest$ =\n        debuggerAttachController.webAudioEventInterest$;\n\n      const onEvent: Chrome.DebuggerOnEventListener = (\n        debuggeeId,\n        method: WebAudioDebuggerEvent,\n        params,\n      ) => {\n        subscriber.next({method, params});\n      };\n\n      const onDetach = () => {\n        // TODO: Show a warning if the DevTools are still open and allow the\n        // user to re-attach manually, e.g. by pressing a button.\n        // See: https://developer.chrome.com/docs/extensions/reference/debugger/#type-DetachReason\n      };\n\n      chrome.debugger.onDetach.addListener(onDetach);\n      chrome.debugger.onEvent.addListener(onEvent);\n\n      this.attachInterest$.increment();\n      this.webAudioEventInterest$.increment();\n\n      return () => {\n        chrome.debugger.onDetach.removeListener(onDetach);\n        chrome.debugger.onEvent.removeListener(onEvent);\n\n        this.attachInterest$.decrement();\n        this.webAudioEventInterest$.decrement();\n      };\n    });\n  }\n}\n"
  },
  {
    "path": "src/devtools/WebAudioGraphIntegrator.test.js",
    "content": "/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n\nimport {beforeEach, describe, expect, it, jest} from '@jest/globals';\nimport {EMPTY, from, Observable, Subject, throwError} from 'rxjs';\nimport {concatWith, filter, takeUntil} from 'rxjs/operators';\n\nimport {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain';\n\nimport {integrateWebAudioGraph} from './WebAudioGraphIntegrator';\n\n// FIX: prettier isn't wrapping this next line.\n// eslint-disable-next-line max-len\nimport * as oscillatorGainFixture from '../../fixtures/oscillatorGainParam';\n\n// Node.js environment doesn't provide some browser-specific APIs\n// (e.g. performance.now(), localStorage.getItem() and localStorage.setItem())\n// Mocking these ensures no errors are thrown when running tests.\nglobal.performance = {\n  now: jest.fn(() => Date.now()),\n};\n\nconst localStorageMock = {\n  getItem: jest.fn(),\n  setItem: jest.fn(),\n};\nglobal.localStorage = localStorageMock;\n\ndescribe('WebAudioGraphIntegrator', () => {\n  let nextWebAudioEvent = (value) => {};\n  let nextGraphContext = jest.fn();\n\n  beforeEach(() => {\n    const subject = new Subject();\n    nextGraphContext = jest.fn();\n    nextWebAudioEvent = (value) => subject.next(value);\n    const webAudioRealtime = {\n      pollContext() {\n        return new Observable();\n      },\n    };\n    subject\n      .pipe(integrateWebAudioGraph(webAudioRealtime))\n      .subscribe(nextGraphContext);\n  });\n\n  it('adds new context', () => {\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextCreated,\n      params: MockWebAudioEvents.contextCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(1);\n    expect(nextGraphContext.mock.calls[0]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"context\": Object {\n      \"callbackBufferSize\": 1000,\n      \"contextId\": \"context0000\",\n      \"contextState\": \"running\",\n      \"contextType\": \"realtime\",\n      \"maxOutputChannelCount\": 2,\n      \"sampleRate\": 48000,\n    },\n    \"eventCount\": 1,\n    \"graph\": Graph {\n      \"_defaultEdgeLabelFn\": [Function],\n      \"_defaultNodeLabelFn\": [Function],\n      \"_edgeLabels\": Object {},\n      \"_edgeObjs\": Object {},\n      \"_in\": Object {},\n      \"_isCompound\": false,\n      \"_isDirected\": true,\n      \"_isMultigraph\": true,\n      \"_label\": Object {},\n      \"_nodes\": Object {},\n      \"_out\": Object {},\n      \"_preds\": Object {},\n      \"_sucs\": Object {},\n    },\n    \"id\": \"context0000\",\n    \"nodes\": Object {},\n    \"params\": Object {},\n    \"realtimeData\": Object {\n      \"callbackIntervalMean\": 0,\n      \"callbackIntervalVariance\": 0,\n      \"currentTime\": 0,\n      \"renderCapacity\": 0,\n    },\n  },\n]\n`);\n  });\n  it('changes context', () => {\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextCreated,\n      params: MockWebAudioEvents.contextCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(1);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextChanged,\n      params: MockWebAudioEvents.contextChanged[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(2);\n    expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"context\": Object {\n      \"callbackBufferSize\": 1000,\n      \"contextId\": \"context0000\",\n      \"contextState\": \"suspended\",\n      \"contextType\": \"realtime\",\n      \"maxOutputChannelCount\": 2,\n      
\"sampleRate\": 48000,\n    },\n    \"eventCount\": 2,\n    \"graph\": Graph {\n      \"_defaultEdgeLabelFn\": [Function],\n      \"_defaultNodeLabelFn\": [Function],\n      \"_edgeLabels\": Object {},\n      \"_edgeObjs\": Object {},\n      \"_in\": Object {},\n      \"_isCompound\": false,\n      \"_isDirected\": true,\n      \"_isMultigraph\": true,\n      \"_label\": Object {},\n      \"_nodes\": Object {},\n      \"_out\": Object {},\n      \"_preds\": Object {},\n      \"_sucs\": Object {},\n    },\n    \"id\": \"context0000\",\n    \"nodes\": Object {},\n    \"params\": Object {},\n    \"realtimeData\": Object {\n      \"callbackIntervalMean\": 0,\n      \"callbackIntervalVariance\": 0,\n      \"currentTime\": 0,\n      \"renderCapacity\": 0,\n    },\n  },\n]\n`);\n  });\n  it('removes old context', () => {\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextCreated,\n      params: MockWebAudioEvents.contextCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(1);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextWillBeDestroyed,\n      params: MockWebAudioEvents.contextWillBeDestroyed[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(2);\n    expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"context\": null,\n    \"eventCount\": 2,\n    \"graph\": null,\n    \"id\": \"context0000\",\n    \"nodes\": null,\n    \"params\": null,\n    \"realtimeData\": null,\n  },\n]\n`);\n  });\n  it('adds new node', () => {\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextCreated,\n      params: MockWebAudioEvents.contextCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(1);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.audioNodeCreated,\n      params: MockWebAudioEvents.audioNodeCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(2);\n    expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"context\": Object {\n      \"callbackBufferSize\": 1000,\n      \"contextId\": \"context0000\",\n      \"contextState\": \"running\",\n      \"contextType\": \"realtime\",\n      \"maxOutputChannelCount\": 2,\n      \"sampleRate\": 48000,\n    },\n    \"eventCount\": 2,\n    \"graph\": Graph {\n      \"_defaultEdgeLabelFn\": [Function],\n      \"_defaultNodeLabelFn\": [Function],\n      \"_edgeLabels\": Object {},\n      \"_edgeObjs\": Object {},\n      \"_in\": Object {\n        \"node0000\": Object {},\n      },\n      \"_isCompound\": false,\n      \"_isDirected\": true,\n      \"_isMultigraph\": true,\n      \"_label\": Object {},\n      \"_nodeCount\": 1,\n      \"_nodes\": Object {\n        \"node0000\": Object {\n          \"color\": null,\n          \"height\": 50,\n          \"id\": \"node0000\",\n          \"label\": \"gain\",\n          \"type\": \"gain\",\n          \"width\": 150,\n        },\n      },\n      \"_out\": Object {\n        \"node0000\": Object {},\n      },\n      \"_preds\": Object {\n        \"node0000\": Object {},\n      },\n      \"_sucs\": Object {\n        \"node0000\": Object {},\n      },\n    },\n    \"id\": \"context0000\",\n    \"nodes\": Object {\n      \"node0000\": Object {\n        \"edges\": Array [],\n        \"node\": Object {\n          \"channelCountMode\": \"max\",\n          \"channelInterpretation\": \"discrete\",\n          \"contextId\": \"context0000\",\n          \"nodeId\": \"node0000\",\n          \"nodeType\": \"gain\",\n          \"numberOfInputs\": 1,\n 
         \"numberOfOutputs\": 1,\n        },\n        \"params\": Array [],\n      },\n    },\n    \"params\": Object {},\n    \"realtimeData\": Object {\n      \"callbackIntervalMean\": 0,\n      \"callbackIntervalVariance\": 0,\n      \"currentTime\": 0,\n      \"renderCapacity\": 0,\n    },\n  },\n]\n`);\n  });\n  it('removes old node', () => {\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextCreated,\n      params: MockWebAudioEvents.contextCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(1);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.audioNodeCreated,\n      params: MockWebAudioEvents.audioNodeCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(2);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.audioNodeWillBeDestroyed,\n      params: MockWebAudioEvents.audioNodeWillBeDestroyed[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(3);\n    expect(nextGraphContext.mock.calls[2]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"context\": Object {\n      \"callbackBufferSize\": 1000,\n      \"contextId\": \"context0000\",\n      \"contextState\": \"running\",\n      \"contextType\": \"realtime\",\n      \"maxOutputChannelCount\": 2,\n      \"sampleRate\": 48000,\n    },\n    \"eventCount\": 3,\n    \"graph\": Graph {\n      \"_defaultEdgeLabelFn\": [Function],\n      \"_defaultNodeLabelFn\": [Function],\n      \"_edgeLabels\": Object {},\n      \"_edgeObjs\": Object {},\n      \"_in\": Object {},\n      \"_isCompound\": false,\n      \"_isDirected\": true,\n      \"_isMultigraph\": true,\n      \"_label\": Object {},\n      \"_nodeCount\": 0,\n      \"_nodes\": Object {},\n      \"_out\": Object {},\n      \"_preds\": Object {},\n      \"_sucs\": Object {},\n    },\n    \"id\": \"context0000\",\n    \"nodes\": Object {},\n    \"params\": Object {},\n    \"realtimeData\": Object {\n      \"callbackIntervalMean\": 0,\n      \"callbackIntervalVariance\": 0,\n      \"currentTime\": 0,\n      \"renderCapacity\": 0,\n    },\n  },\n]\n`);\n  });\n  it('adds new node edge connection', () => {\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextCreated,\n      params: MockWebAudioEvents.contextCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(1);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.audioNodeCreated,\n      params: MockWebAudioEvents.audioNodeCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(2);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.audioNodeCreated,\n      params: MockWebAudioEvents.audioNodeCreated[1],\n    });\n    expect(nextGraphContext).toBeCalledTimes(3);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.nodesConnected,\n      params: MockWebAudioEvents.nodesConnected[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(4);\n    expect(nextGraphContext.mock.calls[3]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"context\": Object {\n      \"callbackBufferSize\": 1000,\n      \"contextId\": \"context0000\",\n      \"contextState\": \"running\",\n      \"contextType\": \"realtime\",\n      \"maxOutputChannelCount\": 2,\n      \"sampleRate\": 48000,\n    },\n    \"eventCount\": 4,\n    \"graph\": Graph {\n      \"_defaultEdgeLabelFn\": [Function],\n      \"_defaultNodeLabelFn\": [Function],\n      \"_edgeCount\": 1,\n      \"_edgeLabels\": Object {\n        \"node0001\u0001node0000\u00010,0\": Object {\n          \"destinationInputIndex\": 0,\n          \"destinationType\": 
\"node\",\n          \"sourceOutputIndex\": 0,\n        },\n      },\n      \"_edgeObjs\": Object {\n        \"node0001\u0001node0000\u00010,0\": Object {\n          \"name\": \"0,0\",\n          \"v\": \"node0001\",\n          \"w\": \"node0000\",\n        },\n      },\n      \"_in\": Object {\n        \"node0000\": Object {\n          \"node0001\u0001node0000\u00010,0\": Object {\n            \"name\": \"0,0\",\n            \"v\": \"node0001\",\n            \"w\": \"node0000\",\n          },\n        },\n        \"node0001\": Object {},\n      },\n      \"_isCompound\": false,\n      \"_isDirected\": true,\n      \"_isMultigraph\": true,\n      \"_label\": Object {},\n      \"_nodeCount\": 2,\n      \"_nodes\": Object {\n        \"node0000\": Object {\n          \"color\": null,\n          \"height\": 50,\n          \"id\": \"node0000\",\n          \"label\": \"gain\",\n          \"type\": \"gain\",\n          \"width\": 150,\n        },\n        \"node0001\": Object {\n          \"color\": null,\n          \"height\": 50,\n          \"id\": \"node0001\",\n          \"label\": \"bufferSource\",\n          \"type\": \"bufferSource\",\n          \"width\": 150,\n        },\n      },\n      \"_out\": Object {\n        \"node0000\": Object {},\n        \"node0001\": Object {\n          \"node0001\u0001node0000\u00010,0\": Object {\n            \"name\": \"0,0\",\n            \"v\": \"node0001\",\n            \"w\": \"node0000\",\n          },\n        },\n      },\n      \"_preds\": Object {\n        \"node0000\": Object {\n          \"node0001\": 1,\n        },\n        \"node0001\": Object {},\n      },\n      \"_sucs\": Object {\n        \"node0000\": Object {},\n        \"node0001\": Object {\n          \"node0000\": 1,\n        },\n      },\n    },\n    \"id\": \"context0000\",\n    \"nodes\": Object {\n      \"node0000\": Object {\n        \"edges\": Array [],\n        \"node\": Object {\n          \"channelCountMode\": \"max\",\n          \"channelInterpretation\": \"discrete\",\n          \"contextId\": \"context0000\",\n          \"nodeId\": \"node0000\",\n          \"nodeType\": \"gain\",\n          \"numberOfInputs\": 1,\n          \"numberOfOutputs\": 1,\n        },\n        \"params\": Array [],\n      },\n      \"node0001\": Object {\n        \"edges\": Array [\n          Object {\n            \"contextId\": \"context0000\",\n            \"destinationId\": \"node0000\",\n            \"sourceId\": \"node0001\",\n          },\n        ],\n        \"node\": Object {\n          \"channelCountMode\": \"max\",\n          \"channelInterpretation\": \"discrete\",\n          \"contextId\": \"context0000\",\n          \"nodeId\": \"node0001\",\n          \"nodeType\": \"bufferSource\",\n          \"numberOfInputs\": 0,\n          \"numberOfOutputs\": 1,\n        },\n        \"params\": Array [],\n      },\n    },\n    \"params\": Object {},\n    \"realtimeData\": Object {\n      \"callbackIntervalMean\": 0,\n      \"callbackIntervalVariance\": 0,\n      \"currentTime\": 0,\n      \"renderCapacity\": 0,\n    },\n  },\n]\n`);\n  });\n  it('removes old node edge connection', () => {\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.contextCreated,\n      params: MockWebAudioEvents.contextCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(1);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.audioNodeCreated,\n      params: MockWebAudioEvents.audioNodeCreated[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(2);\n    nextWebAudioEvent({\n      
method: WebAudioDebuggerEvent.audioNodeCreated,\n      params: MockWebAudioEvents.audioNodeCreated[1],\n    });\n    expect(nextGraphContext).toBeCalledTimes(3);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.nodesConnected,\n      params: MockWebAudioEvents.nodesConnected[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(4);\n    nextWebAudioEvent({\n      method: WebAudioDebuggerEvent.nodesDisconnected,\n      params: MockWebAudioEvents.nodesDisconnected[0],\n    });\n    expect(nextGraphContext).toBeCalledTimes(5);\n    expect(nextGraphContext.mock.calls[4]).toMatchInlineSnapshot(`\nArray [\n  Object {\n    \"context\": Object {\n      \"callbackBufferSize\": 1000,\n      \"contextId\": \"context0000\",\n      \"contextState\": \"running\",\n      \"contextType\": \"realtime\",\n      \"maxOutputChannelCount\": 2,\n      \"sampleRate\": 48000,\n    },\n    \"eventCount\": 5,\n    \"graph\": Graph {\n      \"_defaultEdgeLabelFn\": [Function],\n      \"_defaultNodeLabelFn\": [Function],\n      \"_edgeCount\": 0,\n      \"_edgeLabels\": Object {},\n      \"_edgeObjs\": Object {},\n      \"_in\": Object {\n        \"node0000\": Object {},\n        \"node0001\": Object {},\n      },\n      \"_isCompound\": false,\n      \"_isDirected\": true,\n      \"_isMultigraph\": true,\n      \"_label\": Object {},\n      \"_nodeCount\": 2,\n      \"_nodes\": Object {\n        \"node0000\": Object {\n          \"color\": null,\n          \"height\": 50,\n          \"id\": \"node0000\",\n          \"label\": \"gain\",\n          \"type\": \"gain\",\n          \"width\": 150,\n        },\n        \"node0001\": Object {\n          \"color\": null,\n          \"height\": 50,\n          \"id\": \"node0001\",\n          \"label\": \"bufferSource\",\n          \"type\": \"bufferSource\",\n          \"width\": 150,\n        },\n      },\n      \"_out\": Object {\n        \"node0000\": Object {},\n        \"node0001\": Object {},\n      },\n      \"_preds\": Object {\n        \"node0000\": Object {},\n        \"node0001\": Object {},\n      },\n      \"_sucs\": Object {\n        \"node0000\": Object {},\n        \"node0001\": Object {},\n      },\n    },\n    \"id\": \"context0000\",\n    \"nodes\": Object {\n      \"node0000\": Object {\n        \"edges\": Array [],\n        \"node\": Object {\n          \"channelCountMode\": \"max\",\n          \"channelInterpretation\": \"discrete\",\n          \"contextId\": \"context0000\",\n          \"nodeId\": \"node0000\",\n          \"nodeType\": \"gain\",\n          \"numberOfInputs\": 1,\n          \"numberOfOutputs\": 1,\n        },\n        \"params\": Array [],\n      },\n      \"node0001\": Object {\n        \"edges\": Array [\n          Object {\n            \"contextId\": \"context0000\",\n            \"destinationId\": \"node0000\",\n            \"sourceId\": \"node0001\",\n          },\n        ],\n        \"node\": Object {\n          \"channelCountMode\": \"max\",\n          \"channelInterpretation\": \"discrete\",\n          \"contextId\": \"context0000\",\n          \"nodeId\": \"node0001\",\n          \"nodeType\": \"bufferSource\",\n          \"numberOfInputs\": 0,\n          \"numberOfOutputs\": 1,\n        },\n        \"params\": Array [],\n      },\n    },\n    \"params\": Object {},\n    \"realtimeData\": Object {\n      \"callbackIntervalMean\": 0,\n      \"callbackIntervalVariance\": 0,\n      \"currentTime\": 0,\n      \"renderCapacity\": 0,\n    },\n  },\n]\n`);\n  });\n\n  describe('simulate graphs', () => {\n    
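// For each fixture event below, one test truncates the stream at that event\n    // and injects an error (expecting the error to reach the subscriber), and\n    // another drops the event entirely (expecting no error).\n    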
describe('oscillator -> gain param', () => {\n      const events = oscillatorGainFixture.OSCILLATOR_GAIN_PARAM_EVENTS;\n      const simulation = () =>\n        integrateWebAudioGraph({\n          pollContext() {\n            return EMPTY;\n          },\n        });\n      const eventSource = from(events);\n\n      for (let i = 0; i < events.length; i++) {\n        const errorEvent = events[i];\n        // Replay the events preceding events[i], then push an error in place\n        // of the falsified event.\n        const falseSource = from(events.slice(0, i)).pipe(\n          concatWith(throwError(() => new Error())),\n        );\n        it(`falsify #${i} ${errorEvent.method}`, () => {\n          const subscriber = mockSubscriber();\n          falseSource.pipe(simulation()).subscribe(subscriber);\n          expect(subscriber.error).toBeCalled();\n        });\n      }\n\n      it(`all events`, () => {\n        const subscriber = mockSubscriber();\n        eventSource.pipe(simulation()).subscribe(subscriber);\n        expect(subscriber.next).toBeCalled();\n        expect(subscriber.error).not.toBeCalled();\n      });\n\n      for (let i = 0; i < events.length; i++) {\n        const skipEvent = events[i];\n        const skipSource = eventSource.pipe(filter((ev) => ev !== skipEvent));\n        it(`skip event #${i} ${skipEvent.method}`, () => {\n          const subscriber = mockSubscriber();\n          skipSource.pipe(simulation()).subscribe(subscriber);\n          expect(subscriber.error).not.toBeCalled();\n        });\n      }\n    });\n  });\n});\n\n/**\n * @type {Object<EventName,\n *   Object<*, WebAudioDebuggerEvent>>}\n */\nconst MockWebAudioEvents = {\n  audioNodeCreated: {\n    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeCreatedEvent} */\n    0: {\n      node: {\n        contextId: 'context0000',\n        nodeId: 'node0000',\n        nodeType: 'gain',\n        channelCountMode: 'max',\n        channelInterpretation: 'discrete',\n        numberOfInputs: 1,\n        numberOfOutputs: 1,\n      },\n    },\n    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeCreatedEvent} */\n    1: {\n      node: {\n        contextId: 'context0000',\n        nodeId: 'node0001',\n        nodeType: 'bufferSource',\n        channelCountMode: 'max',\n        channelInterpretation: 'discrete',\n        numberOfInputs: 0,\n        numberOfOutputs: 1,\n      },\n    },\n  },\n  audioNodeWillBeDestroyed: {\n    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeWillBeDestroyedEvent} */\n    0: {\n      contextId: 'context0000',\n      nodeId: 'node0000',\n    },\n  },\n  contextChanged: {\n    /** @type {ChromeDebuggerWebAudioDomain.ContextChangedEvent} */\n    0: {\n      context: {\n        contextId: 'context0000',\n        contextType: 'realtime',\n        contextState: 'suspended',\n        sampleRate: 48000,\n        callbackBufferSize: 1000,\n        maxOutputChannelCount: 2,\n      },\n    },\n  },\n  contextCreated: {\n    /** @type {ChromeDebuggerWebAudioDomain.ContextCreatedEvent} */\n    0: {\n      context: {\n        contextId: 'context0000',\n        contextType: 'realtime',\n        contextState: 'running',\n        sampleRate: 48000,\n        callbackBufferSize: 1000,\n        maxOutputChannelCount: 2,\n      },\n    },\n  },\n  contextWillBeDestroyed: {\n    /** @type {ChromeDebuggerWebAudioDomain.ContextWillBeDestroyedEvent} */\n    0: {\n      contextId: 'context0000',\n    },\n  },\n  nodesConnected: {\n    /** @type {ChromeDebuggerWebAudioDomain.NodesConnectedEvent} */\n    0: {\n      contextId: 'context0000',\n      sourceId: 'node0001',\n      
destinationId: 'node0000',\n    },\n  },\n  nodesDisconnected: {\n    /** @type {ChromeDebuggerWebAudioDomain.NodesDisconnectedEvent} */\n    0: {\n      contextId: 'context0000',\n      sourceId: 'node0001',\n      destinationId: 'node0000',\n    },\n  },\n};\n\n/**\n * @return {Subscriber}\n */\nfunction mockSubscriber() {\n  return {next: jest.fn(), complete: jest.fn(), error: jest.fn()};\n}\n"
  },
  {
    "path": "src/devtools/WebAudioGraphIntegrator.ts",
    "content": "import * as dagre from 'dagre';\nimport * as graphlib from 'graphlib';\nimport {ProtocolMapping} from 'devtools-protocol/types/protocol-mapping';\nimport {DLOG} from '../utils/dlog';\nimport {\n  EMPTY,\n  isObservable,\n  merge,\n  NEVER,\n  Observable,\n  of,\n  OperatorFunction,\n  pipe,\n  Subject,\n} from 'rxjs';\nimport {\n  map,\n  filter,\n  catchError,\n  mergeMap,\n  takeUntil,\n  take,\n  ignoreElements,\n  finalize,\n  share,\n} from 'rxjs/operators';\n\nimport {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain';\n\nimport {Audion} from './Types';\nimport {\n  INITIAL_CONTEXT_REALTIME_DATA,\n  RealtimeDataErrorMessage,\n  WebAudioRealtimeData,\n  WebAudioRealtimeDataReason,\n} from './WebAudioRealtimeData';\nimport {\n  ChromeDebuggerAPIEventName,\n  ChromeDebuggerAPIEvent,\n} from './DebuggerAttachEventController';\nimport {\n  PageDebuggerEvent,\n  PageDebuggerEventParams,\n} from '../chrome/DebuggerPageDomain';\n\nenum GraphContextDestroyReasonMessage {\n  RECEIVE_WILL_DESTROY_EVENT = `ReceiveWillDestroyEvent`,\n  CANNOT_FIND_REALTIME_DATA = `CannotFindRealtimeData`,\n}\n\ntype MutableContexts = {\n  [key: string]: {\n    graphContext: Audion.GraphContext;\n    graphContextDestroyed$: Subject<GraphContextDestroyReasonMessage>;\n    realtimeDataGraphContext$: Observable<Audion.GraphContext>;\n  };\n};\n\ninterface EventHelpers {\n  realtimeData: WebAudioRealtimeData;\n}\n\ntype IntegratableEventName =\n  | PageDebuggerEvent\n  | WebAudioDebuggerEvent\n  | ChromeDebuggerAPIEventName;\n\ntype IntegratableEvent =\n  | Audion.PageEvent\n  | Audion.WebAudioEvent\n  | ChromeDebuggerAPIEvent;\n\ntype IntegratableEventMapping = {\n  [K in IntegratableEventName]: ProtocolMapping.Events extends {\n    [key in K]: [infer P];\n  }\n    ? P\n    : ChromeDebuggerAPIEvent extends {method: K; params: infer P}\n    ? 
P\n    : never;\n};\n\ntype EventHandlers =\n  | {\n      readonly [K in IntegratableEventName]: (\n        helpers: EventHelpers,\n        contexts: MutableContexts,\n        event: IntegratableEventMapping[K],\n      ) => Observable<Audion.GraphContext> | Audion.GraphContext | void;\n    };\n\nexport const getTimestampAsString = () => {\n  return '[' + performance.now().toFixed(2) + '] ';\n};\n\nconst EVENT_HANDLERS: Partial<EventHandlers> = {\n  [WebAudioDebuggerEvent.audioNodeCreated]: (\n    helpers,\n    contexts,\n    audioNodeCreated,\n  ) => {\n    const node = audioNodeCreated.node;\n    const {contextId, nodeId, nodeType} = node;\n    const space = contexts[contextId];\n    if (!space) {\n      return;\n    }\n\n    DLOG(`A new AudioNode has been created.`, {\n      contextId,\n      nodeId,\n    });\n\n    const context = space.graphContext;\n    context.eventCount += 1;\n\n    if (context.nodes[nodeId]) {\n      DLOG(`Duplicate WebAudio.audioNodeCreated event`, {\n        contextId,\n        nodeId,\n      });\n      return;\n    }\n\n    context.nodes[nodeId] = {\n      node,\n      params: [],\n      edges: [],\n    };\n    context.graph.setNode(nodeId, {\n      id: nodeId,\n      label: nodeType,\n      type: nodeType,\n      color: null,\n      width: 150,\n      height: 50,\n    });\n    return context;\n  },\n\n  [WebAudioDebuggerEvent.audioNodeWillBeDestroyed]: (\n    helpers,\n    contexts,\n    audioNodeDestroyed,\n  ) => {\n    const {contextId, nodeId} = audioNodeDestroyed;\n\n    DLOG(`An existing AudioNode has been destroyed.`, {\n      contextId,\n      nodeId,\n    });\n\n    const space = contexts[contextId];\n    if (!space) {\n      return;\n    }\n\n    const context = space.graphContext;\n    context.eventCount += 1;\n\n    context.graph.removeNode(nodeId);\n    const node = context.nodes[nodeId];\n    if (node && node.params) {\n      for (const audioParam of node.params) {\n        delete context.params[audioParam.paramId];\n      }\n    }\n    delete context.nodes[nodeId];\n    return context;\n  },\n\n  [WebAudioDebuggerEvent.audioParamCreated]: (\n    helpers,\n    contexts,\n    audioParamCreated,\n  ) => {\n    const {param} = audioParamCreated;\n    const {contextId, nodeId, paramId: paramIdCreated} = param;\n\n    DLOG(`A new AudioParam has been created.`, {\n      contextId,\n      nodeId,\n      paramIdCreated,\n    });\n\n    const space = contexts[contextId];\n    if (!space) {\n      return;\n    }\n\n    const context = space.graphContext;\n    context.eventCount += 1;\n\n    const node = context.nodes[nodeId];\n    if (!node) {\n      return;\n    }\n\n    if (node.params.some(({paramId}) => paramId === paramIdCreated)) {\n      DLOG(`Duplicate WebAudio.audioParamCreated event`, {\n        contextId,\n        nodeId,\n        paramIdCreated,\n      });\n      return;\n    }\n\n    node.params.push(param);\n    context.params[paramIdCreated] = param;\n    return context;\n  },\n\n  [WebAudioDebuggerEvent.audioParamWillBeDestroyed]: (\n    helpers,\n    contexts,\n    audioParamWillBeDestroyed,\n  ) => {\n    const {\n      contextId,\n      nodeId,\n      paramId: paramIdCreated,\n    } = audioParamWillBeDestroyed;\n\n    DLOG(`An existing AudioParam has been destroyed.`, {\n      contextId,\n      nodeId,\n      paramIdCreated,\n    });\n\n    const space = contexts[contextId];\n    if (!space) {\n      return;\n    }\n\n    const context = space.graphContext;\n    context.eventCount += 1;\n\n    const node = context.nodes[nodeId];\n    if 
(node && node.params) {\n      removeAll(node.params, ({paramId}) => paramId === paramIdCreated);\n    }\n    delete context.params[paramIdCreated];\n    return context;\n  },\n\n  [WebAudioDebuggerEvent.contextChanged]: (\n    helpers,\n    contexts,\n    contextChanged,\n  ) => {\n    const {contextId} = contextChanged.context;\n    const space = contexts[contextId];\n    if (!space) {\n      DLOG(\n        `Unexpected WebAudio.contextChanged event. ` +\n          `Did not receive an event when the Audio Context was created.`,\n        {\n          contextId,\n        },\n      );\n      return;\n    }\n\n    DLOG(\n      `Some properties in BaseAudioContext have changed ` +\n        `(the id stays the same).`,\n      {\n        contextId,\n      },\n    );\n\n    space.graphContext.context = contextChanged.context;\n    space.graphContext.eventCount += 1;\n    return space.graphContext;\n  },\n\n  [WebAudioDebuggerEvent.audioListenerCreated]: (\n    helpers,\n    contexts,\n    audioListenerCreated,\n  ) => {\n    const {contextId} = audioListenerCreated.listener;\n\n    DLOG(`An AudioListener has been created.`, {\n      contextId,\n    });\n    return;\n  },\n\n  [WebAudioDebuggerEvent.audioListenerWillBeDestroyed]: (\n    helpers,\n    contexts,\n    audioListenerWillBeDestroyed,\n  ) => {\n    const {contextId} = audioListenerWillBeDestroyed;\n\n    DLOG(`An AudioListener will be destroyed.`, {\n      contextId,\n    });\n    return;\n  },\n\n  [WebAudioDebuggerEvent.contextCreated]: (\n    helpers,\n    contexts,\n    contextCreated,\n  ) => {\n    const {contextId, contextType} = contextCreated.context;\n\n    if (contexts[contextId]) {\n      // Duplicate or out of order context created event.\n      console.warn(\n        getTimestampAsString() +\n          `Duplicate ${WebAudioDebuggerEvent.contextCreated} event.`,\n        contextCreated,\n      );\n      return;\n    } else {\n      console.debug(\n        getTimestampAsString() +\n          `Audio Context (${contextId.slice(-6)}-${contextType}) created. ` +\n          `Adding the context to the tracked set.`,\n      );\n    }\n\n
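    // A multigraph allows multiple edges between the same pair of nodes; edge\n    // names encode the output and input indices so parallel connections stay\n    // distinct.\n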
    const graph = new dagre.graphlib.Graph({multigraph: true});\n    graph.setGraph({});\n    graph.setDefaultEdgeLabel(() => {\n      return {};\n    });\n\n    // Request realtime data for realtime and offline contexts. We use this\n    // information to help confirm the existence of this new context. Events\n    // that normally mark when contexts are destroyed may not arrive and so we\n    // need this extra way to determine when the contexts no longer exist.\n    const realtimeData$ = helpers.realtimeData.pollContext(contextId);\n    const graphContextDestroyed$ =\n      new Subject<GraphContextDestroyReasonMessage>();\n\n    const realtimeDataGraphContext$ = realtimeData$.pipe(\n      map((realtimeData) => {\n        const space = contexts[contextId];\n        if (space) {\n          space.graphContext = {\n            ...space.graphContext,\n            realtimeData,\n          };\n          return space.graphContext;\n        }\n      }),\n      filter((context): context is Audion.GraphContext => Boolean(context)),\n      catchError((reason, caught) => {\n        reason = WebAudioRealtimeDataReason.parseReason(reason);\n\n        if (WebAudioRealtimeDataReason.isCannotFindReason(reason)) {\n          const space = contexts[contextId];\n          space?.graphContextDestroyed$?.next(\n            GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA,\n          );\n\n          if (!space) {\n            DLOG(\n              `Error requesting realtime data for context. ` +\n                `Context was likely cleaned up during requests for real time data.`,\n              {\n                reason,\n                contextId,\n              },\n            );\n          }\n\n          return EMPTY;\n        } else if (WebAudioRealtimeDataReason.isRealtimeOnlyReason(reason)) {\n          // Non-realtime/offline contexts do not have realtime data and will\n          // produce this error when that data is requested.\n        } else {\n          console.error(\n            getTimestampAsString() +\n              `Unexpected error requesting realtime data for context '${contextId}'. ` +\n              `\"${WebAudioRealtimeDataReason.toString(reason)}\"`,\n          );\n        }\n        // Redirect back to the caught observable. We want to keep receiving\n        // realtime data values or errors until we receive CANNOT_FIND error.\n        return caught;\n      }),\n\n      takeUntil(graphContextDestroyed$),\n    );\n\n    contexts[contextId] = {\n      graphContext: {\n        id: contextId,\n        eventCount: 1,\n        context: contextCreated.context,\n        realtimeData: INITIAL_CONTEXT_REALTIME_DATA,\n        nodes: {},\n        params: {},\n        // TODO: dagre's graphlib typings are inaccurate, which is why we use\n        // graphlib's types. Revert to dagre's types once the issue is fixed:\n        // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/47439\n        graph: graph as unknown as graphlib.Graph,\n      },\n      graphContextDestroyed$,\n      realtimeDataGraphContext$,\n    };\n\n
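    // Emit the freshly tracked context immediately, follow with realtime data\n    // updates while the context lives, and finish with a cleared context once\n    // destruction is observed.\n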
    return merge(\n      of(contexts[contextId].graphContext),\n      graphContextDestroyed$.pipe(\n        share(),\n        take(1),\n        mergeMap((message) => {\n          if (\n            message ===\n            GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA\n          ) {\n            DLOG(\n              `Audio Context cannot be found. ` +\n                `Removing the context from the tracked set.`,\n              {\n                contextId,\n              },\n            );\n          } else if (\n            message ===\n            GraphContextDestroyReasonMessage.RECEIVE_WILL_DESTROY_EVENT\n          ) {\n            DLOG(\n              `Audio Context will be destroyed. ` +\n                `Removing the context from the tracked set.`,\n              {\n                contextId,\n              },\n            );\n          }\n\n          const space = contexts[contextId];\n          if (space) {\n            delete contexts[contextId];\n            return of({\n              id: contextId,\n              eventCount: space.graphContext?.eventCount + 1,\n              context: null,\n              realtimeData: null,\n              nodes: null,\n              params: null,\n              graph: null,\n            });\n          } else {\n            DLOG(\n              `Audio Context could not be removed from the tracked set. ` +\n                `It was not tracked.`,\n              {\n                contextId,\n              },\n            );\n          }\n          return EMPTY;\n        }),\n      ),\n      contexts[contextId].realtimeDataGraphContext$,\n    );\n  },\n\n  [WebAudioDebuggerEvent.contextWillBeDestroyed]: (\n    helpers,\n    contexts,\n    contextDestroyed,\n  ) => {\n    const {contextId} = contextDestroyed;\n    const space = contexts[contextId];\n    space?.graphContextDestroyed$?.next(\n      GraphContextDestroyReasonMessage.RECEIVE_WILL_DESTROY_EVENT,\n    );\n\n    DLOG(`A BaseAudioContext will be destroyed.`, {\n      contextId,\n    });\n  },\n\n  [WebAudioDebuggerEvent.nodeParamConnected]: (\n    helpers,\n    contexts,\n    nodeParamConnected,\n  ) => {\n    const {\n      contextId,\n      sourceId: sourceNodeId,\n      sourceOutputIndex = 0,\n      destinationId: destinationParamId,\n    } = nodeParamConnected;\n\n    DLOG(`An AudioNode is connected to an AudioParam.`, {\n      contextId,\n      sourceNodeId,\n      destinationParamId,\n    });\n\n    const space = contexts[contextId];\n    if (!space) {\n      return;\n    }\n\n    const context = space.graphContext;\n    context.eventCount += 1;\n\n    const sourceNode = context.nodes[sourceNodeId];\n    if (!sourceNode) {\n      return;\n    }\n    const destinationParam = context.params[destinationParamId];\n    if (!destinationParam) {\n      return;\n    }\n    const destinationNodeId = destinationParam.nodeId;\n    const destinationNode = context.nodes[destinationNodeId];\n    if (!destinationNode) {\n      return;\n    }\n\n
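    // Record the connection twice: in the source node's edge list and as a\n    // graphlib multigraph edge keyed by the source output index.\n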
    sourceNode.edges.push(nodeParamConnected);\n    context.graph.setEdge(\n      sourceNodeId,\n      destinationNodeId,\n      {\n        sourceOutputIndex,\n        destinationType: Audion.GraphEdgeType.PARAM,\n        destinationParamId,\n        destinationParamIndex: destinationNode.params.findIndex(\n          ({paramId}) => paramId === destinationParamId,\n        ),\n      } as Audion.GraphEdge,\n      sourceOutputIndex.toString(),\n    );\n    return context;\n  },\n\n  [WebAudioDebuggerEvent.nodeParamDisconnected]: (\n    helpers,\n    contexts,\n    nodeParamDisconnected,\n  ) => {\n    const {\n      contextId,\n      sourceId: sourceNodeId,\n      sourceOutputIndex = 0,\n      destinationId: destinationParamId,\n    } = nodeParamDisconnected;\n\n    DLOG(`An AudioNode is disconnected from an AudioParam.`, {\n      contextId,\n      sourceNodeId,\n      destinationParamId,\n    });\n\n    const space = contexts[contextId];\n    if (!space) {\n      return;\n    }\n\n    const context = space.graphContext;\n    context.eventCount += 1;\n\n    const sourceNode = context.nodes[sourceNodeId];\n    if (!sourceNode) {\n      return;\n    }\n\n    const {edges} = sourceNode;\n    removeAll(\n      edges,\n      (edge) =>\n        edge.destinationId === destinationParamId &&\n        edge.sourceOutputIndex === sourceOutputIndex,\n    );\n    // The graph edge was keyed by the param's owning node when it was\n    // connected, so resolve the param back to that node before removing.\n    const destinationParam = context.params[destinationParamId];\n    if (destinationParam) {\n      context.graph.removeEdge(\n        sourceNodeId,\n        destinationParam.nodeId,\n        sourceOutputIndex.toString(),\n      );\n    }\n    return context;\n  },\n\n  [WebAudioDebuggerEvent.nodesConnected]: (\n    helpers,\n    contexts,\n    nodesConnected,\n  ) => {\n    const {\n      contextId,\n      sourceId,\n      sourceOutputIndex = 0,\n      destinationId,\n      destinationInputIndex = 0,\n    } = nodesConnected;\n\n    DLOG(`Two AudioNodes are connected.`, {\n      contextId,\n      sourceId,\n      destinationId,\n    });\n\n    const space = contexts[contextId];\n    if (!space) {\n      return;\n    }\n\n    const context = space.graphContext;\n    context.eventCount += 1;\n\n    const sourceNode = context.nodes[sourceId];\n    if (!sourceNode) {\n      return;\n    }\n    const destinationNode = context.nodes[destinationId];\n    if (!destinationNode) {\n      return;\n    }\n\n    sourceNode.edges.push(nodesConnected);\n    context.graph.setEdge(\n      sourceId,\n      destinationId,\n      {\n        sourceOutputIndex,\n        destinationType: Audion.GraphEdgeType.NODE,\n        destinationInputIndex,\n      } as Audion.GraphNodeEdge,\n      `${sourceOutputIndex},${destinationInputIndex}`,\n    );\n    return context;\n  },\n\n  [WebAudioDebuggerEvent.nodesDisconnected]: (\n    helpers,\n    contexts,\n    nodesDisconnected,\n  ) => {\n    const {\n      contextId,\n      sourceId,\n      sourceOutputIndex = 0,\n      destinationId,\n      destinationInputIndex = 0,\n    } = nodesDisconnected;\n\n    DLOG(\n      `Two AudioNodes are disconnected. The destination ` +\n        `can be null, which means all outgoing connections ` +\n        `from the source are disconnected.`,\n      {contextId, sourceId, destinationId},\n    );\n\n    const space = contexts[contextId];\n    if (!space) {\n      return;\n    }\n\n    const context = space.graphContext;\n    context.eventCount += 1;\n\n    const sourceNode = context.nodes[sourceId];\n    if (!sourceNode) {\n      return;\n    }\n\n    const {edges} = sourceNode;\n    removeAll(\n      edges,\n      (edge) =>\n        edge.destinationId === destinationId &&\n        edge.sourceOutputIndex === sourceOutputIndex &&\n        edge.destinationInputIndex === destinationInputIndex,\n    );\n    context.graph.removeEdge(\n      sourceId,\n      destinationId,\n      `${sourceOutputIndex},${destinationInputIndex}`,\n    );\n    return context;\n  },\n\n  [PageDebuggerEvent.frameNavigated]: (helpers, contexts) => {\n    console.debug(\n      getTimestampAsString() +\n        `Checking if tracked Audio Contexts (${Object.keys(contexts)\n          .map((contextId) => contextId.slice(-6))\n          .join(`, `)}) exist after frame navigated.`,\n    );\n\n    return ensureContextsExist(contexts, helpers);\n  },\n\n  [PageDebuggerEvent.loadEventFired]: (helpers, contexts) => {\n    console.debug(\n      getTimestampAsString() +\n        `Checking if tracked Audio Contexts (${Object.keys(contexts)\n          .map((contextId) => contextId.slice(-6))\n          .join(`, `)}) exist after load event.`,\n    );\n\n    return ensureContextsExist(contexts, helpers);\n  },\n\n  [ChromeDebuggerAPIEventName.detached]: (\n    helpers,\n    contexts,\n    debuggerDetached,\n  ) => {\n    if (debuggerDetached.reason === `target_closed`) {\n      console.debug(\n        getTimestampAsString() +\n          `Checking if tracked Audio Contexts (${Object.keys(contexts)\n            .map((contextId) => contextId.slice(-6))\n            .join(\n              `, `,\n            )}) exist after debugger detached because target was closed.`,\n      );\n\n      return ensureContextsExist(contexts, helpers);\n    }\n  },\n};\n\nfunction ensureContextsExist(\n  contexts: MutableContexts,\n  helpers: EventHelpers,\n): void | Audion.GraphContext | Observable<Audion.GraphContext> {\n  return merge(\n    ...Object.keys(contexts).map((contextId) =>\n      helpers.realtimeData.pollContext(contextId).pipe(\n        take(1),\n        ignoreElements(),\n        catchError((reason) => {\n          reason = WebAudioRealtimeDataReason.parseReason(reason);\n\n          if (WebAudioRealtimeDataReason.isCannotFindReason(reason)) {\n            const space = contexts[contextId];\n            if (space) {\n              space?.graphContextDestroyed$?.next(\n                GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA,\n              );\n            }\n          } else if (WebAudioRealtimeDataReason.isRealtimeOnlyReason(reason)) {\n            // OfflineAudioContexts emit this error if they are still alive.\n          } else {\n            console.error(\n              getTimestampAsString() +\n                `Unexpected error determining if context \"${contextId}\" is ` +\n                `stale with devtools protocol WebAudio.getRealtimeData. ` +\n                `\"${WebAudioRealtimeDataReason.toString(reason)}\"`,\n            );\n          }\n\n          return EMPTY;\n        }),\n      ),\n    ),\n  );\n}\n\nfunction removeAll<T>(array: T[], fn: (value: T) => boolean) {\n  if (array) {\n
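    // Splice matching elements one at a time; findIndex restarts after each\n    // removal so every match is removed.\n    let index = 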
array.findIndex(fn);\n    while (index >= 0) {\n      array.splice(index, 1);\n      index = array.findIndex(fn);\n    }\n  }\n}\n\n/**\n * Collect WebAudio debugger events into per context graphs.\n */\nexport function integrateWebAudioGraph(\n  webAudioRealtimeData: WebAudioRealtimeData,\n): OperatorFunction<IntegratableEvent, Audion.GraphContext> {\n  const helpers = {realtimeData: webAudioRealtimeData};\n  const contexts: MutableContexts = {};\n  return pipe(\n    mergeMap(({method, params}) => {\n      if (EVENT_HANDLERS[method]) {\n        const result = EVENT_HANDLERS[method]?.(\n          helpers,\n          contexts,\n          params as any,\n        );\n        if (typeof result !== 'object' || result === null) return EMPTY;\n        if (isObservable(result)) {\n          return result;\n        }\n        return of(result);\n      }\n      return EMPTY;\n    }),\n  );\n}\n"
  },
  {
    "path": "src/devtools/WebAudioRealtimeData.ts",
    "content": "import Protocol from 'devtools-protocol';\nimport {bindCallback, concatMap, interval, Observable} from 'rxjs';\nimport {map, timeout} from 'rxjs/operators';\n\nimport {invariant} from '../utils/error';\n\nimport {chrome} from '../chrome';\nimport {WebAudioDebuggerMethod} from '../chrome/DebuggerWebAudioDomain';\n\nimport {Audion} from './Types';\nimport {bindChromeCallback} from '../utils/rxChrome';\n\n/**\n * Error messages returned by WebAudio.getRealtimeData devtool protocol method.\n */\nexport enum RealtimeDataErrorMessage {\n  /** Error returned when a AudioContext cannot be find. */\n  CANNOT_FIND = 'Cannot find BaseAudioContext with such id.',\n  /** Error returned when realtime data is requested from an OfflineAudioContext. */\n  REALTIME_ONLY = 'ContextRealtimeData is only avaliable for an AudioContext.',\n}\n\ninterface RealtimeDataReason<Message extends RealtimeDataErrorMessage> {\n  message: Message;\n}\n\nconst {tabId} = chrome.devtools.inspectedWindow;\n\nconst sendCommand = bindChromeCallback<\n  [{tabId: string}, WebAudioDebuggerMethod.getRealtimeData, any?],\n  [{realtimeData: Protocol.WebAudio.ContextRealtimeData}]\n>(chrome.debugger.sendCommand, chrome.debugger);\n\nexport const INITIAL_CONTEXT_REALTIME_DATA = {\n  callbackIntervalMean: 0,\n  callbackIntervalVariance: 0,\n  currentTime: 0,\n  renderCapacity: 0,\n} as Audion.ContextRealtimeData;\n\nexport class WebAudioRealtimeData {\n  private readonly intervalMS = 1000;\n  private readonly timeoutMS = 500;\n\n  private readonly interval$ = interval(this.intervalMS);\n\n  pollContext(contextId: string) {\n    return this.interval$.pipe(\n      concatMap(() =>\n        sendCommand({tabId}, WebAudioDebuggerMethod.getRealtimeData, {\n          contextId,\n        }).pipe(\n          timeout({first: this.timeoutMS}),\n          map((result) => {\n            invariant(\n              result && result !== null,\n              'ContextRealtimeData not returned for WebAudio context %0.',\n              contextId,\n            );\n            return result.realtimeData;\n          }),\n        ),\n      ),\n    );\n  }\n}\n\nexport const WebAudioRealtimeDataReason = {\n  parseReason(reason: any) {\n    if (reason && reason.message && !reason.code) {\n      try {\n        reason = JSON.parse(reason.message);\n      } catch (e) {}\n    }\n    return reason;\n  },\n\n  toString(reason: any) {\n    return reason && reason.message ? reason.message : reason;\n  },\n\n  isRealtimeOnlyReason(\n    reason: any,\n  ): reason is RealtimeDataReason<RealtimeDataErrorMessage.REALTIME_ONLY> {\n    return reason && reason.message === RealtimeDataErrorMessage.REALTIME_ONLY;\n  },\n\n  isCannotFindReason(\n    reason: any,\n  ): reason is RealtimeDataReason<RealtimeDataErrorMessage.CANNOT_FIND> {\n    return reason && reason.message === RealtimeDataErrorMessage.CANNOT_FIND;\n  },\n};\n"
  },
  {
    "path": "src/devtools/deserializeGraphContext.ts",
    "content": "import * as graphlib from 'graphlib';\nimport {Audion} from './Types';\n\nexport interface SerializedGraphContext extends Audion.GraphContext {\n  graph: any;\n}\n\nexport function deserializeGraphContext(\n  graphContext: SerializedGraphContext,\n): Audion.GraphContext {\n  if (graphContext.graph) {\n    return {\n      ...graphContext,\n      // TODO: dagre's graphlib typings are inaccurate, which is why we use\n      // graphlib directly here. Revert to dagre's types once the issue is fixed:\n      // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/47439\n      graph: graphlib.json.read(graphContext.graph),\n    };\n  } else {\n    return graphContext;\n  }\n}\n"
  },
  {
    "path": "src/devtools/layoutGraphContext.ts",
    "content": "import * as dagre from 'dagre';\n\nimport {Audion} from './Types';\n\nexport function layoutGraphContext(\n  context: Audion.GraphContext,\n): Audion.GraphContext {\n  if (context.context && context.graph) {\n    // TODO: dagre's graphlib typings are inaccurate, which is why we use\n    // graphlib's types. Revert to dagre's types once the issue is fixed:\n    // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/47439\n    dagre.layout(context.graph as unknown as dagre.graphlib.Graph);\n  }\n  return context;\n}\n"
  },
  {
    "path": "src/devtools/main.ts",
    "content": "import {merge} from 'rxjs';\nimport {\n  map,\n  scan,\n  take,\n  shareReplay,\n  share,\n  mergeMap,\n  auditTime,\n} from 'rxjs/operators';\n\nimport {Audion} from './Types';\n\nimport {DebuggerAttachEventController} from './DebuggerAttachEventController';\nimport {DevtoolsGraphPanel} from './DevtoolsGraphPanel';\nimport {serializeGraphContext} from './serializeGraphContext';\nimport {integrateWebAudioGraph} from './WebAudioGraphIntegrator';\nimport {WebAudioRealtimeData} from './WebAudioRealtimeData';\nimport {partitionMap} from './partitionMap';\nimport {DebuggerEventsObservable} from './DebuggerEvents';\n\nconst attachController = new DebuggerAttachEventController();\n\nconst pageEvent$ = new DebuggerEventsObservable(attachController, {\n  domain: 'page',\n});\nconst webAudioEvents$ = new DebuggerEventsObservable(attachController, {\n  domain: 'webAudio',\n});\nconst webAudioRealtimeData = new WebAudioRealtimeData();\n\nconst serializedGraphContext$ = merge(\n  pageEvent$,\n  webAudioEvents$,\n  attachController.debuggerEvent$,\n).pipe(\n  integrateWebAudioGraph(webAudioRealtimeData),\n  // Split graph contexts into an observable for each unique graph context id.\n  partitionMap({\n    getPartitionId: ({id}) => id,\n    isPartitionComplete: ({context}) => context === null,\n  }),\n  // For each partition, start a timer on the first value in that partition but\n  // emit the last value during that timer when the timer completes.\n  map(auditTime(16)),\n  // Merge all the partitions together.\n  mergeMap((source) => source),\n  map(serializeGraphContext),\n  share(),\n);\n\nconst allGraphs$ = merge(serializedGraphContext$).pipe(\n  // Persistently observe web audio events and integrate events into context\n  // objects. Collect those into an object of all current graphs.\n  scan<Audion.GraphContext, {[key: string]: Audion.GraphContext}>(\n    (allGraphs, graphContext) => {\n      if (graphContext.graph) {\n        return {...allGraphs, [graphContext.id]: graphContext};\n      }\n      const {[graphContext.id]: _, ...otherGraphs} = allGraphs;\n      return otherGraphs;\n    },\n    {},\n  ),\n  shareReplay(),\n);\n\n// There must be at least one subscription to keep allGraphs$ up to date if\n// panel is connected or otherwise.\nallGraphs$.subscribe();\n\n// When the panel is opened it'll connect to the devtools page, immediately send\n// the current set of graphs.\nconst panel = new DevtoolsGraphPanel(\n  merge(\n    allGraphs$.pipe(\n      map((allGraphs) => ({allGraphs})),\n      take(1),\n    ),\n    serializedGraphContext$.pipe(map((graphContext) => ({graphContext}))),\n  ),\n);\n\n// When the panel is first shown, grant attachController permission to attach to\n// the debugger.\npanel.onPanelShown$.pipe(take(1)).subscribe({\n  next() {\n    attachController.permission$.grantTemporary();\n  },\n});\n\n// Respond to requests from the panel accordingly.\npanel.requests$.subscribe({\n  next(value) {\n    if (value.type === Audion.DevtoolsRequestType.COLLECT_GARBAGE) {\n      attachController.sendCommand('HeapProfiler.collectGarbage').subscribe();\n    }\n  },\n});\n"
  },
  {
    "path": "src/devtools/partitionMap.ts",
    "content": "import {Observable, OperatorFunction, Subject} from 'rxjs';\n\ninterface PartitionMapConfig<V> {\n  /** Callback that returns id string of partition to push to. */\n  getPartitionId: (value: V) => string;\n  /** Callback that determines if as of that value the partition is complete. */\n  isPartitionComplete: (value: V) => boolean;\n}\n\n/**\n * Split input observable's values into an observable of observables of those values.\n *\n * @param config when to create partition observables and complete them\n * @returns an observable that pushs an observable for each created partition\n */\nexport function partitionMap<V>({\n  getPartitionId,\n  isPartitionComplete,\n}: PartitionMapConfig<V>): OperatorFunction<V, Observable<V>> {\n  return (source: Observable<V>) => {\n    const partitions = {} as {[key: string]: Subject<V>};\n    return new Observable<Observable<V>>((subscriber) => {\n      return source.subscribe({\n        next(graphChange) {\n          const key = getPartitionId(graphChange);\n          const isComplete = isPartitionComplete(graphChange);\n\n          // If the key is not in the partition cache, add a new one for that\n          // key and push it.\n          if (!(key in partitions)) {\n            partitions[key] = new Subject<V>();\n            subscriber.next(partitions[key]);\n          }\n\n          // Push the value through the selected partition.\n          partitions[key].next(graphChange);\n\n          // When completeSelector returns true, complete the partition and\n          // delete it from the cache.\n          if (isComplete) {\n            partitions[key].complete();\n            delete partitions[key];\n          }\n        },\n\n        // When source completes, all partitions complete.\n        complete: () => subscriber.complete(),\n\n        // When source errors, all partitions error.\n        error: (reason) => subscriber.error(reason),\n      });\n    });\n  };\n}\n"
  },
  {
    "path": "src/devtools/serializeGraphContext.js",
    "content": "import dagre from 'dagre';\n\n/**\n * @param {Audion.GraphContext} graphContext\n * @return {Audion.GraphContext}\n */\nexport function serializeGraphContext(graphContext) {\n  if (graphContext.graph) {\n    return {\n      ...graphContext,\n      graph: dagre.graphlib.json.write(graphContext.graph),\n    };\n  }\n  return graphContext;\n}\n"
  },
  {
    "path": "src/devtools/setOptionsToGraphContext.ts",
    "content": "import * as dagre from 'dagre';\n\nimport {Audion} from './Types';\n\nexport function setOptionsToGraphContext([context, layoutOptions]: [\n  Audion.GraphContext,\n  dagre.GraphLabel,\n]): Audion.GraphContext {\n  if (context.context && context.graph) {\n    context.graph.setGraph(layoutOptions);\n  }\n  return context;\n}\n"
  },
  {
    "path": "src/devtools.html",
    "content": "<html>\n  <head>\n    <title>DevTools: Audion Extension</title>\n  </head>\n  <body>\n    <script src=\"audion-devtools.js\"></script>\n  </body>\n</html>\n"
  },
  {
    "path": "src/extraSettingPage/options.html",
    "content": "<!DOCTYPE html>\n<html>\n<head>\n    <title>Audion Addition Setting Options</title>\n</head>\n<body>\n    <label>\n        Click here to show more debug info:\n        <input type=\"checkbox\" id=\"showDebugInfo\">\n    </label>\n    <script src=\"options.js\"></script>\n</body>\n</html>"
  },
  {
    "path": "src/extraSettingPage/options.js",
    "content": "// prettier-ignore\n/**\n * Initializes the options page by setting up event listeners and\n * restoring saved options.\n */\ndocument.addEventListener(\n  'DOMContentLoaded',\n  /**\n   * Handles the DOMContentLoaded event to set up the options page.\n   */\n  function() {\n    /**\n     * Function to save the options in storage\n     */\n    function saveOptions() {\n      const checkboxValue = document.getElementById('showDebugInfo').checked;\n      localStorage.setItem('showExtraDebugLog', checkboxValue);\n    }\n\n    /**\n     * Function to restore the options from storage\n     */\n    function restoreOptions() {\n      document.getElementById('showDebugInfo').checked =\n          localStorage.getItem('showExtraDebugLog') === 'true';\n    }\n\n    /**\n     * Event listeners\n     */\n    document\n      .getElementById('showDebugInfo')\n      .addEventListener('change', saveOptions);\n    restoreOptions();\n  },\n);\n"
  },
  {
    "path": "src/panel/GraphSelector.ts",
    "content": "import {Observable, combineLatest, BehaviorSubject} from 'rxjs';\nimport {map, shareReplay, distinctUntilChanged} from 'rxjs/operators';\n\nimport {Audion} from '../devtools/Types';\n\ntype GraphMap = {[key: string]: Audion.GraphContext};\n\ntype GraphMapRX = Observable<GraphMap>;\n\nconst EMPTY_GRAPH = {\n  graph: {value: {width: 0, height: 0}, nodes: [], edges: []},\n} as Audion.GraphContext;\n\n/**\n * Control which graph is observed.\n */\nexport class GraphSelector {\n  options$: Observable<string[]>;\n  graphId$: Observable<string>;\n  graph$: Observable<Audion.GraphContext>;\n\n  private _graphIdSubject: BehaviorSubject<string>;\n\n  get graphId(): string {\n    return this._graphIdSubject.value;\n  }\n\n  /**\n   * Create a GraphSelector.\n   * @param options\n   */\n  constructor({allGraphs$: allGraphs$}: {allGraphs$: GraphMapRX}) {\n    this.options$ = allGraphs$.pipe(\n      map((allGraphs) =>\n        Object.entries(allGraphs)\n          .filter(([key, graphContext]) => graphContext)\n          .map(([key]) => key),\n      ),\n    );\n\n    const graphIdSubject = new BehaviorSubject('');\n    this._graphIdSubject = graphIdSubject;\n    this.graphId$ = graphIdSubject;\n\n    const props$ = combineLatest({\n      id: this.graphId$,\n      allGraphs: allGraphs$,\n    });\n\n    this.graph$ = props$.pipe(\n      map(({id, allGraphs}) => allGraphs[id] || EMPTY_GRAPH),\n      distinctUntilChanged(),\n      shareReplay(1),\n    );\n  }\n\n  /**\n   * Select the graph to observe.\n   * @param graphId\n   */\n  select(graphId: string) {\n    if (graphId !== this.graphId) {\n      this._graphIdSubject.next(graphId);\n    }\n  }\n}\n"
  },
  {
    "path": "src/panel/Observer.runtime.ts",
    "content": "import {Observable} from 'rxjs';\nimport {share} from 'rxjs/operators';\n\nimport {chrome} from '../chrome';\n\n/**\n * Connect to chrome runtime through an observable.\n * @param requests$ observable of requests to send to devtools extension context\n * @returns observable of messages recevied from devtools extension context\n */\nexport function connect<S, T>(requests$: Observable<S>): Observable<T> {\n  return new Observable<T>((subscriber) => {\n    const port = chrome.runtime.connect();\n\n    // Send values pushed by requests$ to devtools context.\n    const subjectSubscription = requests$.subscribe({\n      next(value) {\n        port.postMessage(value);\n      },\n    });\n\n    // Publish messages from devtools context through returned observable.\n    const onMessage: (arg0: any, arg1: Chrome.RuntimePort) => void = (\n      message,\n    ) => {\n      subscriber.next(message);\n    };\n    const onDisconnect = () =>\n      subscriber.error(new Error('chrome.runtime disconnected'));\n\n    port.onMessage.addListener(onMessage);\n    port.onDisconnect.addListener(onDisconnect);\n\n    return () => {\n      subjectSubscription.unsubscribe();\n\n      port.onMessage.removeListener(onMessage);\n      port.onDisconnect.removeListener(onDisconnect);\n      port.disconnect();\n    };\n  }).pipe(share());\n}\n"
  },
  {
    "path": "src/panel/Types.ts",
    "content": "import * as PIXI from 'pixi.js';\n\n/** @namespace AudionPanel */\n\n/**\n * @typedef AudionPanel.Point\n * @property {number} x\n * @property {number} y\n */\n\n/**\n * @typedef AudionPanel.Node\n * @property {AudionPanel.Point} position\n * @property {AudionPanel.Point} size\n */\n\n/**\n * @typedef AudionPanel.Port\n * @property {AudionPanel.Node} node\n * @property {AudionPanel.Point} offset\n * @property {number} radius\n * @property {Array} edges\n */\n\nexport namespace AudionPanel {\n  export interface Point {\n    x: number;\n    y: number;\n  }\n\n  export interface Node {\n    position: Point;\n    size: Point;\n\n    updatePortDisplay(portType: PortType, portIndex: number): void;\n  }\n\n  export enum PortType {\n    INPUT = 'input',\n    OUTPUT = 'output',\n    PARAM = 'param',\n  }\n\n  export interface Port {\n    node: Node;\n    offset: Point;\n    radius: number;\n    edges: any[];\n\n    updateNodeDisplay(): void;\n    drawConnect(graphics: PIXI.Graphics): void;\n  }\n}\n"
  },
  {
    "path": "src/panel/components/WholeGraphButton.css",
    "content": ".wholeGraphButton {\n  position: absolute;\n  top: 5px;\n  left: 5px;\n  cursor: pointer;\n  opacity: 0.8;\n  border-radius: 3px;\n  width: 20px;\n  height: 20px;\n}\n"
  },
  {
    "path": "src/panel/components/WholeGraphButton.ts",
    "content": "import {fromEvent} from 'rxjs';\n\nimport style from './WholeGraphButton.css';\nimport wholeGraphButtonImage from './WholeGraphButton.svg';\n\n/**\n * Render a button. Can be observed for when the button is clicked.\n */\nexport class WholeGraphButton {\n  private readonly view = document.createElement('div');\n\n  readonly click$ = fromEvent(this.view, 'click');\n\n  /** Create a WholeGraphButton. */\n  constructor() {\n    this.view.className = style.wholeGraphButton;\n    this.view.innerHTML = `<img src=\"${wholeGraphButtonImage}\"\n      alt=\"Resize to fit\"\n      title=\"Resize to fit\" />`;\n  }\n\n  /** Render the button. */\n  render() {\n    return this.view;\n  }\n}\n"
  },
  {
    "path": "src/panel/components/collectGarbage.css",
    "content": ":global(.-theme-with-dark-background) .collectIcon {\n  --override-icon-mask-background-color: rgb(145 145 145);\n}\n.collectIcon {\n  display: inline-block;\n  -webkit-mask: url('./collectGarbage.svg') no-repeat center;\n  mask: url('./collectGarbage.svg') no-repeat center;\n  width: 28px;\n  height: 24px;\n  background-color: var(--override-icon-mask-background-color);\n  --override-icon-mask-background-color: rgb(110 110 110);\n}\n:global(.toolbar-button):hover .collectIcon {\n  background-color: var(--color-text-primary);\n}\n"
  },
  {
    "path": "src/panel/components/collectGarbage.ts",
    "content": "import {fromEvent, merge, NEVER, Observable} from 'rxjs';\nimport {map, startWith, switchMap} from 'rxjs/operators';\n\nimport {Audion} from '../../devtools/Types';\n\nimport {setElementHTML} from './domUtils';\nimport style from './collectGarbage.css';\n\n/**\n * @returns html representation of the collect garbage icon\n */\nfunction collectGarbageImageHTML(): string {\n  return `<span class=\"${style.collectIcon}\"></span>`;\n}\n\n/**\n * @param buttonElement$ observable of html elements to listen to events and\n * render a icon in\n * @returns observable of elements when they are modified or actions to be acted\n * on by the extension's devtools context\n */\nexport function renderCollectGarbage(\n  buttonElement$: Observable<HTMLElement>,\n): Observable<HTMLElement | Audion.DevtoolsCollectGarbageRequest> {\n  // Map clicks to actions to request devtools to collect garbage.\n  const collectGarbageAction$ = buttonElement$.pipe(\n    switchMap((element) => fromEvent(element, 'click')),\n    map(\n      () => ({type: 'collectGarbage'} as Audion.DevtoolsCollectGarbageRequest),\n    ),\n  );\n\n  // Observable that pushs the button icon once and never completes. If the\n  // observable completes, setElementHTML will clean up and remove the html.\n  const collectGarbageIcon$ = NEVER.pipe(startWith(collectGarbageImageHTML()));\n\n  return merge(\n    setElementHTML(buttonElement$, collectGarbageIcon$),\n    collectGarbageAction$,\n  );\n}\n"
  },
  {
    "path": "src/panel/components/detailPanel.css",
    "content": ".detailPanel > * {\n  padding: 0 1rem;\n}\n.detailPanel h1,\n.detailPanel h2,\n.detailPanel h3,\n.detailPanel h4,\n.detailPanel h5,\n.detailPanel h6 {\n  font-weight: normal;\n}\n.detailPanel table {\n  font-size: 12px;\n}\n.detailPanel th {\n  color: var(--color-text-secondary);\n  font-weight: normal;\n  text-align: left;\n}\n.detailPanel th,\n.detailPanel td {\n  padding: 0.2rem;\n}\n"
  },
  {
    "path": "src/panel/components/detailPanel.ts",
    "content": "import {merge, NEVER, Observable} from 'rxjs';\nimport {distinctUntilChanged, map, startWith, switchMap} from 'rxjs/operators';\n\nimport {Audion} from '../../devtools/Types';\nimport {setElementHTML, toggleElementClassList} from './domUtils';\nimport style from './detailPanel.css';\n\nconst contextTypeNameMap = {\n  realtime: 'AudioContext',\n  offline: 'OfflineAudioContext',\n};\n\n/**\n * @param context web audio context's context information\n * @returns html representation of context information\n */\nfunction graphContextHTML({\n  contextType,\n  contextId,\n  contextState,\n  sampleRate,\n  callbackBufferSize,\n  maxOutputChannelCount,\n}: Audion.GraphContext['context']): string {\n  return `<h2>${contextTypeNameMap[contextType] || contextType}</h2>\n<p>${contextId.slice(-6)}</p>\n<hr>\n<table cellspacing=\"0\" cellpadding=\"0\">\n<tr><th>State</th><td>${contextState}</td></tr>\n<tr><th>Sample Rate</th><td>${sampleRate}</td></tr>\n<tr><th>Callback Buffer Size</th><td>${callbackBufferSize}</td></tr>\n<tr><th>Max Output Channels</th><td>${maxOutputChannelCount}</td></tr>\n</table>\n`;\n}\n\n/**\n * @param node web audio node's node information\n * @returns html representation of web audio node information\n */\nfunction graphNodeBaseHTML({\n  nodeType,\n  nodeId,\n  channelCount,\n  channelCountMode,\n  channelInterpretation,\n  numberOfInputs,\n  numberOfOutputs,\n}: Audion.GraphNode['node']): string {\n  return `<h2>${nodeType}</h2>\n<p>${nodeId}</p>\n<hr>\n<table cellspacing=\"0\" cellpadding=\"0\">\n<tr><th>Channel Count</th><td>${channelCount}</td></tr>\n<tr><th>Channel Count Mode</th><td>${channelCountMode}</td></tr>\n<tr><th>Channel Interpretation</th><td>${channelInterpretation}</td></tr>\n<tr><th>Number of Inputs</th><td>${numberOfInputs}</td></tr>\n<tr><th>Number of Outputs</th><td>${numberOfOutputs}</td></tr>\n</table>\n`;\n}\n\n/**\n * @param param web audio node's single parameter information\n * @returns html representation of parameter information\n */\nfunction graphParamHTML({\n  paramType,\n  paramId,\n  rate,\n  defaultValue,\n  minValue,\n  maxValue,\n}: Audion.GraphNode['params'][number]): string {\n  return `<h4>${paramType}</h4>\n<p>${paramId}</p>\n<hr>\n<table cellspacing=\"0\" cellpadding=\"0\">\n<tr><th>Automation Rate</th><td>${rate}</td></tr>\n<tr><th>Default Value</th><td>${defaultValue}</td></tr>\n<tr><th>Minimum Value</th><td>${minValue}</td></tr>\n<tr><th>Maximum Value</th><td>${maxValue}</td></tr>\n</table>\n`;\n}\n\n/**\n * @param node web audio node\n * @returns html representation of a node's node and parameters information\n */\nfunction graphNodeHTML({node, params}: Audion.GraphNode): string {\n  return `${graphNodeBaseHTML(node)}\n${\n  params.length\n    ? 
`<h3>Parameters:</h3>\n${params.map(graphParamHTML).join('')}`\n    : ''\n}\n`;\n}\n\n/**\n * @param element$ observable of html element to render detail panel into\n * @param contextData$ observable of context data to render\n * @param nodeData$ observable of node data to render\n * @returns observable of html elements as they are modified\n */\nexport function renderDetailPanel(\n  element$: Observable<HTMLElement>,\n  contextData$: Observable<Audion.GraphContext>,\n  nodeData$: Observable<Audion.GraphNode>,\n): Observable<HTMLElement> {\n  return merge(\n    toggleElementClassList(\n      element$,\n      NEVER.pipe(startWith([style.detailPanel])),\n    ),\n    setElementHTML(\n      element$,\n      contextData$.pipe(\n        distinctUntilChanged((previous, current) =>\n          previous && previous.context && current && current.context\n            ? previous.context.contextId === current.context.contextId\n            : false,\n        ),\n        switchMap((graphContext) =>\n          nodeData$.pipe(\n            distinctUntilChanged((previous, current) =>\n              previous && previous.node && current && current.node\n                ? previous.node.nodeId === current.node.nodeId\n                : false,\n            ),\n            map((graphNode) =>\n              graphNode && graphNode.node\n                ? graphNodeHTML(graphNode)\n                : graphContext && graphContext.context\n                ? graphContextHTML(graphContext.context)\n                : '(no recordings)',\n            ),\n          ),\n        ),\n      ),\n    ),\n  );\n}\n"
  },
  {
    "path": "src/panel/components/domUtils.ts",
    "content": "import {defer, Observable, of} from 'rxjs';\nimport {finalize, map, scan, switchMap} from 'rxjs/operators';\n\n/**\n * Create a factory that modifies the most latest element from an observable of elements to value from an observable of other values.\n * @param property html element property\n * @returns factory that modifies a latest element with the latest data\n */\nexport function setElementProperty<\n  E extends HTMLElement,\n  K extends keyof E,\n  T extends E[K],\n>(property: K) {\n  return function (element$: Observable<E>, data$: Observable<T>) {\n    return element$.pipe(\n      switchMap((view) =>\n        data$.pipe(\n          map((value) => {\n            if (view) {\n              view[property] = value;\n            }\n            return view;\n          }),\n          finalize(() => {\n            if (view) {\n              view[property] = null;\n            }\n          }),\n        ),\n      ),\n    );\n  };\n}\n\n/**\n * Set that values can be added to and removed from.\n */\ninterface PropertySet<T> {\n  add(value: T): any;\n  remove(value: T): any;\n}\n\n/**\n * Description of a change to a PropertySet.\n */\ninterface PropertySetChange {\n  /** Items to remove from the PropertySet. */\n  deleteItems: string[];\n  /** Items to add to the PropertySet. */\n  addItems: string[];\n  /** All items to remove if the element changes or finalizes. */\n  allItems: string[];\n}\n\n/**\n * Create a factory that adds and removes the items contained in a observable of\n * array values to the latest element.\n * @param property html element property\n * @returns factory that adds and removes items on an elements property\n */\nexport function toggleElementPropertySet<\n  E extends HTMLElement,\n  K extends {\n    [key in keyof E]: E[key] extends PropertySet<string> ? key : never;\n  }[any],\n  T extends string[],\n>(property: K) {\n  return function (element$: Observable<E>, data$: Observable<T>) {\n    const valueDiff$ = data$.pipe(\n      scan(\n        ([previous], current) => {\n          const allItems = current;\n          const deleteItems = previous.filter(\n            (value) => !current.includes(value),\n          );\n          const addItems = allItems.filter(\n            (value) => !previous.includes(value),\n          );\n\n          return [current, {deleteItems, addItems, allItems}] as [\n            T,\n            PropertySetChange,\n          ];\n        },\n        [[], {deleteItems: [], addItems: []}] as [T, PropertySetChange],\n      ),\n      map(([, change]) => change),\n    );\n    return element$.pipe(\n      switchMap((view) =>\n        valueDiff$.pipe(\n          map((diff) => {\n            if (view) {\n              for (const value of diff.deleteItems) {\n                (view[property] as PropertySet<string>).remove(value);\n              }\n              for (const value of diff.addItems) {\n                (view[property] as PropertySet<string>).add(value);\n              }\n            }\n            return view;\n          }),\n          finalize(() => {}),\n        ),\n      ),\n    );\n  };\n}\n\n/**\n * Change to a html element property's map structure.\n */\ninterface PropertyMapChange {\n  /** Keys to remove from the property's map. */\n  deleteKeys: string[];\n  /** Keys to change to a given value. */\n  setKeys: [string, any][];\n  /** All keys. Used to remove all keys when the element changes or finalizes. 
export function assignElementProperty<\n  E extends HTMLElement,\n  K extends keyof E,\n  T extends {[key in keyof E[K]]?: E[K][key]},\n>(property: K) {\n  return function (element$: Observable<E>, data$: Observable<T>) {\n    const valueDiff$ = data$.pipe(\n      scan(\n        ([previous], current) => {\n          const allKeys = Object.keys(current);\n          const deleteKeys = Object.keys(previous).filter(\n            (key) => !(key in current),\n          );\n          const setKeys = allKeys\n            .filter((key) => current[key] !== previous[key])\n            .map((key) => [key, current[key]]);\n\n          return [current, {deleteKeys, setKeys, allKeys}] as [\n            T,\n            PropertyMapChange,\n          ];\n        },\n        [{}, {deleteKeys: [], setKeys: [], allKeys: []}] as [\n          T,\n          PropertyMapChange,\n        ],\n      ),\n      map(([, change]) => change),\n    );\n    return element$.pipe(\n      switchMap((view) => {\n        let finalizeKeys = [];\n        return valueDiff$.pipe(\n          map((diff) => {\n            if (view) {\n              for (const key of diff.deleteKeys) {\n                view[property][key] = undefined;\n              }\n              for (const [key, value] of diff.setKeys) {\n                view[property][key] = value;\n              }\n              finalizeKeys = diff.allKeys;\n            }\n            return view;\n          }),\n          finalize(() => {\n            if (view) {\n              for (const key of finalizeKeys) {\n                view[property][key] = undefined;\n              }\n            }\n          }),\n        );\n      }),\n    );\n  };\n}\n\n/**\n * Set latest element's innerText property to latest data string value.\n */\nexport const setElementText = setElementProperty('innerText');\n\n/**\n * Set latest element's innerHTML property to latest data string value.\n */\nexport const setElementHTML = setElementProperty('innerHTML');\n\n/**\n * Set latest element's className property to latest data string value.\n */\nexport const setElementClassName = setElementProperty('className');\n\n/**\n * Add and remove the latest data string array's items on the latest element's\n * classList set property.\n */\nexport const toggleElementClassList = toggleElementPropertySet('classList');\n\n/**\n * Set and delete changed keys of the latest data object on the latest\n * element's style object map property.\n */\nexport const assignElementStyle = assignElementProperty('style');\n\n/**\n * @param query css query selector to find an element for\n * @param dom document to query\n * @returns observable of a html element matching the query\n */\nexport function querySelector(\n  query: string,\n  dom: {querySelector(...args: any): any} = document,\n): Observable<HTMLElement> {\n  return defer(() => of(dom.querySelector(query)));\n}\n"
  },
  {
    "path": "src/panel/components/realtimeSummary.ts",
    "content": "import {map, Observable} from 'rxjs';\n\nimport {Audion} from '../../devtools/Types';\nimport {setElementHTML} from './domUtils';\n\n/**\n * Format web audio context performance data in html.\n * @param realtimeData realtime performance data for a web audio context\n * @returns rendered html summary of performance data\n */\nexport function realtimeSummaryHTML(realtimeData: Audion.ContextRealtimeData) {\n  if (!realtimeData) return '';\n  const currentTime = realtimeData.currentTime.toFixed(3);\n  const callbackIntervalMean = (\n    realtimeData.callbackIntervalMean * 1000\n  ).toFixed(3);\n  const callbackIntervalVariance = (\n    Math.sqrt(realtimeData.callbackIntervalVariance) * 1000\n  ).toFixed(3);\n  const renderCapacity = (realtimeData.renderCapacity * 100).toFixed(3);\n  return realtimeData\n    ? `<span>Current Time: ${currentTime} s</span>&nbsp;\n<span>&#10072;</span>&nbsp;\n<span>Callback Interval: &mu; = ${callbackIntervalMean} ms &sigma; = ${callbackIntervalVariance} ms</span>&nbsp;\n<span>&#10072;</span>&nbsp;\n<span>Render Capacity: ${renderCapacity} %</span>`\n    : '';\n}\n\n/**\n * Render a summary of web audio context performance.\n * @param element$ current html element to render summary into\n * @param data$ current performance data\n * @returns an element pushed to renderRealtimeSummary after its content is modified\n */\nexport function renderRealtimeSummary(\n  element$: Observable<HTMLElement>,\n  data$: Observable<Audion.ContextRealtimeData>,\n) {\n  const realtimeHTML$ = data$.pipe(map(realtimeSummaryHTML));\n  return setElementHTML(element$, realtimeHTML$);\n}\n"
  },
  {
    "path": "src/panel/components/selectGraph.css",
    "content": ".dropdownOption {\n  display: flex;\n  height: 2rem;\n  align-items: center;\n  cursor: pointer;\n  padding: 0 0.2rem;\n}\n\n.dropdownOption:hover,\n.dropdownButtonActive {\n  background: var(--color-background-elevation-2);\n}\n"
  },
  {
    "path": "src/panel/components/selectGraph.ts",
    "content": "import {\n  BehaviorSubject,\n  combineLatest,\n  fromEvent,\n  merge,\n  Observable,\n  of,\n} from 'rxjs';\nimport {\n  distinctUntilChanged,\n  filter,\n  map,\n  switchMap,\n  tap,\n} from 'rxjs/operators';\n\nimport {Audion} from '../../devtools/Types';\nimport {\n  assignElementStyle,\n  setElementClassName,\n  setElementHTML,\n  setElementText,\n  toggleElementClassList,\n} from './domUtils';\nimport style from './selectGraph.css';\n\n/**\n * Title of the dropdown toggle button when no graphs are selected or available\n * to select.\n */\nconst NO_GRAPHS_AVAILABLE_TITLE = '(no recordings)';\n\n/**\n * Render title for an audio graph with only the graphId.\n * @param graphId unique graph identifier\n * @returns rendered graph title\n */\nfunction graphIdTitle(graphId: string) {\n  return `unknown (${graphId.slice(-6)})`;\n}\n\n/**\n * Render title for an audio graph.\n * @param graph\n * @returns rendered graph title\n */\nfunction graphTitle(graph: Audion.GraphContext) {\n  return `${graph.context.contextType} (${graph.id.slice(-6)})`;\n}\n\n/**\n * Create a map of graph IDs to rendered graph titles.\n * @param allGraphs map of graph IDs to graph contexts\n * @returns map of graph IDs to rendered graph titles\n */\nfunction graphTitles(allGraphs: Audion.GraphContextsById): {\n  [key: string]: string;\n} {\n  return Object.entries(allGraphs)\n    .map(([id, graph]) => [id, graphTitle(graph)])\n    .reduce((accum, [id, title]) => {\n      accum[id] = title;\n      return accum;\n    }, {} as {[key: string]: string});\n}\n\n/**\n * Render current graph title or some copy to indicate no graph is selected or\n * no graph is available.\n * @param param currently selected graph ID and ID to title map\n * @returns rendered button title text\n */\nfunction buttonTitle([graphId, graphTitles]) {\n  return graphId\n    ? graphTitles[graphId] || graphIdTitle(graphId)\n    : NO_GRAPHS_AVAILABLE_TITLE;\n}\n\n/**\n * Render html list of graph options to select from.\n * @param graphTitles graph ID to title map\n * @returns html list of graph titles to select from\n */\nconst dropdownListHTML = function (graphTitles: {\n  [graphId: string]: string;\n}): string {\n  return Object.entries(graphTitles)\n    .map(\n      ([graphId, title]) =>\n        `<div class=\"${style.dropdownOption}\" data-option=\"${graphId}\"><div class=\"${style.dropdownOptionTitle}\">${title}</div></div>`,\n    )\n    .join('');\n};\n\n/**\n * Test if two maps of graph titles are equivalent.\n *\n * Used to reduce further processing of graph title information like updating\n * the dom with new html for the new set of titles.\n *\n * @param previousTitles map of graph titles\n * @param currentTitles map of graph titles\n * @returns true if maps match\n */\nfunction equalTitles(\n  previousTitles: {[graphId: string]: string},\n  currentTitles: {[graphId: string]: string},\n) {\n  const previousEntries = Object.entries(previousTitles);\n  const currentEntries = Object.entries(currentTitles);\n  return (\n    previousEntries.length === currentEntries.length &&\n    previousEntries.every(([previousKey, previousValue], index) => {\n      const [currentKey, currentValue] = currentEntries[index];\n      return previousKey === currentKey && previousValue === currentValue;\n    })\n  );\n}\n\n/**\n * Render a widget displaying the current selected graph title. 
When clicked\n * show a list of currently available graphs to select from.\n *\n * @param titleElement$ current html element to render dropdown button\n * title into\n * @param dropdownListElement$ current html element to render dropdown\n * list into\n * @param buttonElement$ current html element that when clicked opens the dropdown\n * @param graphId$ currently selected graph id\n * @param allGraphs$ current map of graph ids to graph contexts\n * @returns merged observable of dom mutations and graph selection actions\n */\nexport function renderSelectGraph(\n  titleElement$: Observable<HTMLElement>,\n  dropdownListElement$: Observable<HTMLElement>,\n  buttonElement$: Observable<HTMLElement>,\n  graphId$: Observable<string>,\n  allGraphs$: Observable<Audion.GraphContextsById>,\n) {\n  const distinctGraphId$ = graphId$.pipe(distinctUntilChanged());\n  const graphTitles$ = allGraphs$.pipe(\n    map(graphTitles),\n    distinctUntilChanged(equalTitles),\n  );\n  const graphIdAndTitles$ = combineLatest([distinctGraphId$, graphTitles$]);\n\n  const dropdownVisible$ = new BehaviorSubject(false);\n\n  const body$ = of(document.body);\n  const bodyClick$ = body$.pipe(\n    switchMap((element) => fromEvent(element, 'click')),\n  );\n\n  const openDropdownAction$ = buttonElement$.pipe(\n    switchMap((element) => fromEvent(element, 'click')),\n    tap(() => dropdownVisible$.next(!dropdownVisible$.value)),\n    // Side effect only: filter() drops every emission from this stream.\n    filter(() => false),\n    map(() => {}),\n  );\n  const closeDropdownAction$ = combineLatest([\n    buttonElement$,\n    dropdownListElement$,\n  ]).pipe(\n    switchMap(([buttonElement, dropdownElement]) =>\n      bodyClick$.pipe(\n        filter(\n          (ev) =>\n            ev.target instanceof Element &&\n            !(\n              buttonElement.contains(ev.target) ||\n              dropdownElement.contains(ev.target)\n            ),\n        ),\n      ),\n    ),\n    tap(() => dropdownVisible$.next(false)),\n    filter(() => false),\n    map(() => {}),\n  );\n\n  const eventAction$ = merge(openDropdownAction$, closeDropdownAction$);\n\n  const titleText$ = graphIdAndTitles$.pipe(map(buttonTitle));\n  const buttonClassName$ = dropdownVisible$.pipe(\n    map((visible) => (visible ? [style.dropdownButtonActive] : [])),\n  );\n  const dropdownListHTML$ = graphTitles$.pipe(map(dropdownListHTML));\n  const dropdownListIdSelected$ = dropdownListElement$.pipe(\n    switchMap((element) => fromEvent(element, 'click')),\n    map((clickEvent) => {\n      let {target} = clickEvent;\n      if (target instanceof HTMLElement) {\n        const optionElement = target.closest('[data-option]');\n        if (optionElement instanceof HTMLElement) {\n          const graphId = optionElement.dataset['option'];\n          if (graphId) {\n            return {type: 'selectGraph', graphId};\n          }\n        }\n      }\n    }),\n    filter(Boolean),\n    tap(() => dropdownVisible$.next(false)),\n  );\n  const dropdownClassName$ = dropdownVisible$.pipe(\n    map(\n      (visible) => `web-audio-select-graph-dropdown ${visible ? '' : 'hidden'}`,\n    ),\n  );\n  const dropdownPositionStyle$ = buttonElement$.pipe(\n    switchMap((buttonElement) =>\n      dropdownVisible$.pipe(\n        map((visible) => {\n          const rect = buttonElement.getBoundingClientRect();\n          return visible\n            ? 
{\n                top: `${rect.bottom}px`,\n                left: `${rect.left}px`,\n              }\n            : {};\n        }),\n      ),\n    ),\n  );\n\n  return merge(\n    setElementText(titleElement$, titleText$),\n    toggleElementClassList(buttonElement$, buttonClassName$),\n    setElementHTML(dropdownListElement$, dropdownListHTML$),\n    setElementClassName(dropdownListElement$, dropdownClassName$),\n    assignElementStyle(dropdownListElement$, dropdownPositionStyle$),\n    dropdownListIdSelected$,\n    eventAction$,\n  );\n}\n"
  },
  {
    "path": "src/panel/graph/AudioEdgeArrowGraphics.ts",
    "content": "import * as PIXI from 'pixi.js';\nimport {GraphColor} from './graphStyle';\n\nconst ARROW_LENGTH = 16;\nconst ARROW_HEIGHT = 8;\n\nconst ARROW_ANGLE_ROUNDING = 32;\n\nexport class EdgeArrowGraphics {\n  geometryCache = new Array(ARROW_ANGLE_ROUNDING * 2 + 1).fill(null);\n\n  drawFromPoint(\n    pointOnLine: PIXI.Point,\n    end: PIXI.Point,\n    graphics: PIXI.Graphics,\n  ) {\n    const arrowMagnitude = Math.hypot(\n      pointOnLine.y - end.y,\n      pointOnLine.x - end.x,\n    );\n    const arrowUnitX = (pointOnLine.x - end.x) / arrowMagnitude;\n    const arrowUnitY = (pointOnLine.y - end.y) / arrowMagnitude;\n\n    this.drawFromUnit(arrowUnitX, arrowUnitY, end, graphics);\n  }\n\n  drawFromUnit(\n    arrowUnitX: number,\n    arrowUnitY: number,\n    end: PIXI.Point,\n    graphics: PIXI.Graphics,\n  ) {\n    graphics.beginFill(GraphColor.INPUT_OUTPUT);\n    graphics.drawPolygon([\n      new PIXI.Point(\n        end.x + arrowUnitX * ARROW_LENGTH + arrowUnitY * ARROW_HEIGHT,\n        end.y + arrowUnitY * ARROW_LENGTH - arrowUnitX * ARROW_HEIGHT,\n      ),\n      new PIXI.Point(\n        end.x + arrowUnitX * ARROW_LENGTH - arrowUnitY * ARROW_HEIGHT,\n        end.y + arrowUnitY * ARROW_LENGTH + arrowUnitX * ARROW_HEIGHT,\n      ),\n      new PIXI.Point(end.x, end.y),\n    ]);\n    graphics.endFill();\n  }\n\n  getGeometry(pointOnLine: PIXI.Point, end: PIXI.Point) {\n    const magnitude = Math.hypot(pointOnLine.x - end.x, pointOnLine.y - end.y);\n    const unitX = (pointOnLine.x - end.x) / magnitude;\n    const unitY = (pointOnLine.y - end.y) / magnitude;\n    const angle = Math.atan2(unitY, unitX);\n    const angleSliceIndex = Math.round(\n      (angle / Math.PI) * ARROW_ANGLE_ROUNDING,\n    );\n    const cacheIndex = angleSliceIndex + ARROW_ANGLE_ROUNDING;\n    if (this.geometryCache[cacheIndex] === null) {\n      const graphics = new PIXI.Graphics();\n      const angleRounded = (angleSliceIndex / ARROW_ANGLE_ROUNDING) * Math.PI;\n      this.drawFromUnit(\n        Math.cos(angleRounded),\n        Math.sin(angleRounded),\n        new PIXI.Point(Math.cos(angleRounded) * 4, Math.sin(angleRounded) * 4),\n        graphics,\n      );\n      this.geometryCache[cacheIndex] = graphics.geometry;\n    }\n    return this.geometryCache[cacheIndex];\n  }\n\n  createGraphics(pointOnLine: PIXI.Point, end: PIXI.Point) {\n    const graphics = new PIXI.Graphics(this.getGeometry(pointOnLine, end));\n    graphics.position.set(end.x, end.y);\n    return graphics;\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/AudioEdgeCurvedLineGraphics.ts",
    "content": "import * as PIXI from 'pixi.js';\n\nimport {AudionPanel} from '../Types';\n\nimport {GraphColor} from './graphStyle';\n\nconst STEP_RATIO = 1 / 10;\n\nconst LINE_COEFF = createLineCoefficients();\n\ninterface LineCoefficients {\n  ax: number;\n  ay: number;\n  bx: number;\n  by: number;\n  cx: number;\n  cy: number;\n  dx: number;\n  dy: number;\n}\n\nexport class EdgeCurvedLineGraphics {\n  geometryCache: PIXI.GraphicsGeometry[][] = [];\n\n  getGeometry(a: PIXI.Point, d: PIXI.Point) {\n    const i = Math.floor(Math.abs(d.x - a.x));\n    const j = Math.floor(Math.abs(d.y - a.y));\n\n    if (i > 100 || j > 100) {\n      const graphics = new PIXI.Graphics();\n      this.drawCurvedLine(\n        new PIXI.Point(),\n        new PIXI.Point(i / 2, j / 3),\n        new PIXI.Point(i / 2, (j * 2) / 3),\n        new PIXI.Point(i, j),\n        graphics,\n        new PIXI.Point(),\n      );\n      return graphics.geometry;\n    }\n\n    if (!this.geometryCache[i]) {\n      this.geometryCache[i] = [];\n    }\n    if (!this.geometryCache[i][j]) {\n      const b0 = new PIXI.Point(i / 2, j / 3);\n      const c0 = new PIXI.Point(i / 2, (j * 2) / 3);\n      const d0 = new PIXI.Point(i, j);\n      const graphics = new PIXI.Graphics();\n      this.drawCurvedLine(\n        new PIXI.Point(),\n        b0,\n        c0,\n        d0,\n        graphics,\n        new PIXI.Point(),\n      );\n      this.geometryCache[i][j] = graphics.geometry;\n    }\n\n    return this.geometryCache[i][j];\n  }\n\n  createGraphics(a: PIXI.Point, d: PIXI.Point) {\n    const graphics = new PIXI.Graphics(this.getGeometry(a, d));\n    graphics.position.set(a.x, a.y);\n    const x = d.x - a.x;\n    const y = d.y - a.y;\n    graphics.scale.set(\n      x === 0 ? 1 : x / Math.abs(x),\n      y === 0 ? 
1 : y / Math.abs(y),\n    );\n    return graphics;\n  }\n\n  /**\n   * Draw a cubic curve from point a to point d, with points b and c\n   * controlling its shape.\n   * @param a\n   * @param b\n   * @param c\n   * @param d\n   * @param graphics\n   * @param pointOnLine\n   */\n  drawCurvedLine(\n    a: AudionPanel.Point,\n    b: AudionPanel.Point,\n    c: AudionPanel.Point,\n    d: AudionPanel.Point,\n    graphics: PIXI.Graphics,\n    pointOnLine: AudionPanel.Point,\n  ) {\n    const lineCoeffs = buildLineCoefficients(a, b, c, d, LINE_COEFF);\n\n    const lineMagnitudeEstimate = Math.hypot(a.y - d.y, a.x - d.x);\n    const steps = Math.max(2, Math.ceil(lineMagnitudeEstimate * STEP_RATIO));\n\n    graphics.lineStyle(2, GraphColor.INPUT_OUTPUT);\n\n    graphics.moveTo(a.x, a.y);\n    for (let i = 1; i < steps; i++) {\n      interpolateCoefficients(lineCoeffs, i / steps, pointOnLine);\n      graphics.lineTo(pointOnLine.x, pointOnLine.y);\n    }\n    graphics.lineStyle(0);\n    graphics.closePath();\n  }\n\n  /**\n   * Adjust a point along a line by amount radius.\n   * @param end\n   * @param destination\n   * @param radius\n   */\n  adjustPoint(\n    end: AudionPanel.Point,\n    destination: AudionPanel.Point,\n    radius: number,\n  ) {\n    const magnitude = Math.hypot(end.y - destination.y, end.x - destination.x);\n\n    destination.x += ((end.x - destination.x) / magnitude) * radius;\n    destination.y += ((end.y - destination.y) / magnitude) * radius;\n  }\n}\n\n/**\n * Create a LineCoefficients object.\n * @return\n */\nfunction createLineCoefficients(): LineCoefficients {\n  return {ax: 0, ay: 0, bx: 0, by: 0, cx: 0, cy: 0, dx: 0, dy: 0};\n}\n\n/**\n * Expand the cubic Bezier curve through points a, b, c, d into polynomial\n * coefficients, so that x(t) = ax * t^3 + bx * t^2 + cx * t + dx (and\n * likewise for y).\n * @param a\n * @param b\n * @param c\n * @param d\n * @param coeff\n * @return\n */\nfunction buildLineCoefficients(\n  a: AudionPanel.Point,\n  b: AudionPanel.Point,\n  c: AudionPanel.Point,\n  d: AudionPanel.Point,\n  coeff = createLineCoefficients(),\n): LineCoefficients {\n  const {x: ax, y: ay} = a;\n  const {x: bx, y: by} = b;\n  const {x: cx, y: cy} = c;\n  const {x: dx, y: dy} = d;\n\n  coeff.ax = dx - 3 * cx + 3 * bx - ax;\n  coeff.ay = dy - 3 * cy + 3 * by - ay;\n  coeff.bx = 3 * cx - 6 * bx + 3 * ax;\n  coeff.by = 3 * cy - 6 * by + 3 * ay;\n  coeff.cx = 3 * bx - 3 * ax;\n  coeff.cy = 3 * by - 3 * ay;\n  coeff.dx = ax;\n  coeff.dy = ay;\n\n  return coeff;\n}\n\n/**\n * Evaluate the cubic polynomial at t and write the point into destination.\n * @param coeff\n * @param t number between 0 and 1 inclusive\n * @param destination\n * @return\n */\nfunction interpolateCoefficients(\n  coeff: LineCoefficients,\n  t: number,\n  destination: AudionPanel.Point = new PIXI.Point(),\n): AudionPanel.Point {\n  const t2 = t * t;\n  const t3 = t2 * t;\n  destination.x = coeff.ax * t3 + coeff.bx * t2 + coeff.cx * t + coeff.dx;\n  destination.y = coeff.ay * t3 + coeff.by * t2 + coeff.cy * t + coeff.dy;\n  return destination;\n}\n"
  },
  {
    "path": "src/panel/graph/AudioEdgeRender.ts",
    "content": "import * as PIXI from 'pixi.js';\n\nimport type {AudionPanel} from '../Types';\n\nimport {EdgeArrowGraphics} from './AudioEdgeArrowGraphics';\nimport {EdgeCurvedLineGraphics} from './AudioEdgeCurvedLineGraphics';\n\nimport {GraphColor} from './graphStyle';\n\nconst ARROW_LENGTH = 12;\nconst ARROW_HEIGHT = 4;\n\nconst STEP_RATIO = 1 / 10;\n\nconst LINE_COEFF = createLineCoefficients();\n\nexport interface AudioEdgeKey {\n  v: string;\n  w: string;\n  name: string;\n}\n\n/**\n * Render a line between AudionNodes and their inputs, outputs, and parameters.\n */\nexport class AudioEdgeRender {\n  key: AudioEdgeKey;\n  source: AudionPanel.Port;\n  destination: AudionPanel.Port;\n  parent: PIXI.Container;\n  graphics: PIXI.Graphics;\n  container: PIXI.Container;\n\n  /**\n   * @param options\n   */\n  constructor({\n    key,\n    source,\n    destination,\n  }: {\n    key: AudioEdgeKey;\n    source: AudionPanel.Port;\n    destination: AudionPanel.Port;\n  }) {\n    this.key = key;\n    this.source = source;\n    this.destination = destination;\n    this.parent = null;\n    this.graphics = new PIXI.Graphics();\n    this.container = new PIXI.Container();\n\n    this.source.edges.push(this);\n    this.destination.edges.push(this);\n  }\n  /**\n   * @param parent\n   */\n  setPIXIParent(parent: PIXI.Container) {\n    this.parent = parent;\n    parent.addChild(this.container);\n  }\n  /**\n   * Remove the PIXI DisplayObject from the rendered hierarchy.\n   */\n  remove() {\n    this.container.parent.removeChild(this.container);\n\n    this.source.edges.splice(this.source.edges.indexOf(this), 1);\n    this.destination.edges.splice(this.destination.edges.indexOf(this), 1);\n  }\n  /**\n   * @param line\n   */\n  draw(\n    line: AudionPanel.Point[],\n    {\n      edgeArrowGraphics: arrowGraphics,\n      edgeCurvedLineGraphics: curvedLineGraphics,\n    }: {\n      edgeArrowGraphics: EdgeArrowGraphics;\n      edgeCurvedLineGraphics: EdgeCurvedLineGraphics;\n    },\n  ) {\n    const {\n      offset: start,\n      node: {position: sourcePosition},\n    } = this.source;\n    const {\n      offset: end,\n      node: {position: destinationPosition},\n    } = this.destination;\n    const a = new PIXI.Point(\n      sourcePosition.x + start.x,\n      sourcePosition.y + start.y,\n    );\n    const d = new PIXI.Point(\n      destinationPosition.x + end.x,\n      destinationPosition.y + end.y,\n    );\n    this.container.removeChildren();\n    this.container.addChild(arrowGraphics.createGraphics(a, d));\n    this.container.addChild(curvedLineGraphics.createGraphics(a, d));\n  }\n\n  /**\n   * Draw an arrow.\n   * @param pointOnLine\n   * @param end\n   * @param graphics\n   */\n  drawArrow(\n    pointOnLine: AudionPanel.Point,\n    end: AudionPanel.Point,\n    graphics: PIXI.Graphics,\n  ) {\n    const arrowMagnitude = Math.hypot(\n      pointOnLine.y - end.y,\n      pointOnLine.x - end.x,\n    );\n    const arrowUnitX = (pointOnLine.x - end.x) / arrowMagnitude;\n    const arrowUnitY = (pointOnLine.y - end.y) / arrowMagnitude;\n\n    graphics.beginFill(GraphColor.INPUT_OUTPUT);\n    graphics.lineTo(\n      end.x + arrowUnitX * ARROW_LENGTH + arrowUnitY * ARROW_HEIGHT,\n      end.y + arrowUnitY * ARROW_LENGTH - arrowUnitX * ARROW_HEIGHT,\n    );\n    graphics.lineTo(\n      end.x + arrowUnitX * ARROW_LENGTH - arrowUnitY * ARROW_HEIGHT,\n      end.y + arrowUnitY * ARROW_LENGTH + arrowUnitX * ARROW_HEIGHT,\n    );\n    graphics.lineTo(end.x, end.y);\n    graphics.endFill();\n  }\n\n  /**\n   * Draw a 
curved line with 3 points to control its shape.\n   * @param a\n   * @param b\n   * @param c\n   * @param graphics\n   * @param pointOnLine\n   */\n  drawCurvedLine(\n    a: AudionPanel.Point,\n    b: AudionPanel.Point,\n    c: AudionPanel.Point,\n    graphics: PIXI.Graphics,\n    pointOnLine: AudionPanel.Point,\n  ) {\n    const lineCoeffs = lineCoefficients(a, b, c, LINE_COEFF);\n\n    const lineMagnitudeEstimate = Math.hypot(a.y - c.y, a.x - c.x);\n    const steps = Math.max(2, Math.ceil(lineMagnitudeEstimate * STEP_RATIO));\n\n    graphics.lineStyle(2, GraphColor.INPUT_OUTPUT);\n\n    graphics.moveTo(a.x, a.y);\n    for (let i = 1; i < steps; i++) {\n      interpolateCoefficients(lineCoeffs, i / steps, pointOnLine);\n      graphics.lineTo(pointOnLine.x, pointOnLine.y);\n    }\n    graphics.lineTo(c.x, c.y);\n  }\n\n  /**\n   * Adjust a point along a line by amount radius.\n   * @param end\n   * @param destination\n   * @param radius\n   */\n  adjustPoint(\n    end: AudionPanel.Point,\n    destination: AudionPanel.Point,\n    radius: number,\n  ) {\n    const magnitude = Math.hypot(end.y - destination.y, end.x - destination.x);\n\n    destination.x += ((end.x - destination.x) / magnitude) * radius;\n    destination.y += ((end.y - destination.y) / magnitude) * radius;\n  }\n}\n\n/**\n * Create a LineCoefficients object.\n * @return\n */\nfunction createLineCoefficients(): LineCoefficients {\n  return {ax: 0, ay: 0, bx: 0, by: 0, cx: 0, cy: 0};\n}\n\n/**\n * Expand the quadratic Bezier curve through points a, b, c into polynomial\n * coefficients, so that x(t) = ax * t^2 + bx * t + cx (and likewise for y).\n * @param a\n * @param b\n * @param c\n * @param coeff\n * @return\n */\nfunction lineCoefficients(\n  a: AudionPanel.Point,\n  b: AudionPanel.Point,\n  c: AudionPanel.Point,\n  coeff = createLineCoefficients(),\n): LineCoefficients {\n  const {x: ax, y: ay} = a;\n  const {x: bx, y: by} = b;\n  const {x: cx, y: cy} = c;\n\n  const cbx = cx - bx;\n  const bax = bx - ax;\n  const cby = cy - by;\n  const bay = by - ay;\n\n  coeff.ax = cbx - bax;\n  coeff.ay = cby - bay;\n  coeff.bx = 2 * bax;\n  coeff.by = 2 * bay;\n  coeff.cx = ax;\n  coeff.cy = ay;\n\n  return coeff;\n}\n\n/**\n * Evaluate the quadratic polynomial at t and write the point into\n * destination.\n * @param coeff\n * @param t number between 0 and 1 inclusive\n * @param destination\n * @return\n */\nfunction interpolateCoefficients(\n  coeff: LineCoefficients,\n  t: number,\n  destination: AudionPanel.Point = new PIXI.Point(),\n): AudionPanel.Point {\n  destination.x = coeff.ax * t * t + coeff.bx * t + coeff.cx;\n  destination.y = coeff.ay * t * t + coeff.by * t + coeff.cy;\n  return destination;\n}\n\ninterface LineCoefficients {\n  ax: number;\n  ay: number;\n  bx: number;\n  by: number;\n  cx: number;\n  cy: number;\n}\n"
  },
  {
    "path": "src/panel/graph/AudioGraphRender.ts",
    "content": "/// <reference path=\"../../chrome/Types.js\" />\n\nimport * as PIXI from 'pixi.js';\nimport {BehaviorSubject} from 'rxjs';\n\nimport {Audion} from '../../devtools/Types';\n\nimport {AudioEdgeKey, AudioEdgeRender} from './AudioEdgeRender';\nimport {AudioNodeRender} from './AudioNodeRender';\nimport {Camera} from './Camera';\nimport {GraphicsCache} from './GraphicsCache';\n\ntype AnimationFrameId = ReturnType<typeof requestAnimationFrame>;\n\n/**\n * Render a graph of nodes and edges.\n */\nexport class AudioGraphRender {\n  nodeMap: Map<string, AudioNodeRender>;\n  edgeIdMap: Map<string, Map<string, Map<string, AudioEdgeKey>>>;\n  edgeMap: Map<AudioEdgeKey, AudioEdgeRender>;\n\n  camera: Camera;\n\n  elementContainer: HTMLElement;\n  pixiApplication: PIXI.Application<HTMLCanvasElement> | null;\n  pixiView: HTMLCanvasElement | null;\n  pixiNodeContainer: PIXI.Container | null;\n  pixiEdgeContainer: PIXI.Container | null;\n\n  renderFrameId: AnimationFrameId | null;\n\n  graphicsCache: GraphicsCache;\n\n  selectedNode$: BehaviorSubject<Audion.GraphNode>;\n\n  /**\n   * Create an AudioGraphRender.\n   * @param options\n   */\n  constructor({elementContainer}: {elementContainer: HTMLElement}) {\n    this.nodeMap = new Map();\n    this.edgeIdMap = new Map();\n    this.edgeMap = new Map();\n\n    this.camera = new Camera();\n\n    this.elementContainer = elementContainer;\n    this.pixiView = null;\n    this.pixiApplication = null;\n    this.pixiNodeContainer = null;\n    this.pixiEdgeContainer = null;\n\n    this.renderFrameId = null;\n\n    this.graphicsCache = null;\n\n    this._render = this._render.bind(this);\n\n    this.selectedNode$ = new BehaviorSubject<Audion.GraphNode>(null);\n  }\n\n  /** Initialize. */\n  init() {\n    const app = (this.pixiApplication = new PIXI.Application<HTMLCanvasElement>(\n      {\n        backgroundColor: 0xffffff,\n        resizeTo: this.elementContainer,\n        antialias: true,\n        autoDensity: true,\n        resolution: window.devicePixelRatio,\n      },\n    ));\n    this.pixiView = app.view;\n\n    this.graphicsCache = new GraphicsCache();\n\n    const nodeContainer = (this.pixiNodeContainer = new PIXI.Container());\n    app.stage.addChild(nodeContainer);\n\n    const edgeContainer = (this.pixiEdgeContainer = new PIXI.Container());\n    app.stage.addChild(edgeContainer);\n\n    this.initEvents();\n\n    this.camera.viewportObserver.observe((viewport) => {\n      const {x, y, width, height} = this.camera.viewport;\n      app.stage.setTransform(-x / width, -y / height, 1 / width, 1 / height);\n      this.requestRender();\n    });\n  }\n\n  /** Render the graph. */\n  requestRender() {\n    if (this.renderFrameId === null) {\n      this.renderFrameId = requestAnimationFrame(this._render);\n    }\n  }\n\n  _render() {\n    this.renderFrameId = null;\n\n    const {pixiApplication: app} = this;\n\n    this.camera.setScreenSize(app.screen.width, app.screen.height);\n    app.render();\n  }\n\n  /** Stop rendering. 
*/\n  stop() {\n    cancelAnimationFrame(this.renderFrameId);\n  }\n\n  /**\n   * @param message\n   */\n  updateGraphSizes(message: Audion.GraphContext): Audion.GraphContext {\n    if (message.graph) {\n      message.graph.nodes.forEach(({v: nodeId, value: node}) => {\n        if (node) {\n          const nodeRender = this.createNodeRender(\n            nodeId,\n            message.nodes[nodeId],\n          );\n          node.width = nodeRender.size.x;\n          node.height = nodeRender.size.y;\n        }\n      });\n    } else {\n      for (const nodeId of this.nodeMap.keys()) {\n        this.destroyNodeRender(nodeId);\n      }\n      for (const edgeId of this.edgeMap.keys()) {\n        this.destroyEdgeRender(edgeId);\n      }\n    }\n    return message;\n  }\n\n  /**\n   * @param message\n   */\n  update(message: Audion.GraphContext) {\n    this.camera.setGraphSize(\n      message.graph.value.width,\n      message.graph.value.height,\n    );\n\n    const previousNodeRenders = new Set(this.nodeMap.values());\n    for (let i = 0; i < message.graph.nodes.length; i++) {\n      const nodeKeyValue = message.graph.nodes[i];\n      const nodeId = nodeKeyValue.v;\n      const node = nodeKeyValue.value;\n\n      if (node) {\n        const nodeRender = this.createNodeRender(nodeId, message.nodes[nodeId]);\n        nodeRender.container.visible = true;\n        nodeRender.position.set(\n          node.x - nodeRender.size.x / 2,\n          node.y - nodeRender.size.y / 2,\n        );\n        previousNodeRenders.delete(nodeRender);\n      } else {\n        this.destroyNodeRender(nodeId);\n      }\n    }\n    for (const nodeRender of previousNodeRenders) {\n      this.destroyNodeRender(nodeRender.id);\n    }\n\n    const previousEdgeRenders = new Set(this.edgeMap.values());\n    for (let i = 0; i < message.graph.edges.length; i++) {\n      const edgeKeyValue = message.graph.edges[i];\n      const edge = edgeKeyValue.value;\n\n      if (edge) {\n        const edgeRender = this.createEdgeRender(edgeKeyValue, message);\n        if (edgeRender) {\n          edgeRender.draw(edge.points, this.graphicsCache);\n        }\n        previousEdgeRenders.delete(edgeRender);\n      }\n    }\n    for (const edgeRender of previousEdgeRenders) {\n      this.destroyEdgeRender(edgeRender.key);\n    }\n\n    this.requestRender();\n  }\n\n  getNodeAtViewportPoint(viewportPoint: {x: number; y: number}) {\n    const screenPoint = new PIXI.Point(\n      viewportPoint.x * this.camera.screen.width,\n      viewportPoint.y * this.camera.screen.height,\n    );\n    return this.getNodeAtScreenPoint(screenPoint);\n  }\n\n  getNodeAtScreenPoint(screenPoint: {x: number; y: number}) {\n    for (const nodeRender of this.nodeMap.values()) {\n      if (\n        nodeRender.container.getBounds().contains(screenPoint.x, screenPoint.y)\n      ) {\n        return nodeRender.node;\n      }\n    }\n\n    return null;\n  }\n\n  /** Initialize event handling. 
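Wires mouse dragging to camera.move, clicks to node selection via a hit test, and the mouse wheel to camera.zoom.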
*/\n  initEvents() {\n    const {pixiApplication: app} = this;\n\n    app.stage.eventMode = 'dynamic';\n    let lastPoint = null;\n    app.stage.addListener('mousemove', (e) => {\n      if (lastPoint && e.buttons) {\n        this.camera.move(lastPoint.x - e.globalX, lastPoint.y - e.globalY);\n      }\n      lastPoint = e.global.clone();\n    });\n\n    app.view.onclick = ({offsetX, offsetY}) => {\n      const {clientWidth, clientHeight} = app.view;\n      const viewportPoint = new PIXI.Point(\n        offsetX / clientWidth,\n        offsetY / clientHeight,\n      );\n\n      const lastSelectedNode = this.selectedNode$.value;\n      const selectedNode = this.getNodeAtViewportPoint(viewportPoint);\n      this.nodeMap.get(lastSelectedNode?.node?.nodeId)?.setHighlight(false);\n      this.nodeMap.get(selectedNode?.node?.nodeId)?.setHighlight(true);\n      this.requestRender();\n\n      this.selectedNode$.next(selectedNode);\n    };\n\n    app.view.onwheel = (e) => {\n      this.camera.zoom(\n        e.clientX - app.view.clientLeft,\n        e.clientY - app.view.clientTop,\n        e.deltaY / 1000,\n      );\n    };\n  }\n\n  /**\n   * Create the rendering for an audio node.\n   * @param nodeId\n   * @param node\n   * @returns\n   */\n  createNodeRender(nodeId: string, node: Audion.GraphNode): AudioNodeRender {\n    let nodeRender = this.nodeMap.get(nodeId);\n    if (!nodeRender) {\n      if (node.node && node.node.nodeType) {\n        nodeRender = new AudioNodeRender(nodeId).init(node, this.graphicsCache);\n        nodeRender.setPixiParent(this.pixiNodeContainer);\n        this.nodeMap.set(nodeId, nodeRender);\n      }\n    }\n    return nodeRender;\n  }\n\n  /**\n   * Destroy the rendering for an audio node.\n   * @param nodeId\n   */\n  destroyNodeRender(nodeId: any) {\n    const nodeRender = this.nodeMap.get(nodeId);\n    if (nodeRender) {\n      nodeRender.remove();\n      this.nodeMap.delete(nodeId);\n\n      if (nodeId === this.selectedNode$.value?.node?.nodeId) {\n        this.selectedNode$.next(null);\n      }\n    }\n  }\n\n  compareEdgeKey(left: AudioEdgeKey, right: AudioEdgeKey) {\n    if (left.v < right.v) {\n      return -1;\n    } else if (left.v > right.v) {\n      return 1;\n    }\n    if (left.w < right.w) {\n      return -1;\n    } else if (left.w > right.w) {\n      return 1;\n    }\n    if (left.name < right.name) {\n      return -1;\n    } else if (left.name > right.name) {\n      return 1;\n    }\n    return 0;\n  }\n\n  createEdgeId({v, w, name}: Audion.GraphlibEdge) {\n    if (!this.edgeIdMap.has(v)) {\n      this.edgeIdMap.set(v, new Map());\n    }\n    const edgeIdVMap = this.edgeIdMap.get(v);\n    if (!edgeIdVMap.has(w)) {\n      edgeIdVMap.set(w, new Map());\n    }\n    const edgeIdVWMap = edgeIdVMap.get(w);\n    if (!edgeIdVWMap.has(name)) {\n      edgeIdVWMap.set(name, {v, w, name});\n    }\n    return edgeIdVWMap.get(name);\n  }\n\n  destroyEdgeId(edgeId: AudioEdgeKey) {\n    if (this.edgeIdMap.has(edgeId.v)) {\n      const edgeIdVMap = this.edgeIdMap.get(edgeId.v);\n      if (edgeIdVMap.has(edgeId.w)) {\n        const edgeIdVWMap = edgeIdVMap.get(edgeId.w);\n        if (edgeIdVWMap.has(edgeId.name)) {\n          edgeIdVWMap.delete(edgeId.name);\n        }\n        if (edgeIdVWMap.size === 0) {\n          edgeIdVMap.delete(edgeId.w);\n        }\n      }\n      if (edgeIdVMap.size === 0) {\n        this.edgeIdMap.delete(edgeId.v);\n      }\n    }\n  }\n\n  /**\n   * @param edge\n   * @param context\n   * @return\n   */\n  createEdgeRender(\n    edge: 
Audion.GraphlibEdge,\n    context: Audion.GraphContext,\n  ): AudioEdgeRender {\n    const edgeId = this.createEdgeId(edge);\n    let edgeRender = this.edgeMap.get(edgeId);\n    if (!edgeRender) {\n      const sourceData = context.nodes[edge.v];\n      const destinationData = context.nodes[edge.w];\n      if (sourceData && destinationData) {\n        const sourceNode = this.nodeMap.get(sourceData.node.nodeId);\n        const destinationNode = this.nodeMap.get(destinationData.node.nodeId);\n\n        if (sourceNode && destinationNode) {\n          const {sourceOutputIndex, destinationType} = edge.value;\n          const sourceNodePort = sourceNode.output[sourceOutputIndex];\n          const destinationNodePort =\n            destinationType === Audion.GraphEdgeType.NODE\n              ? destinationNode.input[edge.value.destinationInputIndex]\n              : destinationNode.param[edge.value.destinationParamIndex];\n\n          if (sourceNodePort && destinationNodePort) {\n            edgeRender = new AudioEdgeRender({\n              key: edgeId,\n              source: sourceNodePort,\n              destination: destinationNodePort,\n            });\n            edgeRender.setPIXIParent(this.pixiEdgeContainer);\n\n            edgeRender.source.updateNodeDisplay();\n            edgeRender.destination.updateNodeDisplay();\n\n            this.edgeMap.set(edgeId, edgeRender);\n          }\n        }\n      }\n    }\n    return edgeRender;\n  }\n\n  /**\n   * @param edgeId\n   */\n  destroyEdgeRender(edgeId: AudioEdgeKey) {\n    const edgeRender = this.edgeMap.get(edgeId);\n    if (edgeRender) {\n      edgeRender.remove();\n\n      edgeRender.source.updateNodeDisplay();\n      edgeRender.destination.updateNodeDisplay();\n\n      this.edgeMap.delete(edgeId);\n\n      this.destroyEdgeId(edgeId);\n    }\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/AudioGraphText.ts",
    "content": "import * as PIXI from 'pixi.js';\n\nexport class AudioGraphText {\n  bounds: PIXI.Rectangle;\n  content: string;\n  text: PIXI.Text;\n  textStyle: PIXI.TextStyle;\n  texture: PIXI.Texture;\n\n  constructor(textStyle: PIXI.TextStyle, content: string) {\n    this.textStyle = textStyle;\n    this.content = content;\n\n    this.text = new PIXI.Text(content, this.textStyle);\n    this.bounds = this.text.getLocalBounds(new PIXI.Rectangle());\n    this.texture = this.text.texture;\n  }\n\n  createSprite() {\n    return new PIXI.Sprite(this.texture);\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/AudioGraphTextCacheGroup.ts",
    "content": "import * as PIXI from 'pixi.js';\n\nimport {AudioGraphText} from './AudioGraphText';\nimport {GraphTextStyle} from './graphStyle';\n\nexport class AudioGraphTextCache {\n  textStyle: PIXI.TextStyle;\n\n  cache: Map<string, AudioGraphText> = new Map();\n\n  constructor({textStyle}: {textStyle: PIXI.TextStyle}) {\n    this.textStyle = textStyle;\n  }\n\n  getText(content: string) {\n    if (!this.cache.has(content)) {\n      const newText = new AudioGraphText(this.textStyle, content);\n      this.cache.set(content, newText);\n    }\n    return this.cache.get(content);\n  }\n\n  getTextBounds(content: string) {\n    return this.getText(content).bounds;\n  }\n}\n\nexport class AudioGraphTextCacheGroup {\n  paramText: AudioGraphTextCache;\n  titleText: AudioGraphTextCache;\n\n  constructor() {\n    this.paramText = new AudioGraphTextCache({\n      textStyle: new PIXI.TextStyle(GraphTextStyle.PARAM),\n    });\n\n    this.titleText = new AudioGraphTextCache({\n      textStyle: new PIXI.TextStyle(GraphTextStyle.TITLE),\n    });\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/AudioNodeBackground.ts",
    "content": "import * as PIXI from 'pixi.js';\n\nimport {Audion} from '../../devtools/Types';\n\nimport {AudionPanel} from '../Types';\n\nimport {AudioGraphTextCacheGroup} from './AudioGraphTextCacheGroup';\nimport {AudioNodePort} from './AudioNodePort';\nimport {\n  GraphColor,\n  colorFromNodeType,\n  GraphPortStyle,\n  GraphNodeStyle,\n} from './graphStyle';\n\nexport interface AudioNodeBackgroundStyle {\n  isHighlighted: boolean;\n}\n\nexport class AudioNodeTextMetrics {\n  title: PIXI.Rectangle = null;\n  param: PIXI.Rectangle[] = [];\n\n  static from(\n    node: Audion.GraphNode,\n    textCacheGroup: AudioGraphTextCacheGroup,\n  ) {\n    const metrics = new AudioNodeTextMetrics();\n    metrics.title = textCacheGroup.titleText.getTextBounds(node.node.nodeType);\n    for (let i = 0; i < node.params.length; i++) {\n      metrics.param.push(\n        textCacheGroup.paramText.getTextBounds(node.params[i].paramType),\n      );\n    }\n    return metrics;\n  }\n}\n\nexport class AudioNodeMetrics {\n  nodeType: string;\n  text: AudioNodeTextMetrics;\n  numberOfInputs: number;\n  numberOfOutputs: number;\n  numberOfParams: number;\n\n  static from(\n    node: Audion.GraphNode,\n    textCacheGroup: AudioGraphTextCacheGroup,\n  ) {\n    const metrics = new AudioNodeMetrics();\n    metrics.nodeType = node.node.nodeType;\n    metrics.text = AudioNodeTextMetrics.from(node, textCacheGroup);\n    metrics.numberOfInputs = node.node.numberOfInputs;\n    metrics.numberOfOutputs = node.node.numberOfOutputs;\n    metrics.numberOfParams = node.params.length;\n    return metrics;\n  }\n}\n\nexport class AudioNodeBackground {\n  metrics: AudioNodeMetrics;\n\n  input: AudioNodePort[] = [];\n  output: AudioNodePort[] = [];\n  param: AudioNodePort[] = [];\n  size: PIXI.Point = new PIXI.Point();\n\n  /** Padding around input ports. */\n  static get INPUT_GROUP_MARGIN() {\n    return GraphPortStyle.INPUT_GROUP_MARGIN;\n  }\n\n  /** Height of input output ports. */\n  static get INPUT_HEIGHT() {\n    return GraphPortStyle.INPUT_HEIGHT;\n  }\n\n  /** Radius of the visible port icon. */\n  static get INPUT_RADIUS() {\n    return GraphPortStyle.INPUT_RADIUS;\n  }\n\n  /** Padding around the group of params. */\n  static get PARAM_GROUP_MARGIN() {\n    return GraphPortStyle.PARAM_GROUP_MARGIN;\n  }\n\n  /** Height of audio parameter ports. */\n  static get PARAM_HEIGHT() {\n    return GraphPortStyle.PARAM_HEIGHT;\n  }\n\n  /** Radius of visible port icon. 
*/\n  static get PARAM_RADIUS() {\n    return GraphPortStyle.PARAM_RADIUS;\n  }\n\n  init(metrics: AudioNodeMetrics) {\n    this.metrics = metrics;\n    const {numberOfInputs, numberOfOutputs, numberOfParams} = metrics;\n\n    const {input, output, param, size} = this;\n\n    this._getSize(metrics, size);\n\n    for (let i = input.length; i < numberOfInputs; i++) {\n      input[i] = new AudioNodePort({\n        node: null,\n        portType: AudionPanel.PortType.INPUT,\n        portIndex: i,\n        point: new PIXI.Point(\n          0,\n          AudioNodeBackground.INPUT_GROUP_MARGIN +\n            (i + 0.5) * AudioNodeBackground.INPUT_HEIGHT,\n        ),\n        radius: AudioNodeBackground.INPUT_RADIUS,\n        color: GraphColor.INPUT_OUTPUT,\n      });\n    }\n\n    for (let i = output.length; i < numberOfOutputs; i++) {\n      output[i] = new AudioNodePort({\n        node: null,\n        portType: AudionPanel.PortType.OUTPUT,\n        portIndex: i,\n        point: new PIXI.Point(\n          size.x,\n          AudioNodeBackground.INPUT_GROUP_MARGIN +\n            (i + 0.5) * AudioNodeBackground.INPUT_HEIGHT,\n        ),\n        radius: AudioNodeBackground.INPUT_RADIUS,\n        color: GraphColor.INPUT_OUTPUT,\n      });\n    }\n\n    const paramYStart = this._getParamYStart(metrics);\n\n    for (let i = 0; i < numberOfParams; i++) {\n      param[i] = new AudioNodePort({\n        node: null,\n        portType: AudionPanel.PortType.PARAM,\n        portIndex: i,\n        point: new PIXI.Point(\n          0,\n          paramYStart + (i + 0.5) * AudioNodeBackground.PARAM_HEIGHT,\n        ),\n        radius: AudioNodeBackground.PARAM_RADIUS,\n        color: GraphColor.AUDIO_PARAM,\n      });\n    }\n  }\n\n  private _getParamYStart({\n    text: textMetrics,\n    numberOfInputs,\n  }: AudioNodeMetrics) {\n    return Math.max(\n      textMetrics.title.height + GraphNodeStyle.TITLE_PADDING,\n      AudioNodeBackground.INPUT_GROUP_MARGIN +\n        numberOfInputs * AudioNodeBackground.INPUT_HEIGHT +\n        Math.max(\n          AudioNodeBackground.INPUT_GROUP_MARGIN,\n          AudioNodeBackground.PARAM_GROUP_MARGIN,\n        ),\n    );\n  }\n\n  private _getSize(\n    {\n      text: textMetrics,\n      numberOfInputs,\n      numberOfOutputs,\n      numberOfParams,\n    }: AudioNodeMetrics,\n    size: PIXI.Point,\n  ) {\n    const maxParamTextSize = new PIXI.Point();\n\n    for (let i = 0; i < numberOfParams; i++) {\n      const param = textMetrics.param[i];\n      maxParamTextSize.x = Math.max(maxParamTextSize.x, param.width);\n      maxParamTextSize.y = Math.max(maxParamTextSize.y, param.height);\n    }\n\n    size.set(\n      Math.max(textMetrics.title.width, maxParamTextSize.x) +\n        2 * GraphNodeStyle.PADDING,\n      Math.max(\n        Math.max(\n          textMetrics.title.height + 2 * GraphNodeStyle.TITLE_PADDING,\n          AudioNodeBackground.INPUT_GROUP_MARGIN +\n            AudioNodeBackground.INPUT_HEIGHT * numberOfInputs +\n            Math.max(\n              AudioNodeBackground.INPUT_GROUP_MARGIN,\n              AudioNodeBackground.PARAM_GROUP_MARGIN,\n            ),\n        ) +\n          AudioNodeBackground.PARAM_HEIGHT * numberOfParams +\n          AudioNodeBackground.PARAM_GROUP_MARGIN,\n        AudioNodeBackground.INPUT_GROUP_MARGIN +\n          AudioNodeBackground.INPUT_HEIGHT * numberOfOutputs +\n          AudioNodeBackground.INPUT_GROUP_MARGIN,\n      ),\n    );\n  }\n}\n\nexport class AudioNodeBackgroundRender {\n  background: AudioNodeBackground;\n  style: 
AudioNodeBackgroundStyle;\n\n  geometry: PIXI.GraphicsGeometry = null;\n  material: PIXI.MeshMaterial;\n\n  constructor(\n    background: AudioNodeBackground,\n    style: AudioNodeBackgroundStyle,\n    material: PIXI.MeshMaterial,\n  ) {\n    this.background = background;\n    this.style = style;\n    this.material = material;\n  }\n\n  draw(graphics: PIXI.Graphics) {\n    graphics.clear();\n\n    if (this.style.isHighlighted) {\n      graphics.lineStyle({\n        width: GraphNodeStyle.HIGHLIGHT_STROKE_WIDTH,\n        color: GraphNodeStyle.HIGHLIGHT_STROKE_COLOR,\n      });\n    } else {\n      graphics.lineStyle(0);\n    }\n    graphics.beginFill(colorFromNodeType(this.background.metrics.nodeType));\n    graphics.drawRoundedRect(\n      0,\n      0,\n      this.background.size.x,\n      this.background.size.y,\n      GraphNodeStyle.CORNER_RADIUS,\n    );\n    graphics.endFill();\n\n    for (let i = 0; i < this.background.input.length; i++) {\n      this.background.input[i].drawSocket(graphics);\n    }\n\n    for (let i = 0; i < this.background.output.length; i++) {\n      this.background.output[i].drawSocket(graphics);\n    }\n\n    for (let i = 0; i < this.background.param.length; i++) {\n      this.background.param[i].drawSocket(graphics);\n    }\n  }\n\n  getGeometry() {\n    if (this.geometry === null) {\n      const graphics = new PIXI.Graphics();\n      this.draw(graphics);\n      this.geometry = graphics.geometry;\n    }\n    return this.geometry;\n  }\n\n  createMesh() {\n    return new PIXI.Graphics(this.getGeometry());\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/AudioNodeBackgroundRenderCacheGroup.ts",
    "content": "import * as PIXI from 'pixi.js';\nimport {MeshMaterial} from 'pixi.js';\n\nimport {Audion} from '../../devtools/Types';\n\nimport {AudioGraphTextCacheGroup} from './AudioGraphTextCacheGroup';\nimport {\n  AudioNodeBackground,\n  AudioNodeBackgroundRender,\n  AudioNodeBackgroundStyle,\n  AudioNodeMetrics,\n} from './AudioNodeBackground';\n\nexport class AudioNodeBackgroundCache {\n  textCacheGroup: AudioGraphTextCacheGroup;\n\n  cache: Map<string, AudioNodeBackground> = new Map();\n\n  constructor(textCacheGroup: AudioGraphTextCacheGroup) {\n    this.textCacheGroup = textCacheGroup;\n  }\n\n  getBackground(node: Audion.GraphNode) {\n    if (!this.cache.has(node.node.nodeType)) {\n      const background = new AudioNodeBackground();\n      background.init(AudioNodeMetrics.from(node, this.textCacheGroup));\n      this.cache.set(node.node.nodeType, background);\n    }\n    return this.cache.get(node.node.nodeType);\n  }\n}\n\nexport class AudioNodeBackgroundRenderCache {\n  material: PIXI.MeshMaterial;\n\n  textCacheGroup: AudioGraphTextCacheGroup;\n  background: AudioNodeBackgroundCache;\n\n  style: AudioNodeBackgroundStyle;\n\n  cache: Map<string, AudioNodeBackgroundRender> = new Map();\n\n  constructor({\n    background,\n    style,\n    material,\n  }: {\n    background: AudioNodeBackgroundCache;\n    style: AudioNodeBackgroundStyle;\n    material: PIXI.MeshMaterial;\n  }) {\n    this.material = material;\n\n    this.background = background;\n\n    this.style = style;\n  }\n\n  getBackground(node: Audion.GraphNode) {\n    if (!this.cache.has(node.node.nodeType)) {\n      const background = this.background.getBackground(node);\n      const backgroundRender = new AudioNodeBackgroundRender(\n        background,\n        this.style,\n        this.material,\n      );\n      this.cache.set(node.node.nodeType, backgroundRender);\n    }\n    return this.cache.get(node.node.nodeType);\n  }\n}\n\nexport class AudioNodeBackgroundRenderCacheGroup {\n  textCacheGroup: AudioGraphTextCacheGroup;\n  defaultMaterial: PIXI.MeshMaterial;\n\n  plain: AudioNodeBackgroundRenderCache;\n  highlight: AudioNodeBackgroundRenderCache;\n\n  constructor({textCacheGroup}: {textCacheGroup: AudioGraphTextCacheGroup}) {\n    this.textCacheGroup = textCacheGroup;\n\n    const material = (this.defaultMaterial = new MeshMaterial(\n      PIXI.Texture.EMPTY,\n    ));\n\n    const background = new AudioNodeBackgroundCache(textCacheGroup);\n\n    this.plain = new AudioNodeBackgroundRenderCache({\n      background,\n      style: {isHighlighted: false},\n      material,\n    });\n    this.highlight = new AudioNodeBackgroundRenderCache({\n      background,\n      style: {isHighlighted: true},\n      material,\n    });\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/AudioNodePort.ts",
    "content": "import * as PIXI from 'pixi.js';\n\nimport {AudionPanel} from '../Types';\nimport {GraphPortStyle} from './graphStyle';\n\nconst ZERO_POINT = new PIXI.Point();\n\nexport enum AudioNodePortType {\n  INPUT = 'input',\n  OUTPUT = 'output',\n  PARAM = 'param',\n}\n\n/**\n * Port.\n */\nexport class AudioNodePort {\n  node: AudionPanel.Node;\n  portType: AudionPanel.PortType;\n  portIndex: number;\n  offset: AudionPanel.Point;\n  radius: number;\n  color: number;\n  edges: any[];\n\n  /** Radius of the visible port icon. */\n  static get INPUT_RADIUS() {\n    return GraphPortStyle.INPUT_RADIUS;\n  }\n\n  /** Radius of visible port icon. */\n  static get PARAM_RADIUS() {\n    return GraphPortStyle.PARAM_RADIUS;\n  }\n\n  /**\n   * Create a port.\n   * @param options\n   */\n  constructor({\n    node,\n    portType,\n    portIndex,\n    point,\n    radius,\n    color,\n  }: {\n    node: AudionPanel.Node;\n    portType: AudionPanel.PortType;\n    portIndex: number;\n    point: AudionPanel.Point;\n    radius: number;\n    color: number;\n  }) {\n    this.node = node;\n    this.portType = portType;\n    this.portIndex = portIndex;\n    this.offset = point;\n    this.radius = radius;\n    this.color = color;\n    this.edges = [];\n  }\n\n  updateNodeDisplay() {\n    this.node.updatePortDisplay(this.portType, this.portIndex);\n  }\n\n  /**\n   * @param graphics\n   */\n  drawSocket(\n    graphics: PIXI.Graphics,\n    fill: number = GraphPortStyle.DISCONNECTED_FILL_COLOR,\n    offset: AudionPanel.Point = ZERO_POINT,\n  ) {\n    graphics.lineStyle(GraphPortStyle.STROKE_WIDTH, this.color);\n    graphics.beginFill(fill);\n    graphics.drawCircle(\n      offset.x + this.offset.x,\n      offset.y + this.offset.y,\n      this.radius,\n    );\n    graphics.endFill();\n  }\n\n  /**\n   * @param graphics\n   */\n  drawConnect(graphics: PIXI.Graphics) {\n    this.drawSocket(graphics, this.color, this.node.position);\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/AudioNodeRender.ts",
    "content": "import * as PIXI from 'pixi.js';\n\nimport {Audion} from '../../devtools/Types';\n\nimport {\n  GraphColor,\n  colorFromNodeType,\n  GraphNodeStyle,\n  GraphPortStyle,\n  GraphTextStyle,\n} from './graphStyle';\nimport {AudioNodePort, AudioNodePortType} from './AudioNodePort';\nimport {AudioGraphTextCacheGroup} from './AudioGraphTextCacheGroup';\nimport {AudioNodeBackgroundRenderCacheGroup} from './AudioNodeBackgroundRenderCacheGroup';\nimport {AudioPortCacheGroup} from './AudioPortCacheGroup';\nimport {AudionPanel} from '../Types';\n\n/**\n * Manage the rendered representation of a WebAudio node.\n */\nexport class AudioNodeRender {\n  id: string;\n  node: Audion.GraphNode;\n\n  backgroundCacheGroup: AudioNodeBackgroundRenderCacheGroup;\n\n  parent: PIXI.Container;\n  container: PIXI.Container;\n  title: PIXI.DisplayObject;\n  labelContainer: PIXI.Container;\n  background: PIXI.DisplayObject;\n  portContainer: PIXI.Container;\n  inputPortDisplays: PIXI.DisplayObject[];\n  outputPortDisplays: PIXI.DisplayObject[];\n  paramPortDisplays: PIXI.DisplayObject[];\n\n  size: PIXI.Point;\n  position: PIXI.Point;\n  input: AudioNodePort[];\n  output: AudioNodePort[];\n  param: AudioNodePort[];\n  isHighlighted: boolean;\n\n  /**\n   * Create a AudioNodeRender instance.\n   * @param id\n   */\n  constructor(id: string) {\n    this.id = id;\n    this.node = null;\n    this.parent = null;\n    this.container = null;\n    this.title = null;\n    this.labelContainer = null;\n    this.background = null;\n    this.backgroundCacheGroup = null;\n    this.inputPortDisplays = [];\n    this.outputPortDisplays = [];\n    this.paramPortDisplays = [];\n    this.size = new PIXI.Point();\n    this.position = null;\n    this.input = [];\n    this.output = [];\n    this.param = [];\n  }\n\n  /** Padding around input ports. */\n  static get INPUT_GROUP_MARGIN() {\n    return GraphPortStyle.INPUT_GROUP_MARGIN;\n  }\n\n  /** Height of input output ports. */\n  static get INPUT_HEIGHT() {\n    return GraphPortStyle.INPUT_HEIGHT;\n  }\n\n  /** Radius of the visible port icon. */\n  static get INPUT_RADIUS() {\n    return GraphPortStyle.INPUT_RADIUS;\n  }\n\n  /** Padding around the group of params. */\n  static get PARAM_GROUP_MARGIN() {\n    return GraphPortStyle.PARAM_GROUP_MARGIN;\n  }\n\n  /** Height of audio parameter ports. */\n  static get PARAM_HEIGHT() {\n    return GraphPortStyle.PARAM_HEIGHT;\n  }\n\n  /** Radius of visible port icon. 
*/\n  static get PARAM_RADIUS() {\n    return GraphPortStyle.PARAM_RADIUS;\n  }\n\n  /**\n   * @param node\n   * @return\n   */\n  init(\n    node: Audion.GraphNode,\n    {\n      textCacheGroup,\n      backgroundCacheGroup,\n      portCacheGroup,\n    }: {\n      textCacheGroup: AudioGraphTextCacheGroup;\n      backgroundCacheGroup: AudioNodeBackgroundRenderCacheGroup;\n      portCacheGroup: AudioPortCacheGroup;\n    },\n  ): AudioNodeRender {\n    if (this.node && node.params.length === Object.keys(this.param).length) {\n      return this;\n    }\n\n    this.node = node;\n    this.backgroundCacheGroup = backgroundCacheGroup;\n\n    const container = (this.container = new PIXI.Container());\n    this.position = container.position;\n\n    container.visible = false;\n\n    const title = (this.title = textCacheGroup.titleText\n      .getText(node.node.nodeType)\n      .createSprite());\n    title.position.set(GraphNodeStyle.PADDING, GraphNodeStyle.TITLE_PADDING);\n\n    const background = (this.background = backgroundCacheGroup.plain\n      .getBackground(node)\n      .createMesh());\n\n    const labelContainer = (this.labelContainer = new PIXI.Container());\n    const portContainer = (this.portContainer = new PIXI.Container());\n    container.addChild(background);\n    container.addChild(labelContainer);\n    container.addChild(title);\n    container.addChild(portContainer);\n\n    this.initSize(textCacheGroup);\n    this.initPorts(portCacheGroup);\n\n    this.draw();\n\n    return this;\n  }\n\n  /**\n   * @param parent\n   */\n  setPixiParent(parent: PIXI.Container) {\n    this.parent = parent;\n    parent.addChild(this.container);\n  }\n\n  /**\n   * Remove from the rendering hierarchy.\n   */\n  remove() {\n    this.container.parent.removeChild(this.container);\n  }\n\n  /** Determine the size of the node. 
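Width fits the widest of the title and param labels plus padding; height is the larger of the output port column, or the taller of the title and input ports plus the param rows.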
*/\n  initSize(textCacheGroup: AudioGraphTextCacheGroup) {\n    const {node, title} = this;\n    const localBounds = new PIXI.Rectangle();\n\n    this.labelContainer.removeChildren();\n\n    const maxParamTextSize = new PIXI.Point();\n    for (let i = 0; i < node.params.length; i++) {\n      const param = node.params[i];\n\n      const label = textCacheGroup.paramText\n        .getText(param.paramType)\n        .createSprite();\n      this.labelContainer.addChild(label);\n\n      label.getLocalBounds(localBounds);\n      maxParamTextSize.x = Math.max(maxParamTextSize.x, localBounds.width);\n      maxParamTextSize.y = Math.max(maxParamTextSize.y, localBounds.height);\n    }\n\n    title.getLocalBounds(localBounds);\n\n    this.size.set(\n      Math.max(localBounds.width, maxParamTextSize.x) +\n        2 * GraphNodeStyle.PADDING,\n      Math.max(\n        Math.max(\n          localBounds.height + 2 * GraphNodeStyle.TITLE_PADDING,\n          AudioNodeRender.INPUT_GROUP_MARGIN +\n            AudioNodeRender.INPUT_HEIGHT * node.node.numberOfInputs +\n            Math.max(\n              AudioNodeRender.INPUT_GROUP_MARGIN,\n              AudioNodeRender.PARAM_GROUP_MARGIN,\n            ),\n        ) +\n          AudioNodeRender.PARAM_HEIGHT * node.params.length +\n          AudioNodeRender.PARAM_GROUP_MARGIN,\n        AudioNodeRender.INPUT_GROUP_MARGIN +\n          AudioNodeRender.INPUT_HEIGHT * node.node.numberOfOutputs +\n          AudioNodeRender.INPUT_GROUP_MARGIN,\n      ),\n    );\n  }\n\n  /**\n   * Initialize ports.\n   */\n  initPorts(portCacheGroup: AudioPortCacheGroup) {\n    const {\n      input,\n      node,\n      output,\n      param,\n      inputPortDisplays,\n      outputPortDisplays,\n      paramPortDisplays,\n    } = this;\n\n    for (let i = input.length; i < node.node.numberOfInputs; i++) {\n      const inputPoint = new PIXI.Point(\n        0,\n        AudioNodeRender.INPUT_GROUP_MARGIN +\n          (i + 0.5) * AudioNodeRender.INPUT_HEIGHT,\n      );\n\n      input[i] = new AudioNodePort({\n        node: this,\n        portType: AudionPanel.PortType.INPUT,\n        portIndex: i,\n        point: inputPoint,\n        radius: AudioNodeRender.INPUT_RADIUS,\n        color: GraphColor.INPUT_OUTPUT,\n      });\n\n      inputPortDisplays[i] =\n        portCacheGroup.inputOutput.createGraphics(inputPoint);\n      this.portContainer.addChild(inputPortDisplays[i]);\n    }\n\n    for (let i = output.length; i < node.node.numberOfOutputs; i++) {\n      const outputPoint = new PIXI.Point(\n        this.size.x,\n        AudioNodeRender.INPUT_GROUP_MARGIN +\n          (i + 0.5) * AudioNodeRender.INPUT_HEIGHT,\n      );\n\n      output[i] = new AudioNodePort({\n        node: this,\n        portType: AudionPanel.PortType.OUTPUT,\n        portIndex: i,\n        point: outputPoint,\n        radius: AudioNodeRender.INPUT_RADIUS,\n        color: GraphColor.INPUT_OUTPUT,\n      });\n\n      outputPortDisplays[i] =\n        portCacheGroup.inputOutput.createGraphics(outputPoint);\n      this.portContainer.addChild(outputPortDisplays[i]);\n    }\n\n    const localBounds = new PIXI.Rectangle();\n    this.title.getLocalBounds(localBounds);\n    const paramYStart = Math.max(\n      localBounds.height + GraphNodeStyle.TITLE_PADDING,\n      AudioNodeRender.INPUT_GROUP_MARGIN +\n        input.length * AudioNodeRender.INPUT_HEIGHT +\n        Math.max(\n          AudioNodeRender.INPUT_GROUP_MARGIN,\n          AudioNodeRender.PARAM_GROUP_MARGIN,\n        ),\n    );\n\n    for (let i = 0; i < 
node.params.length; i++) {\n      const paramPoint = new PIXI.Point(\n        0,\n        paramYStart + (i + 0.5) * AudioNodeRender.PARAM_HEIGHT,\n      );\n\n      const paramPort = (param[i] = new AudioNodePort({\n        node: this,\n        portType: AudionPanel.PortType.PARAM,\n        portIndex: i,\n        point: paramPoint,\n        radius: AudioNodeRender.PARAM_RADIUS,\n        color: GraphColor.AUDIO_PARAM,\n      }));\n\n      paramPortDisplays[i] = portCacheGroup.param.createGraphics(paramPoint);\n      this.portContainer.addChild(paramPortDisplays[i]);\n\n      const label = this.labelContainer.getChildAt(i);\n      label.position.set(\n        GraphNodeStyle.PADDING,\n        paramPort.offset.y - 0.5 * GraphTextStyle.PARAM.fontSize,\n      );\n    }\n  }\n\n  setHighlight(isHighlighted: boolean) {\n    this.isHighlighted = isHighlighted;\n    this.draw();\n  }\n\n  updatePortDisplay(portType: AudionPanel.PortType, index: number) {\n    if (portType === AudionPanel.PortType.INPUT) {\n      this.inputPortDisplays[index].visible =\n        this.input[index].edges.length > 0;\n    } else if (portType === AudionPanel.PortType.OUTPUT) {\n      this.outputPortDisplays[index].visible =\n        this.output[index].edges.length > 0;\n    } else if (portType === AudionPanel.PortType.PARAM) {\n      this.paramPortDisplays[index].visible =\n        this.param[index].edges.length > 0;\n    }\n  }\n\n  /**\n   * Update the rendering.\n   */\n  draw() {\n    const newBackground = (\n      this.isHighlighted\n        ? this.backgroundCacheGroup.highlight\n        : this.backgroundCacheGroup.plain\n    )\n      .getBackground(this.node)\n      .createMesh();\n\n    this.container.removeChild(this.background);\n    this.container.addChildAt(newBackground, 0);\n    this.background = newBackground;\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/AudioPortCacheGroup.ts",
    "content": "import * as PIXI from 'pixi.js';\nimport {AudionPanel} from '../Types';\nimport {AudioNodePort} from './AudioNodePort';\nimport {GraphColor} from './graphStyle';\n\nexport class AudioPortCache {\n  port: AudioNodePort;\n\n  geometry: PIXI.GraphicsGeometry = null;\n\n  constructor(port: AudioNodePort) {\n    this.port = port;\n  }\n\n  getGeometry() {\n    if (this.geometry === null) {\n      const graphics = new PIXI.Graphics();\n      this.port.drawSocket(graphics, this.port.color);\n      this.geometry = graphics.geometry;\n    }\n    return this.geometry;\n  }\n\n  createGraphics(position = new PIXI.Point()) {\n    const graphics = new PIXI.Graphics(this.getGeometry());\n    graphics.position.set(position.x, position.y);\n    graphics.visible = false;\n    return graphics;\n  }\n}\n\nexport class AudioPortCacheGroup {\n  inputOutput: AudioPortCache;\n  param: AudioPortCache;\n\n  constructor() {\n    this.inputOutput = new AudioPortCache(\n      new AudioNodePort({\n        node: null,\n        portType: AudionPanel.PortType.INPUT,\n        portIndex: -1,\n        point: new PIXI.Point(),\n        radius: AudioNodePort.INPUT_RADIUS,\n        color: GraphColor.INPUT_OUTPUT,\n      }),\n    );\n\n    this.param = new AudioPortCache(\n      new AudioNodePort({\n        node: null,\n        portType: AudionPanel.PortType.PARAM,\n        portIndex: -1,\n        point: new PIXI.Point(),\n        radius: AudioNodePort.PARAM_RADIUS,\n        color: GraphColor.AUDIO_PARAM,\n      }),\n    );\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/Camera.js",
    "content": "import {Rectangle} from '@pixi/math';\n\nimport {Observer} from '../../utils/Observer';\nimport {trunc, clamp} from '../../utils/math';\n\nconst MIN_ZOOM = 0.5;\n\n/**\n * Camera.\n */\nexport class Camera {\n  /** Create a Camera. */\n  constructor() {\n    /** Area that can be viewed. */\n    this.bounds = new Rectangle(-50, -50, 100, 100);\n    this.screen = new Rectangle();\n    this.viewport = new Rectangle(-50, -50, 1, 1);\n    /** @type {Observer<Rectangle>} */\n    this.viewportObserver = new Observer((onNext) => {\n      this.update = () => {\n        onNext(this.viewport);\n      };\n      return () => {};\n    });\n  }\n  /** Update. */\n  update() {}\n  /**\n   * Move the viewport.\n   * @param {number} dx\n   * @param {number} dy\n   */\n  move(dx, dy) {\n    const zoomFactor = this.viewport.width;\n    const {x, y} = this.viewport;\n    this.viewport.x = trunc(\n      clamp(\n        x + dx * zoomFactor,\n        this.bounds.x,\n        Math.max(\n          this.bounds.x,\n          this.bounds.x + this.bounds.width - this.screen.width * zoomFactor,\n        ),\n      ),\n      -2,\n    );\n    this.viewport.y = trunc(\n      clamp(\n        y + dy * zoomFactor,\n        this.bounds.y,\n        Math.max(\n          this.bounds.y,\n          this.bounds.y + this.bounds.height - this.screen.height * zoomFactor,\n        ),\n      ),\n      -2,\n    );\n    this.update();\n  }\n  /**\n   * Zoom in or out by `delta`.\n   * @param {number} screenX\n   * @param {number} screenY\n   * @param {number} zoomDelta\n   */\n  zoom(screenX, screenY, zoomDelta) {\n    const maxScaleX = this.bounds.width / this.screen.width;\n    const maxScaleY = this.bounds.height / this.screen.height;\n    const maxScale = Math.max(1, maxScaleX, maxScaleY);\n    const zoomFactor = this.viewport.width;\n    const newZoom = trunc(\n      clamp(zoomFactor + zoomDelta, MIN_ZOOM, maxScale),\n      -2,\n    );\n    const {x, y} = this.viewport;\n    this.viewport.x = trunc(\n      clamp(\n        x + screenX * (zoomFactor - newZoom),\n        this.bounds.x,\n        Math.max(\n          this.bounds.x,\n          this.bounds.x + this.bounds.width - this.screen.width * newZoom,\n        ),\n      ),\n      -2,\n    );\n    this.viewport.y = trunc(\n      clamp(\n        y + screenY * (zoomFactor - newZoom),\n        this.bounds.y,\n        Math.max(\n          this.bounds.y,\n          this.bounds.y + this.bounds.height - this.screen.height * newZoom,\n        ),\n      ),\n      -2,\n    );\n    this.viewport.width = newZoom;\n    this.viewport.height = newZoom;\n    this.update();\n  }\n  /**\n   * Fit the viewport to the whole bounds.\n   */\n  fitToScreen() {\n    this.zoom(0, 0, Infinity);\n  }\n  /**\n   * Set graph bounds with and height.\n   * @param {number} width\n   * @param {number} height\n   */\n  setGraphSize(width, height) {\n    this.bounds.x = -50;\n    this.bounds.y = -50;\n    this.bounds.width = width + 100;\n    this.bounds.height = height + 100;\n  }\n  /**\n   * Set screen size.\n   * @param {number} width\n   * @param {number} height\n   */\n  setScreenSize(width, height) {\n    this.screen.width = width;\n    this.screen.height = height;\n  }\n}\n"
  },
  {
    "path": "src/panel/graph/GraphicsCache.ts",
    "content": "import {EdgeArrowGraphics} from './AudioEdgeArrowGraphics';\nimport {EdgeCurvedLineGraphics} from './AudioEdgeCurvedLineGraphics';\nimport {AudioGraphTextCacheGroup} from './AudioGraphTextCacheGroup';\nimport {AudioNodeBackgroundRenderCacheGroup} from './AudioNodeBackgroundRenderCacheGroup';\nimport {AudioPortCacheGroup} from './AudioPortCacheGroup';\n\nexport class GraphicsCache {\n  textCacheGroup: AudioGraphTextCacheGroup = new AudioGraphTextCacheGroup();\n  backgroundCacheGroup: AudioNodeBackgroundRenderCacheGroup =\n    new AudioNodeBackgroundRenderCacheGroup({\n      textCacheGroup: this.textCacheGroup,\n    });\n  portCacheGroup: AudioPortCacheGroup = new AudioPortCacheGroup();\n  edgeArrowGraphics: EdgeArrowGraphics = new EdgeArrowGraphics();\n  edgeCurvedLineGraphics: EdgeCurvedLineGraphics = new EdgeCurvedLineGraphics();\n}\n"
  },
  {
    "path": "src/panel/graph/graphStyle.js",
    "content": "/** @enum {number} */\nexport const Color = {\n  PROCESSOR: 0x64b5f6,\n  MEDIA: 0xba68c8,\n  SOURCE: 0x81c784,\n  DESTINATION: 0x90a4ad,\n  ANALYSER: 0xf48fb1,\n  AUDIO_WORKLET: 0x9fa8da,\n\n  DEPRECATED: 0xe0e0e0,\n  AUDIO_PARAM: 0xffa726,\n  TEXT: 0x263238,\n  INPUT_OUTPUT: 0x455a63,\n  EDGE: 0x455a63,\n};\n\nexport const TextStyle = {\n  TITLE: {\n    fill: Color.TEXT,\n    fontSize: 16,\n  },\n  PARAM: {\n    fill: Color.TEXT,\n    fontSize: 9,\n  },\n};\n\nexport const PortStyle = {\n  /** Stroke width around port radius. */\n  STROKE_WIDTH: 2,\n  /** Inner color for ports without connecting edges. */\n  DISCONNECTED_FILL_COLOR: 0xffffff,\n  /** Padding around input ports. */\n  INPUT_GROUP_MARGIN: 2,\n  /** Height of input output ports. */\n  INPUT_HEIGHT: 20,\n  /** Radius of the visible port icon. */\n  INPUT_RADIUS: 6,\n  /** Padding around the group of params. */\n  PARAM_GROUP_MARGIN: 2,\n  /** Height of audio parameter ports. */\n  PARAM_HEIGHT: 12,\n  /** Radius of visible port icon. */\n  PARAM_RADIUS: 4,\n};\n\nexport const NodeStyle = {\n  TITLE_PADDING: 4,\n  /** Stroke width around node when highlighted. */\n  HIGHLIGHT_STROKE_WIDTH: 5,\n  /** Stroke color around node when highlighted. */\n  HIGHLIGHT_STROKE_COLOR: 0x000000,\n  /** Node background corner radius. */\n  CORNER_RADIUS: 3,\n  /** Node background padding around contained text. */\n  PADDING: 10,\n};\n\n/**\n * @param {string} nodeType\n * @param {boolean} [isOffline]\n * @return {number}\n */\nexport const colorFromNodeType = (nodeType, isOffline = false) => {\n  // AudioNodes are grouped into color categories based on their purposes.\n  switch (nodeType) {\n    case 'AudioDestination':\n      // The destination nodes of OfflineAudioContexts are brown. Those of\n      // \"non-offline\" AudioContexts are a dark grey.\n      return isOffline ? 0x5d4037 : Color.DESTINATION;\n    case 'AudioBufferSource':\n    case 'ConstantSource':\n    case 'Oscillator':\n      return Color.SOURCE;\n    case 'Analyser':\n      return Color.ANALYSER;\n    case 'BiquadFilter':\n    case 'Convolver':\n    case 'Delay':\n    case 'DynamicsCompressor':\n    case 'IIRFilter':\n    case 'Panner':\n    case 'StereoPanner':\n    case 'WaveShaper':\n    case 'Gain':\n    case 'ChannelMerger':\n    case 'ChannelSplitter':\n      return Color.PROCESSOR;\n    case 'MediaElementAudioSource':\n    case 'MediaStreamAudioDestination':\n    case 'MediaStreamAudioSource':\n      return Color.MEDIA;\n    case 'AudioWorklet':\n      return Color.AUDIO_WORKLET;\n    case 'ScriptProcessor':\n      return Color.DEPRECATED;\n  }\n\n  // Nothing matched. Odd. Highlight this node in dark red.\n  return 0xc62828;\n};\n"
  },
  {
    "path": "src/panel/graph/graphStyle.ts",
    "content": "export enum GraphColor {\n  PROCESSOR = 0x64b5f6,\n  MEDIA = 0xba68c8,\n  SOURCE = 0x81c784,\n  DESTINATION = 0x90a4ad,\n  ANALYSER = 0xf48fb1,\n  AUDIO_WORKLET = 0x9fa8da,\n  OFFLINE_DESTINATION = 0x5d4037,\n\n  DEPRECATED = 0xe0e0e0,\n  AUDIO_PARAM = 0xffa726,\n  TEXT = 0x263238,\n  INPUT_OUTPUT = 0x455a63,\n  EDGE = 0x455a63,\n\n  UNKNOWN = 0xc62828,\n}\n\nexport const GraphTextStyle = {\n  TITLE: {\n    fill: GraphColor.TEXT,\n    fontSize: 16,\n  },\n  PARAM: {\n    fill: GraphColor.TEXT,\n    fontSize: 9,\n  },\n};\n\nexport const GraphPortStyle = {\n  /** Stroke width around port radius. */\n  STROKE_WIDTH: 2,\n  /** Inner color for ports without connecting edges. */\n  DISCONNECTED_FILL_COLOR: 0xffffff,\n  /** Padding around input ports. */\n  INPUT_GROUP_MARGIN: 2,\n  /** Height of input output ports. */\n  INPUT_HEIGHT: 20,\n  /** Radius of the visible port icon. */\n  INPUT_RADIUS: 6,\n  /** Padding around the group of params. */\n  PARAM_GROUP_MARGIN: 2,\n  /** Height of audio parameter ports. */\n  PARAM_HEIGHT: 12,\n  /** Radius of visible port icon. */\n  PARAM_RADIUS: 4,\n};\n\nexport const GraphNodeStyle = {\n  /** Padding above and below title text. */\n  TITLE_PADDING: 4,\n  /** Stroke width around node when highlighted. */\n  HIGHLIGHT_STROKE_WIDTH: 5,\n  /** Stroke color around node when highlighted. */\n  HIGHLIGHT_STROKE_COLOR: 0x000000,\n  /** Node background corner radius. */\n  CORNER_RADIUS: 3,\n  /** Node background padding around contained text. */\n  PADDING: 10,\n};\n\n/**\n * @param nodeType\n * @param isOffline\n */\nexport const colorFromNodeType = (nodeType: string, isOffline = false) => {\n  // AudioNodes are grouped into color categories based on their purposes.\n  switch (nodeType) {\n    case 'AudioDestination':\n      // The destination nodes of OfflineAudioContexts are brown. Those of\n      // \"non-offline\" AudioContexts are a dark grey.\n      return isOffline\n        ? GraphColor.OFFLINE_DESTINATION\n        : GraphColor.DESTINATION;\n    case 'AudioBufferSource':\n    case 'ConstantSource':\n    case 'Oscillator':\n      return GraphColor.SOURCE;\n    case 'Analyser':\n      return GraphColor.ANALYSER;\n    case 'BiquadFilter':\n    case 'Convolver':\n    case 'Delay':\n    case 'DynamicsCompressor':\n    case 'IIRFilter':\n    case 'Panner':\n    case 'StereoPanner':\n    case 'WaveShaper':\n    case 'Gain':\n    case 'ChannelMerger':\n    case 'ChannelSplitter':\n      return GraphColor.PROCESSOR;\n    case 'MediaElementAudioSource':\n    case 'MediaStreamAudioDestination':\n    case 'MediaStreamAudioSource':\n      return GraphColor.MEDIA;\n    case 'AudioWorklet':\n      return GraphColor.AUDIO_WORKLET;\n    case 'ScriptProcessor':\n      return GraphColor.DEPRECATED;\n  }\n\n  // Nothing matched. Odd. Highlight this node in dark red.\n  return GraphColor.UNKNOWN;\n};\n"
  },
  {
    "path": "src/panel/main.ts",
    "content": "/// <reference path=\"../chrome/DebuggerWebAudioDomain.ts\" />\n\n// This module disable's pixi.js use of new Function to optimize rendering.\nimport '@pixi/unsafe-eval';\n\nimport {merge, Subject} from 'rxjs';\nimport {catchError, filter, map, scan, shareReplay, tap} from 'rxjs/operators';\n\nimport {chrome} from '../chrome';\n\nimport {Audion} from '../devtools/Types';\n\nimport {mapThruWorker} from '../utils/mapThruWorker';\n\nimport {WholeGraphButton} from './components/WholeGraphButton';\nimport {querySelector} from './components/domUtils';\nimport {renderRealtimeSummary} from './components/realtimeSummary';\nimport {renderSelectGraph} from './components/selectGraph';\nimport {renderDetailPanel} from './components/detailPanel';\nimport {renderCollectGarbage} from './components/collectGarbage';\n\nimport {connect} from './Observer.runtime';\nimport {AudioGraphRender} from './graph/AudioGraphRender';\nimport {GraphSelector} from './GraphSelector';\nimport {updateGraphRender} from './updateGraphRender';\nimport {updateGraphSizes} from './updateGraphSizes';\n\nif (chrome.devtools.panels.themeName === 'dark') {\n  document.querySelector('html').className = '-theme-with-dark-background';\n}\n\nconst devtoolsRequestSubject$ = new Subject<Audion.DevtoolsRequest>();\nconst devtoolsObserver$ = connect<\n  Audion.DevtoolsRequest,\n  Audion.DevtoolsMessage\n>(devtoolsRequestSubject$);\n\nconst allGraphsObserver$ = devtoolsObserver$.pipe(\n  scan((allGraphs, message) => {\n    if ('allGraphs' in message) {\n      return message.allGraphs;\n    } else if ('graphContext' in message) {\n      if (\n        message.graphContext.graph &&\n        message.graphContext.context.contextState !== 'closed'\n      ) {\n        return {\n          ...allGraphs,\n          [message.graphContext.id]: message.graphContext,\n        };\n      } else {\n        allGraphs = {...allGraphs};\n        delete allGraphs[message.graphContext.id];\n        return allGraphs;\n      }\n    }\n    return allGraphs;\n  }, {} as Audion.GraphContextsById),\n  shareReplay({bufferSize: 1, refCount: true}),\n);\n\nconst graphSelector = new GraphSelector({\n  allGraphs$: allGraphsObserver$,\n});\ngraphSelector.options$.subscribe((options) => {\n  if (\n    // Select a graph automatically if one is not selected.\n    graphSelector.graphId === '' ||\n    // Select a graph automatically if current selected graph is no longer available.\n    !options.includes(graphSelector.graphId)\n  ) {\n    // Select the newest graph (the last in the list).\n    graphSelector.select(options[options.length - 1] || '');\n  }\n});\n\nconst graphContainer =\n  /** @type {HTMLElement} */ document.getElementsByClassName(\n    'web-audio-graph',\n  )[0] as HTMLElement;\n\nconst graphRender = new AudioGraphRender({elementContainer: graphContainer});\ngraphRender.init();\n\nconst layoutWorker = new Worker('audion-panelWorker.js');\n\ngraphSelector.graph$\n  .pipe(\n    map(updateGraphSizes(graphRender)),\n    map((graphContext) => ({graphContext})),\n    mapThruWorker<Audion.GraphContext>(layoutWorker),\n    map(updateGraphRender(graphRender)),\n    catchError((reason, caught) => {\n      console.error(\n        'An error handling the latest audio graph context occured:',\n        reason,\n      );\n      return caught;\n    }),\n  )\n  .subscribe();\n\nconst wholeGraphButton = new WholeGraphButton();\nwholeGraphButton.click$.subscribe(() => {\n  
graphRender.camera.fitToScreen();\n});\n\ngraphContainer.appendChild(graphRender.pixiView);\ngraphContainer.appendChild(wholeGraphButton.render());\n\nmerge(\n  renderCollectGarbage(querySelector('.toolbar-garbage-button')).pipe(\n    tap((action) => {\n      if (action && 'type' in action && action.type === 'collectGarbage') {\n        devtoolsRequestSubject$.next(action);\n      }\n    }),\n    filter(isHTMLElement),\n  ),\n  renderSelectGraph(\n    querySelector('.web-audio-toolbar-container .dropdown-title'),\n    querySelector('.web-audio-select-graph-dropdown'),\n    querySelector('.web-audio-toolbar-container .toolbar-dropdown'),\n    graphSelector.graphId$,\n    allGraphsObserver$,\n  ).pipe(\n    tap((action) => {\n      if (action && 'type' in action && action.type === 'selectGraph') {\n        graphSelector.select(action.graphId);\n      }\n    }),\n    filter(isHTMLElement),\n  ),\n  renderRealtimeSummary(\n    querySelector('.web-audio-status'),\n    graphSelector.graph$.pipe(map(({realtimeData}) => realtimeData)),\n  ),\n  renderDetailPanel(\n    querySelector('.web-audio-detail-panel'),\n    graphSelector.graph$,\n    graphRender.selectedNode$,\n  ),\n)\n  // Observe elements as they are changed.\n  .subscribe();\n\ndocument.getElementsByClassName('web-audio-loading')[0].classList.add('hidden');\n\n/**\n * @param value\n * @returns value is an HTMLElement\n */\nfunction isHTMLElement(value: unknown): value is HTMLElement {\n  // instanceof already rejects null and undefined and returns a boolean.\n  return value instanceof HTMLElement;\n}\n"
  },
  {
    "path": "src/panel/updateGraphRender.ts",
    "content": "import {Audion} from '../devtools/Types';\nimport {AudioGraphRender} from './graph/AudioGraphRender';\n\nexport function updateGraphRender(\n  graphRender: AudioGraphRender,\n): (value: Audion.GraphContext) => void {\n  return (graphContext) => graphRender.update(graphContext);\n}\n"
  },
  {
    "path": "src/panel/updateGraphSizes.ts",
    "content": "import {Audion} from '../devtools/Types';\n\nimport {AudioGraphRender} from './graph/AudioGraphRender';\n\nexport function updateGraphSizes(\n  graphRender: AudioGraphRender,\n): (value: Audion.GraphContext, index: number) => Audion.GraphContext {\n  return (graphContext) => graphRender.updateGraphSizes(graphContext);\n}\n"
  },
  {
    "path": "src/panel/worker.ts",
    "content": "import * as dagre from 'dagre';\nimport {fromEvent, Observable} from 'rxjs';\nimport {\n  auditTime,\n  distinctUntilChanged,\n  filter,\n  map,\n  startWith,\n  withLatestFrom,\n} from 'rxjs/operators';\n\nimport {serializeGraphContext} from '../devtools/serializeGraphContext';\nimport {\n  deserializeGraphContext,\n  SerializedGraphContext,\n} from '../devtools/deserializeGraphContext';\nimport {setOptionsToGraphContext} from '../devtools/setOptionsToGraphContext';\nimport {layoutGraphContext} from '../devtools/layoutGraphContext';\n\ninterface LayoutOptionsMessage {\n  layoutOptions: dagre.GraphLabel;\n}\n\ninterface GraphContextMessage {\n  graphContext: SerializedGraphContext;\n}\n\ntype PanelMessage = MessageEvent<LayoutOptionsMessage | GraphContextMessage>;\n\nconst messages$ = fromEvent<PanelMessage>(self, 'message').pipe(\n  map((message) => message.data),\n);\n\nconst layoutOptions$: Observable<dagre.GraphLabel> = messages$.pipe(\n  filter((msg): msg is LayoutOptionsMessage => 'layoutOptions' in msg),\n  map((message) => message.layoutOptions),\n  startWith({rankdir: 'LR'}),\n);\n\nmessages$\n  .pipe(\n    filter((msg): msg is GraphContextMessage => 'graphContext' in msg),\n    map((message) => message.graphContext),\n    distinctUntilChanged(\n      (a, b) => a?.id === b?.id && a?.eventCount === b?.eventCount,\n    ),\n    auditTime(16),\n    map((graphContext) => deserializeGraphContext(graphContext)),\n    withLatestFrom(layoutOptions$),\n    map(setOptionsToGraphContext),\n    map(layoutGraphContext),\n    map(serializeGraphContext),\n  )\n  .subscribe((context) => {\n    self.postMessage(context);\n  });\n"
  },
  {
    "path": "src/panel.html",
    "content": "<html>\n  <head>\n    <style>\n      body {\n        cursor: default;\n        font-family: '.SFNSDisplay-Regular', 'Helvetica Neue', 'Lucida Grande',\n          sans-serif;\n        font-size: 12px;\n        user-select: none;\n        color: var(--color-text-primary);\n        background: var(--color-background);\n      }\n\n      /* Default fonts */\n      .platform-linux {\n        font-family: Roboto, Ubuntu, Arial, sans-serif;\n      }\n\n      .platform-mac {\n        font-family: '.SFNSDisplay-Regular', 'Helvetica Neue', 'Lucida Grande',\n          sans-serif;\n      }\n\n      .platform-mac,\n      .platform-linux {\n        --override-text-color: rgb(48 57 66);\n\n        color: var(--override-text-color);\n      }\n\n      .platform-windows {\n        font-family: 'Segoe UI', Tahoma, sans-serif;\n      }\n\n      :focus {\n        outline-width: 0;\n      }\n\n      /* Monospace font per platform configuration */\n      .platform-mac,\n      :host-context(.platform-mac) {\n        --monospace-font-size: 11px;\n        --monospace-font-family: menlo, monospace;\n        --source-code-font-size: 11px;\n        --source-code-font-family: menlo, monospace;\n      }\n\n      .platform-windows,\n      :host-context(.platform-windows) {\n        --monospace-font-size: 12px;\n        --monospace-font-family: consolas, lucida console, courier new,\n          monospace;\n        --source-code-font-size: 12px;\n        --source-code-font-family: consolas, lucida console, courier new,\n          monospace;\n      }\n\n      .platform-linux,\n      :host-context(.platform-linux) {\n        --monospace-font-size: 11px;\n        --monospace-font-family: dejavu sans mono, monospace;\n        --source-code-font-size: 11px;\n        --source-code-font-family: dejavu sans mono, monospace;\n      }\n\n      .-theme-with-dark-background .platform-linux,\n      .-theme-with-dark-background .platform-mac,\n      :host-context(.-theme-with-dark-background) .platform-linux,\n      :host-context(.-theme-with-dark-background) .platform-mac {\n        --override-text-color: rgb(189 198 207);\n      }\n\n      :root {\n        --color-primary: rgb(26 115 232);\n        --color-primary-variant: rgb(66 133 244);\n        --color-background: rgb(255 255 255);\n        --color-background-inverted: rgb(0 0 0);\n        --color-background-inverted-opacity-30: rgb(0 0 0 / 30%);\n        --color-background-inverted-opacity-50: rgb(0 0 0 / 50%);\n        --color-background-opacity-50: rgb(255 255 255 / 50%);\n        --color-background-opacity-80: rgb(255 255 255 / 80%);\n        --color-background-elevation-0: rgb(248 249 249);\n        --color-background-elevation-1: rgb(241 243 244);\n        --color-background-elevation-2: rgb(222 225 230);\n        /** Used when the elevation is visible only on dark theme */\n        --color-background-elevation-dark-only: var(--color-background);\n        --color-background-highlight: rgb(202 205 209);\n        /** To draw grid lines behind elements */\n        --divider-line: rgb(0 0 0 / 10%);\n        /**\n         * When hovering over an element and the background should be a little\n         * bit darker. For example, when hovering over a node in the flame chart\n         * tree in the performance panel. 
This color is intentionally opaque,\n         * since the color of the underlying element should still be dominant.\n         */\n        --color-background-hover-overlay: rgb(56 121 217 / 10%);\n        /**\n         * Used when selecting a range of a section, for example when\n         * selecting a range in the performance panel timeline.\n         */\n        --color-selection-highlight: rgb(56 121 217 / 30%);\n        --color-selection-highlight-border: rgb(16 81 177);\n        /**\n         * When showing matching elements of a particular search filter (for example\n         * when showing all matching css selectors/rules in the elements style\n         * pane). The highlight is intended to be used on top of the original background\n         * color and text color.\n         */\n        --color-match-highlight: rgb(56 121 217 / 20%);\n        --color-text-primary: rgb(32 33 36);\n        --color-text-secondary: rgb(95 99 104);\n        --color-text-secondary-selected: rgb(188 185 182);\n        --color-text-disabled: rgb(128 134 139);\n        --color-details-hairline: rgb(202 205 209);\n        --color-details-hairline-light: rgb(223 225 227);\n        --color-accent-red: rgb(217 48 37);\n        --color-red: rgb(238 68 47);\n        --color-accent-green: rgb(24 128 56);\n        --color-green: rgb(99 172 190);\n        --color-link: var(--color-primary);\n        --drop-shadow: 0 0 0 1px rgb(0 0 0 / 5%), 0 2px 4px rgb(0 0 0 / 20%),\n          0 2px 6px rgb(0 0 0 / 10%);\n        --drop-shadow-depth-1: 0 1px 2px rgb(60 64 67 / 30%),\n          0 1px 3px 1px rgb(60 64 67 / 15%);\n        --drop-shadow-depth-2: 0 1px 2px rgb(60 64 67 / 30%),\n          0 2px 6px 2px rgb(60 64 67 / 15%);\n        --drop-shadow-depth-3: 0 4px 8px 3px rgb(60 64 67 / 15%),\n          0 1px 3px rgb(60 64 67 / 30%);\n        --drop-shadow-depth-4: 0 6px 10px 4px rgb(60 64 67 / 15%),\n          0 2px 3px rgb(60 64 67 / 30%);\n        --drop-shadow-depth-5: 0 8px 12px 6px rgb(60 64 67 / 15%),\n          0 4px 4px rgb(60 64 67 / 30%);\n        --box-shadow-outline-color: rgb(0 0 0 / 50%);\n        /** These are the colors of the native Mac scrollbars */\n        --color-scrollbar-mac: rgb(143 143 143 / 60%);\n        --color-scrollbar-mac-hover: rgb(64 64 64 / 60%);\n        /** These colors are used on all non-Mac platforms for scrollbars */\n        --color-scrollbar-other: rgb(0 0 0 / 50%);\n        --color-scrollbar-other-hover: rgb(0 0 0 / 50%);\n        /** These colors have the same value in dark mode */\n        --lighthouse-red: rgb(255 78 67);\n        --lighthouse-orange: rgb(255 164 0);\n        --lighthouse-green: rgb(12 206 106);\n        /** The colors are for issue icons and related highlights */\n        --issue-color-red: rgb(235 57 65);\n        --issue-color-yellow: rgb(242 153 0);\n        --issue-color-blue: rgb(26 115 232);\n        /** Used to indicate an input box */\n        --input-outline: rgb(202 205 209);\n\n        /** These colors are used to show errors */\n        --color-error-text: #f00;\n        --color-error-border: hsl(0deg 100% 92%);\n        --color-error-background: hsl(0deg 100% 97%);\n        --color-image-preview-background: rgb(255 255 255);\n\n        /* Colors for styling inputs */\n        --color-input-outline: rgb(128 134 139);\n        --color-input-outline-active: rgb(26 115 232);\n        --color-input-outline-error: rgb(217 48 37);\n        --color-input-outline-disabled: rgba(128 134 139 / 20%);\n        --color-input-text-disabled: rgba(128 134 139 / 
50%);\n\n        /* Colors for styling buttons */\n        --color-button-outline-focus: rgb(26 115 232 / 50%);\n        --color-button-primary-background-hovering: rgb(77 134 225 / 100%);\n        --color-button-primary-background-pressed: rgb(88 132 205);\n        --color-button-primary-text: rgb(255 255 255);\n        --color-button-secondary-background-hovering: rgb(26 115 232 / 10%);\n        --color-button-secondary-background-pressed: rgb(26 92 178 / 25%);\n        --color-button-secondary-border: rgb(218 220 224);\n      }\n\n      .-theme-with-dark-background {\n        --color-primary: rgb(138 180 248);\n        --color-primary-variant: rgb(102 157 246);\n        --color-background: rgb(32 33 36);\n        --color-background-inverted: rgb(255 255 255);\n        --color-background-inverted-opacity-30: rgb(255 255 255 / 30%);\n        --color-background-inverted-opacity-50: rgb(255 255 255 / 50%);\n        --color-background-opacity-50: rgb(32 33 36 / 50%);\n        --color-background-opacity-80: rgb(32 33 36 / 80%);\n        --color-background-elevation-0: rgb(32 32 35);\n        --color-background-elevation-1: rgb(41 42 45);\n        --color-background-elevation-2: rgb(53 54 58);\n        --color-background-elevation-dark-only: var(\n          --color-background-elevation-1\n        );\n        --color-background-highlight: rgb(75 76 79);\n        --divider-line: rgb(255 255 255 / 10%);\n        --color-background-hover-overlay: rgb(56 121 217 / 10%);\n        --color-selection-highlight: rgb(251 202 70 / 20%);\n        --color-selection-highlight-border: rgb(251 202 70);\n        --color-match-highlight: rgb(56 121 217 / 35%);\n        --color-text-primary: rgb(232 234 237);\n        --color-text-secondary: rgb(154 160 166);\n        --color-text-secondary-selected: rgb(188 185 182);\n        --color-text-disabled: rgb(128 134 139);\n        --color-details-hairline: rgb(73 76 80);\n        --color-details-hairline-light: rgb(54 57 59);\n        --color-accent-red: rgb(242 139 130);\n        --color-red: rgb(237 78 76);\n        --color-accent-green: rgb(129 201 149);\n        --color-link: var(--color-primary);\n        --drop-shadow: 0 0 0 1px rgb(255 255 255 / 20%),\n          0 2px 4px 2px rgb(0 0 0 / 20%), 0 2px 6px 2px rgb(0 0 0 / 10%);\n        --drop-shadow-depth-1: 0 1px 2px rgb(0 0 0 / 30%),\n          0 1px 3px 1px rgb(0 0 0 / 15%);\n        --drop-shadow-depth-2: 0 1px 2px rgb(0 0 0 / 30%),\n          0 2px 6px 2px rgb(0 0 0 / 15%);\n        --drop-shadow-depth-3: 0 4px 8px 3px rgb(0 0 0 / 15%),\n          0 1px 3px rgb(0 0 0 / 30%);\n        --drop-shadow-depth-4: 0 6px 10px 4px rgb(0 0 0 / 15%),\n          0 2px 3px rgb(0 0 0 / 30%);\n        --drop-shadow-depth-5: 0 8px 12px 6px rgb(0 0 0 / 15%),\n          0 4px 4px rgb(0 0 0 / 30%);\n        --box-shadow-outline-color: rgb(0 0 0 / 50%);\n        --color-scrollbar-mac: rgb(51 51 51);\n        --color-scrollbar-mac-hover: rgb(75 76 79);\n        --color-scrollbar-other: rgb(51 51 51);\n        --color-scrollbar-other-hover: rgb(75 76 79);\n        --color-error-text: hsl(0deg 100% 75%);\n        --color-error-border: rgb(92 0 0);\n        --color-error-background: hsl(0deg 100% 8%);\n        /* Colors for styling inputs */\n        --color-input-outline: rgb(189 193 198);\n        --color-input-outline-active: rgb(138 180 248);\n        --color-input-outline-error: rgb(242 139 130);\n        --color-input-outline-disabled: rgba(189 193 198 / 20%);\n        --color-input-text-disabled: rgba(128 134 139 / 70%);\n    
    /* Colors for styling buttons */\n        --color-button-outline-focus: rgb(138 180 248 / 75%);\n        --color-button-primary-background-hovering: rgb(174 203 250 / 100%);\n        --color-button-primary-background-pressed: rgb(210 227 252 / 100%);\n        --color-button-primary-text: rgb(0 0 0);\n        --color-button-secondary-background-hovering: rgb(138 180 248 / 15%);\n        --color-button-secondary-background-pressed: rgb(138 180 248 / 23%);\n        --color-button-secondary-border: rgb(60 61 65);\n      }\n\n      body {\n        position: relative;\n        margin: 0;\n        width: 100%;\n        height: 100%;\n      }\n      .hidden {\n        display: none !important;\n      }\n      .vbox {\n        display: flex;\n        flex-direction: column;\n        position: relative;\n      }\n      .full-box {\n        display: flex;\n        flex-direction: column;\n        position: absolute;\n        top: 0;\n        right: 0;\n        bottom: 0;\n        left: 0;\n        background: white;\n        flex: 1 1;\n      }\n      .full-box div {\n        display: flex;\n        flex: auto;\n      }\n      .full-box div div {\n        display: flex;\n        flex: auto;\n        justify-content: center;\n        align-items: center;\n      }\n      .web-audio-container {\n        display: flex;\n        position: absolute;\n        top: 0;\n        right: 0;\n        bottom: 0;\n        left: 0;\n        background: white;\n        flex-direction: column;\n      }\n      .web-audio-toolbar-container {\n        background-color: var(--color-background-elevation-1);\n        border-bottom: 1px solid var(--color-background-highlight);\n        min-height: fit-content;\n      }\n      .web-audio-content-container {\n        display: flex;\n        flex: 1;\n        flex-direction: column;\n      }\n      .web-audio-content-panel {\n        display: flex;\n        flex: 1;\n        flex-direction: row;\n      }\n      .web-audio-detail-panel-container {\n        min-width: 20rem;\n        flex: 0;\n        background-color: var(--color-background-elevation-0);\n        position: relative;\n      }\n      .web-audio-detail-panel {\n        position: absolute;\n        top: 0;\n        bottom: 0;\n        left: 0;\n        right: 0;\n        overflow-x: hidden;\n        overflow-y: auto;\n      }\n      .web-audio-select-graph-dropdown {\n        position: absolute;\n        top: 0;\n        left: 0;\n        min-width: 20rem;\n        max-width: 20rem;\n        background-color: var(--color-background-elevation-1);\n      }\n      .toolbar-shadow {\n        position: relative;\n        white-space: nowrap;\n        height: 26px;\n        overflow: hidden;\n        display: flex;\n        flex: none;\n        align-items: center;\n      }\n      .toolbar-dropdown {\n        height: 100%;\n        display: flex;\n        align-items: center;\n        cursor: pointer;\n        padding: 0 0.2rem;\n      }\n      .toolbar-dropdown:hover {\n        background: var(--color-background-elevation-2);\n      }\n      .dropdown-title,\n      .dropdown-button {\n        display: inline-block;\n      }\n      .web-audio-debug {\n        flex: 0 0 200px;\n        width: 200px;\n      }\n      .web-audio-graph {\n        position: relative;\n        flex: 1 1;\n        overflow: hidden;\n      }\n      .web-audio-graph canvas {\n        position: absolute;\n      }\n      .web-audio-status {\n        display: flex;\n        position: relative;\n        flex: none;\n        height: 26px;\n        
padding-left: 0.6rem;\n        background: var(--color-background-elevation-1);\n        border-top: 1px solid var(--color-background-highlight);\n        align-items: center;\n      }\n    </style>\n  </head>\n  <body>\n    <div class=\"web-audio-container\">\n      <div class=\"web-audio-toolbar-container vbox\">\n        <div class=\"toolbar-shadow\">\n          <div class=\"toolbar-button toolbar-garbage-button\"></div>\n          <div class=\"toolbar-divider\"></div>\n          <div class=\"toolbar-dropdown\">\n            <div>\n              <div class=\"dropdown-title\">(no recordings)</div>\n              <div class=\"dropdown-button\">&blacktriangledown;</div>\n            </div>\n          </div>\n        </div>\n      </div>\n      <div class=\"web-audio-content-container\">\n        <div class=\"web-audio-content-panel\">\n          <div class=\"web-audio-detail-panel-container\">\n            <div class=\"web-audio-detail-panel\"></div>\n          </div>\n          <div class=\"web-audio-graph\"></div>\n        </div>\n        <div class=\"web-audio-status\"></div>\n      </div>\n      <div class=\"web-audio-open-page-instruction full-box hidden\">\n        <div>\n          <p>Open a page that uses Web Audio API to start monitoring.</p>\n        </div>\n      </div>\n      <div class=\"web-audio-reload-instruction full-box hidden\">\n        <div>\n          <p>Web Audio API is already in use. Reload to begin monitoring.</p>\n        </div>\n      </div>\n    </div>\n    <div class=\"web-audio-select-graph-dropdown hidden\"></div>\n    <div class=\"web-audio-loading full-box\">\n      <div>\n        <div><p>Loading ...</p></div>\n      </div>\n    </div>\n    <script src=\"audion-panel.js\"></script>\n  </body>\n</html>\n"
  },
  {
    "path": "src/utils/Observer.emitter.js",
    "content": "/// <reference path=\"Types.ts\" />\n\nimport {Observer} from './Observer';\n\n/**\n * @param {Utils.DataEmitter<T>} emitter\n * @return {Utils.Observer<T>}\n * @template T\n */\nexport function observeMessageEvents(emitter) {\n  return new Observer((onNext) => {\n    const onMessage = (message) => onNext(message.data);\n    emitter.addEventListener('message', onMessage);\n    return () => {\n      emitter.removeEventListener('message', onMessage);\n    };\n  });\n}\n\n/**\n * @param {Utils.Observer<T>} observer\n * @param {Utils.Poster<T>} poster\n * @return {function(): void} stop posting observations\n * @template T\n */\nexport function postObservations(observer, poster) {\n  return observer.observe((message) => poster.postMessage(message));\n}\n"
  },
  {
    "path": "src/utils/Observer.test.js",
    "content": "import {describe, expect, it, jest} from '@jest/globals';\n\nimport {InvariantError} from './error';\nimport {Observer} from './Observer';\nimport {retry} from './retry';\n\ndescribe('Observer', () => {\n  it('observes values', () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const o = new Observer(subscribeMock);\n    o.observe(nextMock);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])('value');\n    expect(nextMock).toBeCalledWith('value');\n  });\n\n  it('observes completion', () => {\n    const subscribeMock = jest.fn();\n    const completeMock = jest.fn();\n    const o = new Observer(subscribeMock);\n    o.observe(() => {}, completeMock);\n    /** @type {function} */ (subscribeMock.mock.calls[0][1])();\n    expect(completeMock).toBeCalledWith();\n  });\n\n  it('observes errors', () => {\n    const subscribeMock = jest.fn();\n    const errorMock = jest.fn();\n    const o = new Observer(subscribeMock);\n    o.observe(\n      () => {},\n      () => {},\n      errorMock,\n    );\n    /** @type {function} */ (subscribeMock.mock.calls[0][2])('reason');\n    expect(errorMock).toBeCalledWith('reason');\n  });\n\n  it('subscribes when first observed', () => {\n    const subscribeMock = jest.fn();\n    const o = new Observer(subscribeMock);\n    expect(subscribeMock).toBeCalledTimes(0);\n    o.observe(() => {});\n    expect(subscribeMock).toBeCalledTimes(1);\n    o.observe(() => {});\n    expect(subscribeMock).toBeCalledTimes(1);\n  });\n\n  it('unsubscribes when last observer unsubscribes', () => {\n    const unsubscribeMock = jest.fn();\n    const o = new Observer(jest.fn().mockReturnValue(unsubscribeMock));\n    expect(unsubscribeMock).toBeCalledTimes(0);\n    const unsubscribe1 = o.observe(() => {});\n    expect(unsubscribeMock).toBeCalledTimes(0);\n    const unsubscribe2 = o.observe(() => {});\n    expect(unsubscribeMock).toBeCalledTimes(0);\n    unsubscribe2();\n    expect(unsubscribeMock).toBeCalledTimes(0);\n    unsubscribe1();\n    expect(unsubscribeMock).toBeCalledTimes(1);\n  });\n});\n\ndescribe('Observer.throttle', () => {\n  it('must throw when observing non-object or null', () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const o = Observer.throttle(new Observer(subscribeMock));\n    o.observe(nextMock);\n    expect(() => {\n      /** @type {function} */ (subscribeMock.mock.calls[0][0])('value');\n    }).toThrowError(InvariantError);\n    expect(() => {\n      /** @type {function} */ (subscribeMock.mock.calls[0][0])('value');\n    }).toThrowError(\n      'Observer.throttle must observe non-null objects. Received: string',\n    );\n    expect(() => {\n      /** @type {function} */ (subscribeMock.mock.calls[0][0])(null);\n    }).toThrowError(InvariantError);\n    expect(() => {\n      /** @type {function} */ (subscribeMock.mock.calls[0][0])(null);\n    }).toThrowError(\n      'Observer.throttle must observe non-null objects. 
Received: null',\n    );\n  });\n\n  it('immediately sends first value when no throttle is running', () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const o = Observer.throttle(new Observer(subscribeMock), {\n      timeout: () => Promise.resolve(),\n    });\n    o.observe(nextMock);\n    const value = {};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    expect(nextMock).toBeCalledWith(value);\n  });\n\n  it('calls key option', async () => {\n    const subscribeMock = jest.fn();\n    const keyMock = jest.fn(({key}) => key);\n    const o = Observer.throttle(new Observer(subscribeMock), {\n      key: keyMock,\n    });\n    o.observe(() => {});\n    const value = {key: 'key'};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    expect(keyMock).toBeCalledWith(value);\n    expect(keyMock).toReturnWith(value.key);\n  });\n\n  it('calls timeout option', async () => {\n    const subscribeMock = jest.fn();\n    const timeoutMock = jest.fn().mockImplementation(() => Promise.resolve());\n    const o = Observer.throttle(new Observer(subscribeMock), {\n      timeout: timeoutMock,\n    });\n    o.observe(() => {});\n    const value = {};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    expect(timeoutMock).toBeCalledTimes(1);\n  });\n\n  it('sends a second value after a throttle timer', async () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const o = Observer.throttle(new Observer(subscribeMock), {\n      timeout: () => Promise.resolve(),\n    });\n    o.observe(nextMock);\n    const value = {};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    expect(nextMock).toBeCalledTimes(1);\n    expect(nextMock).nthCalledWith(1, value);\n    await retry(() => expect(nextMock).toBeCalledTimes(2));\n    expect(nextMock).nthCalledWith(2, value);\n  });\n\n  it('calls default timeout option', async () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const o = Observer.throttle(new Observer(subscribeMock));\n    o.observe(nextMock);\n    const value = {};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    expect(nextMock).toBeCalledTimes(1);\n    await retry(() => expect(nextMock).toBeCalledTimes(2), {\n      timeout: () => new Promise((resolve) => setTimeout(resolve, 5)),\n    });\n    expect(nextMock).nthCalledWith(2, value);\n  });\n\n  it('skips second and sends a third value during a timer', async () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const o = Observer.throttle(new Observer(subscribeMock), {\n      timeout: () => Promise.resolve(),\n    });\n    o.observe(nextMock);\n    const value = {};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    expect(nextMock).toBeCalledTimes(1);\n    expect(nextMock).nthCalledWith(1, value);\n    await retry(() => expect(nextMock).toBeCalledTimes(2));\n    expect(nextMock).nthCalledWith(2, value);\n  });\n\n  it('throttles per object value', async () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const o = Observer.throttle(new Observer(subscribeMock), {\n      timeout: () => 
Promise.resolve(),\n    });\n    o.observe(nextMock);\n    const value1 = {};\n    const value2 = {};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value1);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value2);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value1);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value2);\n    expect(nextMock).toBeCalledTimes(2);\n    expect(nextMock).nthCalledWith(1, value1);\n    expect(nextMock).nthCalledWith(2, value2);\n    await retry(() => expect(nextMock).toBeCalledTimes(4));\n    expect(nextMock).nthCalledWith(3, value1);\n    expect(nextMock).nthCalledWith(4, value2);\n  });\n\n  it('flushes most recent messages before completing', () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const completeMock = jest.fn();\n    const o = Observer.throttle(new Observer(subscribeMock), {\n      timeout: () => Promise.resolve(),\n    });\n    o.observe(nextMock, completeMock);\n    const value = {};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    expect(nextMock).toBeCalledTimes(1);\n    expect(nextMock).nthCalledWith(1, value);\n    /** @type {function} */ (subscribeMock.mock.calls[0][1])();\n    expect(nextMock).toBeCalledTimes(2);\n    expect(nextMock).nthCalledWith(2, value);\n    expect(completeMock).toBeCalledTimes(1);\n  });\n\n  it('flushes most recent messages before error', () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const errorMock = jest.fn();\n    const o = Observer.throttle(new Observer(subscribeMock), {\n      timeout: () => Promise.resolve(),\n    });\n    o.observe(nextMock, () => {}, errorMock);\n    const value = {};\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])(value);\n    expect(nextMock).toBeCalledTimes(1);\n    expect(nextMock).nthCalledWith(1, value);\n    /** @type {function} */ (subscribeMock.mock.calls[0][2])('reason');\n    expect(nextMock).toBeCalledTimes(2);\n    expect(nextMock).nthCalledWith(2, value);\n    expect(errorMock).toBeCalledTimes(1);\n    expect(errorMock).toBeCalledWith('reason');\n  });\n});\n\ndescribe('Observer.transform', () => {\n  it('observes value returned by transform', () => {\n    const subscribeMock = jest.fn();\n    const nextMock = jest.fn();\n    const o = Observer.transform(\n      new Observer(subscribeMock),\n      (value) => 'key: ' + value,\n    );\n    o.observe(nextMock);\n    /** @type {function} */ (subscribeMock.mock.calls[0][0])('value');\n    expect(nextMock).toBeCalledWith('key: value');\n  });\n});\n"
  },
  {
    "path": "src/utils/Observer.ts",
    "content": "import {Utils} from './Types';\n\nimport {invariant} from './error';\n\n/* istanbul ignore next */\n/**\n * Do nothing.\n * @param args\n * @memberof Utils.Observer\n */\nfunction noop(...args: any) {}\n\n/**\n * @param promise\n * @memberof Utils\n * @alias makeCancelable\n */\nfunction makeCancelable<T>(promise: Promise<T>): Utils.CancelablePromise<T> {\n  let cancel = noop;\n  const cancelablePromise = Promise.race([\n    promise.then((value) => ({value, canceled: false})),\n    new Promise((resolve) => (cancel = resolve)).then(() => ({canceled: true})),\n  ]);\n  return {\n    promise: cancelablePromise,\n    cancel,\n  };\n}\n\n/**\n * Implementation of the observer idiom.\n *\n * @memberof Utils\n * @alias Observer\n */\nexport class Observer<T> implements Utils.Observer<T> {\n  subscribe: Utils.SubscribeCallback<T>;\n  _unsubscribeParent: (...args: any) => any;\n  handles: {onNext; onComplete; onError}[];\n\n  constructor(subscribe: Utils.SubscribeCallback<T>) {\n    this.subscribe = subscribe;\n    /** @type {function | null} */\n    this._unsubscribeParent = null;\n    this.handles = [];\n\n    this._onNext = this._onNext.bind(this);\n    this._onComplete = this._onComplete.bind(this);\n    this._onError = this._onError.bind(this);\n  }\n\n  static transform<T1, T2>(\n    target: Utils.Observer<T1>,\n    onTransform: (value: T1) => T2,\n  ): Utils.Observer<T2> {\n    return new Observer((onNext, ...args) => {\n      return target.observe((value) => {\n        onNext(onTransform(value));\n      }, ...args);\n    });\n  }\n\n  static filter<T>(\n    target: Utils.Observer<T>,\n    testFunc: (value: T) => boolean,\n  ): Utils.Observer<T> {\n    return new Observer((onNext, ...args) => {\n      return target.observe((value) => {\n        if (testFunc(value)) {\n          onNext(value);\n        }\n      });\n    });\n  }\n\n  static reduce<T, R>(\n    target: Utils.Observer<T>,\n    reducer: (accum: R, value: T) => R,\n    initial: R,\n  ): Utils.Observer<R> {\n    let latest = initial;\n    return new Observer((onNext, ...args) => {\n      return target.observe((value) => {\n        latest = reducer(latest, value);\n        onNext(latest);\n      }, ...args);\n    });\n  }\n\n  static throttle<T>(\n    target: Utils.Observer<T>,\n    options?: Utils.ThrottleObserverOptions<T>,\n  ): Utils.Observer<T> {\n    return new ThrottleObserver(target, options);\n  }\n\n  /**\n   * Immediately observe a value to any new subscribe.\n   * @param {Observer<T1>} target\n   * @param {function(): T2} onSubscribe\n   * @return {Observer<T1 | T2>}\n   * @template T1\n   * @template T2\n   */\n  static onSubscribe<T1, T2>(\n    target: Utils.Observer<T1>,\n    onSubscribe: () => T2,\n  ): Utils.Observer<T1 | T2> {\n    return new SubscribeImmediateObserver(target, onSubscribe);\n  }\n\n  static props<T extends {[key: string]: any}>(\n    props: {[key in keyof T]: Utils.Observer<T[key]>},\n    latest: T,\n  ): Utils.Observer<T> {\n    return new Observer((onNext, onComplete, onError) => {\n      const unsubscribes = [];\n      for (const [key, prop] of Object.entries(props)) {\n        unsubscribes.push(\n          prop.observe(\n            (value) => {\n              latest = {...latest, [key]: value};\n              onNext(latest);\n            },\n            onComplete,\n            onError,\n          ),\n        );\n      }\n      return () => {\n        for (const unsubscribe of unsubscribes) {\n          unsubscribe();\n        }\n      };\n    });\n  }\n\n  observe(\n    
onNext: (value: T) => void,\n    onComplete: () => void = noop,\n    onError: (error: Error) => void = noop,\n  ): () => void {\n    this._subscribeToParent();\n    const handles = {onNext, onComplete, onError};\n    this.handles.push(handles);\n    return () => {\n      this.handles.splice(this.handles.indexOf(handles), 1);\n      this._unsubscribeFromParent();\n    };\n  }\n\n  protected _onNext(message: T): void {\n    for (let i = 0; i < this.handles.length; i++) {\n      this.handles[i].onNext(message);\n    }\n  }\n\n  protected _onComplete(): void {\n    for (let i = 0; i < this.handles.length; i++) {\n      this.handles[i].onComplete();\n    }\n  }\n\n  protected _onError(reason: any): void {\n    for (let i = 0; i < this.handles.length; i++) {\n      this.handles[i].onError(reason);\n    }\n  }\n\n  /**\n   * Subscribe to parent.\n   */\n  protected _subscribeToParent(): void {\n    if (this.handles.length === 0) {\n      this._unsubscribeParent = this.subscribe(\n        this._onNext,\n        this._onComplete,\n        this._onError,\n      );\n    }\n  }\n\n  /**\n   * Unsubscribe from parent.\n   */\n  protected _unsubscribeFromParent(): void {\n    if (this.handles.length === 0) {\n      this._unsubscribeParent();\n      this._unsubscribeParent = null;\n    }\n  }\n}\n\n/**\n * Throttle repeated observed messages.\n * @memberof Utils\n * @alias ThrottleObserver\n */\nexport class ThrottleObserver<T> extends Observer<T> {\n  private _timerMap: Map<any, {cancel(): void; active: boolean; value: T}>;\n\n  /**\n   * Create a ThrottleObserver.\n   */\n  constructor(\n    target: Utils.Observer<T>,\n    {\n      key = (obj) => obj,\n      timeout = () => new Promise((resolve) => setTimeout(resolve, 16)),\n    } = {} as Utils.ThrottleObserverOptions<T>,\n  ) {\n    const timerMap = new Map() as Map<\n      any,\n      {cancel(): void; active: boolean; value: T}\n    >;\n    super((onNext, onComplete, onError) => {\n      /**\n       * @param {T} message\n       */\n      const onThrottleNext = (message: T) => {\n        invariant(\n          typeof message === 'object' && message !== null,\n          'Observer.throttle must observe non-null objects. Received: %0',\n          message === null ? 
'null' : typeof message,\n        );\n        const timerKey = key(message);\n        if (timerMap.has(timerKey)) {\n          const timer = timerMap.get(timerKey);\n          timer.active = true;\n          timer.value = message;\n        } else {\n          const timer = {cancel: noop, active: false, value: null};\n          (async () => {\n            timerMap.set(timerKey, timer);\n\n            const {promise, cancel} = makeCancelable(timeout());\n            timer.cancel = cancel;\n\n            const {canceled} = await promise;\n\n            timerMap.delete(timerKey);\n            if (!canceled && timer.active) {\n              onThrottleNext(timer.value);\n            }\n          })();\n\n          onNext(message);\n        }\n      };\n      return target.observe(\n        onThrottleNext,\n        () => {\n          this._flush();\n          onComplete();\n        },\n        (reason) => {\n          this._flush();\n          onError(reason);\n        },\n      );\n    });\n\n    this._timerMap = timerMap;\n  }\n\n  /**\n   * Flush remaining timers.\n   */\n  private _flush() {\n    for (const timer of this._timerMap.values()) {\n      invariant(\n        timer.active,\n        'Observer throttle timer must be active when flushing',\n      );\n      this._onNext(timer.value);\n      timer.cancel();\n    }\n    this._timerMap.clear();\n  }\n}\n\n/**\n * Immediately observe a value to any new subscriber.\n */\nexport class SubscribeImmediateObserver<T1, T2> extends Observer<T1 | T2> {\n  onSubscribe: () => T2;\n\n  /**\n   * Create a SubscribeImmediateObserver.\n   */\n  constructor(target: Utils.Observer<T1>, onSubscribe: () => T2) {\n    super((onNext, onComplete, onError) =>\n      target.observe(onNext, onComplete, onError),\n    );\n\n    this.onSubscribe = onSubscribe;\n  }\n\n  observe(\n    onNext: (value: T1 | T2) => void,\n    onComplete?: () => void,\n    onError?: (error: Error) => void,\n  ): () => void {\n    onNext(this.onSubscribe());\n    return super.observe(onNext, onComplete, onError);\n  }\n}\n"
  },
  {
    "path": "src/utils/Types.ts",
    "content": "/** @namespace Utils */\n\n/**\n * An abstraction of the observer idiom.\n *\n * @typedef Utils.Observer\n * @property {Utils.ObserverObserveMethod<T>} observe\n * @template T\n */\n\n/**\n * Install callbacks for each value observed, when the observer completes, if it\n * does, or if the observer errors.\n *\n * @callback Utils.ObserverObserveMethod\n * @param {Utils.SubscribeOnNext<T>} onNext called with each observed value\n * @param {function(): void} [onComplete] called when the observer completes, if\n *   it does\n * @param {function(*): void} [onError] called when the observer produces an\n *   error, if it does\n * @return {function(): void} function to unsubscribe from this installation\n * @template T\n */\n\n/**\n * @callback Utils.SubscribeCallback\n * @param {Utils.SubscribeOnNext<T>} onNext\n * @param {function(): void} onComplete\n * @param {function(*): void} onError\n * @return {function(): void}\n * @template T\n * @alias SubscribeCallback\n */\n\n/**\n * @callback Utils.SubscribeOnNext\n * @param {T} value\n * @return {void}\n * @template T\n * @alias SubscribeOnNext\n */\n\n/**\n * @typedef Utils.Cancelable\n * @property {T} [value]\n * @property {boolean} canceled\n * @template T\n * @alias Cancelable\n */\n\n/**\n * @typedef Utils.CancelablePromise\n * @property {Promise<Utils.Cancelable<T>>} promise\n * @property {function(): void} cancel\n * @template T\n * @alias CancelablePromise\n */\n\n/**\n * @typedef Utils.ThrottleObserverOptions\n * @property {function(T): *} [key]\n * @property {function(): Promise<void>} [timeout]\n * @alias ThrottleObserverOptions\n * @template T\n */\n\n/**\n * @typedef Utils.RetryOptions\n * @property {function(): Promise<void>} [timeout]\n * @property {number} [times=10]\n */\n\n/**\n * @callback Utils.DataEventListener\n * @param {{data: T}} event\n * @return {void}\n * @template T\n */\n\n/**\n * @callback Utils.ModifyDataEventListeners\n * @param {string} eventName\n * @param {Utils.DataEventListener<T>} listener\n * @return {void}\n * @template T\n */\n\n/**\n * @typedef Utils.DataEmitter\n * @property {Utils.ModifyDataEventListeners<T>} addEventListener\n * @property {Utils.ModifyDataEventListeners<T>} removeEventListener\n * @template T\n */\n\n/**\n * @typedef Utils.Poster\n * @property {function(T): void} postMessage\n * @template T\n */\n\nexport namespace Utils {\n  export interface Observer<T> {\n    /**\n     * @param next called with each observed value\n     * @param complete called when the observer completes, if it does\n     * @param error called when the observer produces an error, if it does\n     * @return function to unsubscribe from this installation\n     */\n    observe(\n      next: (value: T) => void,\n      complete?: () => void,\n      error?: (error: Error) => void,\n    ): () => void;\n  }\n\n  export interface SubscribeCallback<T> {\n    (\n      onNext: (value: T) => void,\n      complete: () => void,\n      error: (error: Error) => void,\n    ): () => void;\n  }\n\n  export interface SubscribeOnNext<T> {\n    (value: T): void;\n  }\n\n  export interface Cancelable<T> {\n    value?: T;\n    canceled: boolean;\n  }\n\n  export interface CancelablePromise<T> {\n    promise: Promise<Utils.Cancelable<T>>;\n    cancel(): void;\n  }\n\n  export interface ThrottleObserverOptions<T> {\n    key?(value: T): any;\n    timeout?(): Promise<void>;\n  }\n\n  export interface RetryOptions {\n    timeout?(): Promise<void>;\n    number?: number;\n  }\n}\n"
  },
  {
    "path": "src/utils/dlog.js",
    "content": "import {getTimestampAsString} from '../devtools/WebAudioGraphIntegrator';\n\n// prettier-ignore\n/**\n * Send console logging to inspect window\n * @param {String} message The description of the debug event\n * @param {Object} properties The properties\n *     of audio element for debugging\n */\nexport function DLOG(message, properties) {\n  const SHOW_EXTRA_DEBUG_LOG =\n      localStorage.getItem('showExtraDebugLog') === 'true';\n  if (SHOW_EXTRA_DEBUG_LOG) {\n    let debugMessage = getTimestampAsString();\n    if (message) {\n      debugMessage += message + '\\n';\n    }\n\n    for (const property in properties) {\n      if (properties[property]) {\n        switch (property) {\n          case 'contextId':\n            debugMessage += `  context ID = ${properties[property]} \\n`;\n            break;\n          case 'sourceNodeId':\n            debugMessage +=\n                `  source node ID = ${properties[property]} \\n`;\n            break;\n          case 'nodeId':\n            debugMessage += `  node ID = ${properties[property]} \\n`;\n            break;\n          case 'destinationNodeId':\n            debugMessage +=\n                `  destination node ID = ${properties[property]} \\n`;\n            break;\n          case 'destinationParamId':\n            debugMessage +=\n                `  destination param ID = ${properties[property]} \\n`;\n            break;\n          case 'paramId':\n            debugMessage +=\n                `  audio param ID = ${properties[property]} \\n`;\n            break;\n          case 'reason':\n            debugMessage +=\n                `  Error reason is ${properties[property]} \\n`;\n            break;\n          default:\n            break;\n        }\n      }\n    }\n    console.debug(debugMessage);\n  }\n}\n"
  },
  {
    "path": "src/utils/error.js",
    "content": "/**\n * An error caused by a falsifiable assumption shown to be false.\n * @memberof Utils\n * @alias InvariantError\n */\nexport class InvariantError extends Error {\n  /**\n   * Create an InvariantError.\n   * @param {string} message\n   * @param {Array} args\n   */\n  constructor(message, args) {\n    super();\n    this._message = message;\n    this._args = args;\n  }\n\n  /**\n   * @type {string}\n   */\n  get message() {\n    return this._message.replace(/%(%|\\d+)/g, (match) => {\n      if (match[1] === '%') {\n        return '%';\n      }\n      return this._args[Number(match[1])];\n    });\n  }\n}\n\n/**\n * @param {boolean} test\n * @param {string} message\n * @param {Array} args\n * @memberof Utils\n * @alias invariant\n */\nexport function invariant(test, message, ...args) {\n  if (!test) {\n    throw new InvariantError(message, args);\n  }\n}\n"
  },
  {
    "path": "src/utils/error.test.js",
    "content": "import {describe, expect, it} from '@jest/globals';\n\nimport {invariant, InvariantError} from './error';\n\ndescribe('invariant', () => {\n  it('does not throw when correct', () => {\n    invariant(true, 'always passes');\n  });\n  it('does throw when incorrect', () => {\n    expect(() => {\n      invariant(false, 'always fails');\n    }).toThrowError(InvariantError);\n  });\n  it('replaces %(\\\\d) with indexed variable argument', () => {\n    expect(() => {\n      invariant(false, 'replaces %%%% with %%');\n    }).toThrowError('replaces %% with %');\n    expect(() => {\n      invariant(false, 'replaces %%0 with first arg %0', '\"first\"');\n    }).toThrowError('replaces %0 with first arg \"first\"');\n  });\n});\n"
  },
  {
    "path": "src/utils/index.js",
    "content": "/// <reference path=\"Types.ts\" />\n"
  },
  {
    "path": "src/utils/mapThruWorker.ts",
    "content": "import {fromEvent, Observable, Subscription} from 'rxjs';\nimport {map} from 'rxjs/operators';\n\nexport function mapThruWorker<T2>(worker: Worker) {\n  return <T1>(source: Observable<T1>) => {\n    const messages = fromEvent<MessageEvent<T2>>(worker, 'message').pipe(\n      map(({data}) => data),\n    );\n    return new Observable<T2>((subscriber) => {\n      const subscription = new Subscription();\n      subscription.add(messages.subscribe(subscriber));\n      subscription.add(\n        source.subscribe({\n          next(value) {\n            worker.postMessage(value);\n          },\n        }),\n      );\n      return subscription;\n    });\n  };\n}\n"
  },
  {
    "path": "src/utils/math.js",
    "content": "import {invariant} from './error';\n\n/**\n * Clamp a value between two extremes.\n * @param {number} value\n * @param {number} min\n * @param {number} max\n * @return {number}\n */\nexport function clamp(value, min, max) {\n  invariant(min <= max, 'clamp(_, min, max): min is less than max');\n  invariant(max >= min, 'clamp(_, min, max): max is greater than min');\n  return Math.min(Math.max(value, min), max);\n}\n\n/**\n * Truncate a number at nth digit.\n *\n * trunc(111.111, 2) returns 100\n * trunc(111.111, -2) returns 111.11\n * @param {number} value\n * @param {number} digits a whole number digit to truncate at\n * @return {number}\n */\nexport function trunc(value, digits) {\n  const factor = Math.pow(10, digits);\n  return Math.floor(value / factor) * factor;\n}\n"
  },
  {
    "path": "src/utils/retry.js",
    "content": "/**\n * @param {function(): PromiseLike<T> | T} fn\n * @param {Utils.RetryOptions} options\n * @return {Promise<T>}\n * @template T\n * @memberof Utils\n * @alias retry\n */\nexport async function retry(\n  fn,\n  {timeout = () => Promise.resolve(), times = 10} = {},\n) {\n  try {\n    return await fn();\n  } catch (err) {\n    if (times > 1) {\n      await timeout();\n      return retry(fn, {timeout, times: times - 1});\n    }\n    throw err;\n  }\n}\n"
  },
  {
    "path": "src/utils/retry.test.js",
    "content": "import {describe, expect, it, jest} from '@jest/globals';\n\nimport {retry} from './retry';\n\ndescribe('retry', () => {\n  it('returns value if first attempt succeeds', async () => {\n    await expect(retry(() => 'answer 0')).resolves.toBe('answer 0');\n  });\n  it('returns value after failed attempts', async () => {\n    await expect(\n      retry(\n        jest\n          .fn()\n          .mockImplementationOnce(() => {\n            throw new Error('reason 0');\n          })\n          .mockImplementationOnce(() => 'answer 1'),\n      ),\n    ).resolves.toBe('answer 1');\n  });\n  it('returns last error if no attempt succeeds', async () => {\n    let i = 0;\n    const func = jest.fn(() => {\n      throw new Error(`reason ${i++}`);\n    });\n    await expect(retry(func)).rejects.toMatchObject({message: 'reason 9'});\n    expect(func).toBeCalledTimes(10);\n  });\n  it('tries after a timeout', async () => {\n    let i = 0;\n    const func = jest.fn(() => {\n      throw new Error(`reason ${i++}`);\n    });\n    const timeout = jest.fn(() => Promise.resolve());\n    const retryPromise = retry(func, {timeout});\n    expect(timeout).toBeCalledTimes(1);\n    await expect(retryPromise).rejects.toMatchObject({message: 'reason 9'});\n    expect(timeout).toBeCalledTimes(9);\n  });\n  it('tries n times', async () => {\n    let i = 0;\n    const func = jest.fn(() => {\n      throw new Error(`reason ${i++}`);\n    });\n    await expect(retry(func, {times: 2})).rejects.toMatchObject({\n      message: 'reason 1',\n    });\n    expect(func).toBeCalledTimes(2);\n  });\n});\n"
  },
  {
    "path": "src/utils/rxChrome.ts",
    "content": "import {fromEventPattern, Observable} from 'rxjs';\nimport {chrome} from '../chrome';\nimport {ChromeDebuggerAPIEvent} from '../devtools/DebuggerAttachEventController';\n\n/**\n * Create a function that returns an observable that completes when the api\n * calls back.\n * @param method `chrome` api method whose last argument is a callback\n * @param thisArg `this` inside of the method\n * @returns observable that completes when the method is done\n */\nexport function bindChromeCallback<P extends any[], R extends any[]>(\n  method: (...args: [...params: P, callback: (...values: R) => void]) => void,\n  thisArg = null,\n) {\n  return (...args: P) =>\n    new Observable<R extends [] ? void : R extends [infer R1] ? R1 : R>(\n      (subscriber) => {\n        method.call(thisArg, ...args, (...returnValues: R) => {\n          if (chrome.runtime.lastError) {\n            subscriber.error(chrome.runtime.lastError);\n          } else {\n            if (returnValues.length === 0) {\n              subscriber.next();\n            } else if (returnValues.length === 1) {\n              subscriber.next(returnValues[0]);\n            } else if (returnValues.length > 1) {\n              subscriber.next(returnValues as any);\n            }\n            subscriber.complete();\n          }\n        });\n      },\n    );\n}\n\nexport const fromChromeEvent = <T extends (...args: any) => any>(\n  onEvent: Chrome.Event<T>,\n) =>\n  fromEventPattern<\n    Parameters<T> extends infer T1\n      ? T1 extends []\n        ? void\n        : T1 extends [infer T2]\n        ? T2\n        : T1\n      : never\n  >(onEvent.addListener.bind(onEvent), onEvent.removeListener.bind(onEvent));\n"
  },
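  {
    "path": "src/utils/rxChrome.test.ts",
    "content": "// A hypothetical test sketch, not part of the original suite: it drives\n// fromChromeEvent with a hand-rolled addListener/removeListener pair instead\n// of a real chrome.* event; the fake event and the `as any` cast are\n// assumptions for illustration.\nimport {describe, expect, it} from '@jest/globals';\n\nimport {fromChromeEvent} from './rxChrome';\n\ndescribe('fromChromeEvent', () => {\n  it('unwraps single-argument listener calls', () => {\n    const listeners = new Set<(value: number) => void>();\n    const event = {\n      addListener: (listener: (value: number) => void) => {\n        listeners.add(listener);\n      },\n      removeListener: (listener: (value: number) => void) => {\n        listeners.delete(listener);\n      },\n    };\n    const values: number[] = [];\n    const subscription = fromChromeEvent<(value: number) => void>(\n      event as any,\n    ).subscribe((value) => values.push(value));\n    listeners.forEach((listener) => listener(42));\n    expect(values).toEqual([42]);\n    subscription.unsubscribe();\n    // Unsubscribing should remove the handler registered by addListener.\n    expect(listeners.size).toBe(0);\n  });\n});\n"
  },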
  {
    "path": "src/utils/rxInterop.ts",
    "content": "import {Observable} from 'rxjs';\n\nimport {Observer} from './Observer';\nimport {Utils} from './Types';\n\n/**\n * Wrap a `Utils.Observer` instance with `rxjs.Observable` instance.\n *\n * This is a workaround so `rxjs.Observable` can use `Utils.Observer` as a source\n * until said `Observer` instances can be replaced with `Observable` instances.\n *\n * @param observer observer to wrap\n * @returns observable wrapping an observer\n */\nexport function toRX<T>(observer: Utils.Observer<T>): Observable<T> {\n  return new Observable((subscriber) =>\n    observer.observe(\n      (value) => subscriber.next(value),\n      () => subscriber.complete(),\n      (err) => subscriber.error(err),\n    ),\n  );\n}\n\nexport function toUtilsObserver<T>(\n  observable: Observable<T>,\n): Utils.Observer<T> {\n  return new Observer((next, complete, error) => {\n    const subscription = observable.subscribe({next, complete, error});\n    return () => {\n      subscription.unsubscribe();\n    };\n  });\n}\n"
  },
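  {
    "path": "src/utils/rxInterop.test.ts",
    "content": "// A hypothetical test sketch, not part of the original suite: toRX only\n// calls observe(next, complete, error) on its argument, so a plain object\n// with that shape stands in for a real Utils.Observer here (the `as any`\n// cast is an assumption for illustration).\nimport {describe, expect, it} from '@jest/globals';\n\nimport {toRX} from './rxInterop';\n\ndescribe('toRX', () => {\n  it('forwards next and complete from the wrapped observer', () => {\n    const values: number[] = [];\n    let completed = false;\n    const observer = {\n      observe(next: (value: number) => void, complete: () => void) {\n        next(1);\n        next(2);\n        complete();\n      },\n    };\n    toRX<number>(observer as any).subscribe({\n      next: (value) => values.push(value),\n      complete: () => {\n        completed = true;\n      },\n    });\n    expect(values).toEqual([1, 2]);\n    expect(completed).toBe(true);\n  });\n});\n"
  },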
  {
    "path": "src/webpack.config.js",
    "content": "const {resolve} = require('path');\nconst CopyPlugin = require('copy-webpack-plugin');\n\nmodule.exports = (env, argv) => ({\n  context: __dirname,\n  entry: {\n    'audion-devtools': './devtools/main',\n    'audion-panel': './panel/main',\n    'audion-panelWorker': './panel/worker',\n  },\n  output: {\n    path: resolve(__dirname, '../build/audion'),\n  },\n  devtool: argv.mode === 'development' ? 'source-map' : false,\n  resolve: {\n    extensions: ['', '.webpack.js', '.web.js', '.ts', '.tsx', '.js'],\n  },\n  plugins: [\n    new CopyPlugin({\n      patterns: [\n        {from: './extraSettingPage/options.html', to: 'options.html'},\n        {from: './extraSettingPage/options.js', to: 'options.js'},\n      ],\n    }),\n  ],\n  module: {\n    rules: [\n      {\n        test: /\\.css$/,\n        use: ['style-loader', {loader: 'css-loader', options: {modules: true}}],\n      },\n      {test: /\\.tsx?$/, loader: 'ts-loader'},\n      {test: /\\.js$/, loader: 'source-map-loader'},\n      {\n        test: /\\.(png|jpe?g|gif|svg|eot|ttf|woff|woff2)$/i,\n        // More information here https://webpack.js.org/guides/asset-modules/\n        type: 'asset',\n      },\n    ],\n  },\n});\n"
  },
  {
    "path": "test/.jest-puppeteer.config.json",
    "content": "{\n  \"launch\": {\n    \"headless\": false,\n    \"devtools\": true,\n    \"args\": [\n      \"--no-sandbox\",\n      \"--disable-extensions-except=build/audion\",\n      \"--load-extension=build/audion\"\n    ]\n  }\n}\n"
  },
  {
    "path": "test/.jest.config.json",
    "content": "{\n  \"preset\": \"jest-puppeteer\",\n  \"injectGlobals\": false,\n  \"transform\": {\n    \"\\\\.[jt]sx?$\": \"babel-jest\"\n  },\n  \"testMatch\": [\"!**/.*\", \"**/*.js\"]\n}\n"
  },
  {
    "path": "test/README.md",
    "content": "A directory of integration tests.\n"
  },
  {
    "path": "test/browserLaunch.js",
    "content": "/* global browser */\n\nimport {it} from '@jest/globals';\n\nit('browser launches with extension', async () => {\n  const browserTargets = await browser.targets();\n  const devtoolsTarget = browserTargets.find(\n    (target) => target.type() === 'browser',\n  );\n  await devtoolsTarget.browser();\n});\n"
  },
  {
    "path": "test/updateGraphRender.js",
    "content": "import {resolve} from 'path';\n\nimport {expect, it} from '@jest/globals';\nimport {from, fromEvent, lastValueFrom, takeUntil, toArray} from 'rxjs';\n\nit('updateGraphRender does not error', async () => {\n  const page = globalThis.page;\n  await page.goto(\n    `file://${resolve(__dirname, '../simulations/updateGraphRender.html')}`,\n  );\n  const pageErrors = await lastValueFrom(\n    fromEvent(page, 'pageerror').pipe(\n      takeUntil(from(page.waitForSelector('.complete'))),\n      toArray(),\n    ),\n  );\n  expect(pageErrors).toHaveLength(0);\n});\n"
  },
  {
    "path": "tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"allowJs\": true,\n    \"target\": \"es2020\",\n    \"moduleResolution\": \"node\",\n    \"allowSyntheticDefaultImports\": true,\n    \"esModuleInterop\": true\n  },\n  \"include\": [\"./src/**/*\"]\n}\n"
  }
]