Repository: google/audion Branch: main Commit: 11805e3151d3 Files: 101 Total size: 271.5 KB Directory structure: gitextract_0gd149fy/ ├── .babelrc ├── .editorconfig ├── .eslintrc.json ├── .github/ │ └── workflows/ │ └── nodejs-ci.yml ├── .gitignore ├── .husky/ │ ├── .gitignore │ └── pre-commit ├── .jsdoc.json ├── .prettierrc ├── LICENSE ├── README.md ├── fixtures/ │ └── oscillatorGainParam.ts ├── package.json ├── simulations/ │ ├── updateGraphRender.html │ ├── updateGraphRender.ts │ └── webpack.config.js ├── src/ │ ├── .jest.config.json │ ├── build/ │ │ ├── make-chrome-extension.js │ │ └── manifest.json.mustache │ ├── chrome/ │ │ ├── API.js │ │ ├── Debugger.js │ │ ├── DebuggerPageDomain.ts │ │ ├── DebuggerWebAudioDomain.ts │ │ ├── DevTools.js │ │ ├── Runtime.js │ │ ├── Types.js │ │ └── index.js │ ├── custom.d.ts │ ├── devtools/ │ │ ├── DebuggerAttachEventController.ts │ │ ├── DebuggerEvents.ts │ │ ├── DevtoolsGraphPanel.test.js │ │ ├── DevtoolsGraphPanel.ts │ │ ├── Types.ts │ │ ├── WebAudioEventObserver.test.js │ │ ├── WebAudioEventObserver.ts │ │ ├── WebAudioGraphIntegrator.test.js │ │ ├── WebAudioGraphIntegrator.ts │ │ ├── WebAudioRealtimeData.ts │ │ ├── deserializeGraphContext.ts │ │ ├── layoutGraphContext.ts │ │ ├── main.ts │ │ ├── partitionMap.ts │ │ ├── serializeGraphContext.js │ │ └── setOptionsToGraphContext.ts │ ├── devtools.html │ ├── extraSettingPage/ │ │ ├── options.html │ │ └── options.js │ ├── panel/ │ │ ├── GraphSelector.ts │ │ ├── Observer.runtime.ts │ │ ├── Types.ts │ │ ├── components/ │ │ │ ├── WholeGraphButton.css │ │ │ ├── WholeGraphButton.ts │ │ │ ├── collectGarbage.css │ │ │ ├── collectGarbage.ts │ │ │ ├── detailPanel.css │ │ │ ├── detailPanel.ts │ │ │ ├── domUtils.ts │ │ │ ├── realtimeSummary.ts │ │ │ ├── selectGraph.css │ │ │ └── selectGraph.ts │ │ ├── graph/ │ │ │ ├── AudioEdgeArrowGraphics.ts │ │ │ ├── AudioEdgeCurvedLineGraphics.ts │ │ │ ├── AudioEdgeRender.ts │ │ │ ├── AudioGraphRender.ts │ │ │ ├── AudioGraphText.ts │ │ │ ├── 
AudioGraphTextCacheGroup.ts │ │ │ ├── AudioNodeBackground.ts │ │ │ ├── AudioNodeBackgroundRenderCacheGroup.ts │ │ │ ├── AudioNodePort.ts │ │ │ ├── AudioNodeRender.ts │ │ │ ├── AudioPortCacheGroup.ts │ │ │ ├── Camera.js │ │ │ ├── GraphicsCache.ts │ │ │ ├── graphStyle.js │ │ │ └── graphStyle.ts │ │ ├── main.ts │ │ ├── updateGraphRender.ts │ │ ├── updateGraphSizes.ts │ │ └── worker.ts │ ├── panel.html │ ├── utils/ │ │ ├── Observer.emitter.js │ │ ├── Observer.test.js │ │ ├── Observer.ts │ │ ├── Types.ts │ │ ├── dlog.js │ │ ├── error.js │ │ ├── error.test.js │ │ ├── index.js │ │ ├── mapThruWorker.ts │ │ ├── math.js │ │ ├── retry.js │ │ ├── retry.test.js │ │ ├── rxChrome.ts │ │ └── rxInterop.ts │ └── webpack.config.js ├── test/ │ ├── .jest-puppeteer.config.json │ ├── .jest.config.json │ ├── README.md │ ├── browserLaunch.js │ └── updateGraphRender.js └── tsconfig.json ================================================ FILE CONTENTS ================================================ ================================================ FILE: .babelrc ================================================ { "plugins": [[ "@babel/plugin-transform-modules-commonjs" ], [ "@babel/plugin-proposal-optional-chaining" ]], "presets": ["@babel/preset-typescript"] } ================================================ FILE: .editorconfig ================================================ # EditorConfig is awesome: https://EditorConfig.org # top-most EditorConfig file root = true [*] indent_style = space indent_size = 2 end_of_line = lf charset = utf-8 trim_trailing_whitespace = true insert_final_newline = false ================================================ FILE: .eslintrc.json ================================================ { "env": { "browser": true, "es2021": true, "node": true }, "extends": ["eslint:recommended", "google"], "parserOptions": { "ecmaVersion": 12, "sourceType": "module" }, "rules": { // Indent files with prettier "indent": ["off"], // Allow triple slash comments "spaced-comment": 
["error", "always", {"markers": ["/"]}], "operator-linebreak": ["off"] } } ================================================ FILE: .github/workflows/nodejs-ci.yml ================================================ name: Node.js CI on: [push, pull_request] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: Use Node.js "18.x" uses: actions/setup-node@v3 with: node-version: '18.x' - run: npm ci - name: Run npm test with xvfb uses: coactions/setup-xvfb@v1 with: run: npm test ================================================ FILE: .gitignore ================================================ .DS_Store # dependencies node_modules # build/test .eslintcache docs coverage /build /simulations/build !src/build ================================================ FILE: .husky/.gitignore ================================================ _ ================================================ FILE: .husky/pre-commit ================================================ #!/bin/sh . "$(dirname "$0")/_/husky.sh" npx lint-staged ================================================ FILE: .jsdoc.json ================================================ { "source": { "include": ["./src/"], "includePattern": ".+\\.js(doc)?$", "excludePattern": "(^|\\/|\\\\)_|\\.test\\.js$" }, "opts": { "encoding": "utf8", "recurse": true, "private": false, "lenient": true, "destination": "./docs", "template": "./node_modules/@pixi/jsdoc-template", "readme": "README.md" }, "plugins": ["plugins/markdown"] } ================================================ FILE: .prettierrc ================================================ { "tabWidth": 2, "useTabs": false, "trailingComma": "all", "singleQuote": true, "bracketSpacing": false } ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. 
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2021 Google, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: README.md ================================================ # Audion: Web Audio Graph Visualizer [![Node.js CI](https://github.com/GoogleChrome/audion/actions/workflows/nodejs-ci.yml/badge.svg)](https://github.com/GoogleChrome/audion/actions/workflows/nodejs-ci.yml) Audion is a Chrome extension that adds a panel to DevTools. This panel visualizes the audio graph (programmed with Web Audio API) in real-time. Soon you will be able to install the extension from Chrome Web Store page. ![Google Doodle Hiphop](https://raw.githubusercontent.com/GoogleChrome/audion/main/images/hiphop-doodle.png) ## Usage 1. [Install the extension](https://chrome.google.com/webstore/detail/audion/cmhomipkklckpomafalojobppmmidlgl) from Chrome Web Store. 1. Alternatively, you can clone this repository and build the extension locally. 
Follow [these instructions](https://developer.chrome.com/docs/extensions/mv3/faq/#faq-dev-01) to load the local build. 1. [Open Chrome Developer Tools](https://developer.chrome.com/docs/devtools/open/). You should be able to find the “Web Audio” panel at the top. Select the panel. 1. Visit or reload a page that uses Web Audio API. If the page is loaded before opening Developer Tools, you need to reload the page for the extension to work correctly. 1. You can pan and zoom with the mouse and wheel. Click the “autofit” button to fit the graph within the panel. ## Development ### Build and test the extension 1. Install NodeJS 14 or later. 1. Install dependencies with `npm ci` or `npm install`. 1. Run `npm test` to build and test the extension. #### Install the development copy of the extension 1. Open `chrome://extensions` in Chrome. 1. Turn on `Developer mode` if it is not already active. 1. Load an unpacked extension with the `Load unpacked` button. In the file modal that opens, select the `audion` directory inside of the `build` directory under the copy of this repository. #### Use and make changes to the extension 1. Open the added `Web Audio` panel in an inspector window with a page that uses Web Audio API. 1. Make changes to the extension and rebuild with `npm test` or `npm run build`. 1. Open `chrome://extensions`, click `Update` to reload the rebuilt extension. Close and reopen any tab and inspector to get the rebuilt extension's panel. ### Use extra debugging information 1. Open the extension option panel and check "Click here to show more debug info". 2. Right click the visualizer panel and click "Inspect" to open the extension's DevTools panel, and see the console for the extra debugging information. 
## Acknowledgments Special thanks to [Chi Zeng](https://github.com/chihuahua) (Google), [Gaoping Huang](https://github.com/gaopinghuang0), [Michael "Z" Goddard](https://github.com/mzgoddard) ([Bocoup](https://bocoup.com/)) and [Tenghui Zhang](https://github.com/TenghuiZhang) for their contribution on this project. ## Contribution If you have found an error in this library, please file an issue at: https://github.com/GoogleChrome/audion/issues. Patches are encouraged, and may be submitted by forking this project and submitting a pull request through GitHub. See CONTRIBUTING for more detail. ## License Copyright 2021 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: fixtures/oscillatorGainParam.ts ================================================ /** * Event sequences that would be produced by an audio context with oscillator * and gain nodes connecting outputs to params. * * @file */ import {WebAudioDebuggerEvent} from '../src/chrome/DebuggerWebAudioDomain'; import {Audion} from '../src/devtools/Types'; /** * A sequence of events produced by WebAudioEventObservable from a context * connect some oscillator and gain nodes, especially connecting an output to * another gain node's gain param. 
* * @example * // unit and integration tests can replace * new WebAudioEventObservable() * // with something like * from(OSCILLATOR_GAIN_PARAM_EVENTS) * // or something over time such as * interval(50).pipe(map((_, i) => * OSCILLATOR_GAIN_PARAM_EVENTS[i])) * * @example * // context that creates this sequence from * // WebAudioEventObservable * const audioContext = new AudioContext(); * const delayNode = new DelayNode(audioContext, * {delayTime: delayTime}); * const inputNode = new GainNode(audioContext); * const outputNode = new GainNode(audioContext); * const depthNode = new GainNode(audioContext, * {gain: width}); * const oscillatorNode = new OscillatorNode(audioContext, * {type: "sine", frequency: speed}); * inputNode.connect(delayNode); * delayNode.connect(outputNode); * oscillatorNode.connect(depthNode); * depthNode.connect(delayNode.delayTime); * * @see https://github.com/GoogleChrome/audion/issues/117 */ export const OSCILLATOR_GAIN_PARAM_EVENTS: Audion.WebAudioEvent[] = [ { method: WebAudioDebuggerEvent.contextCreated, params: { context: { callbackBufferSize: 256, contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', contextState: 'suspended', contextType: 'realtime', maxOutputChannelCount: 2, sampleRate: 48000, }, }, }, { method: WebAudioDebuggerEvent.audioNodeCreated, params: { node: { channelCount: 2, channelCountMode: 'explicit', channelInterpretation: 'speakers', contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', nodeId: '57a4d84b-6165-495e-9ad7-2ad82497d423', nodeType: 'AudioDestination', numberOfInputs: 1, numberOfOutputs: 0, }, }, }, { method: WebAudioDebuggerEvent.audioListenerCreated, params: { listener: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', listenerId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 
'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: '63a77a6c-1779-42df-bedc-c68c5171722f', paramType: 'positionX', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: 'e15f2c0e-f466-4d2a-92a2-c3fe23e591f5', paramType: 'positionY', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: 'bbabbcc8-91eb-4014-9351-43e1742644e9', paramType: 'positionZ', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: '4e3f5c2d-6b59-4a69-ab4f-da62db30e7db', paramType: 'forwardX', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: 'd2425aaa-dc91-4e60-ba57-22be7b26f941', paramType: 'forwardY', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: -1, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: '1842fc18-6b51-402b-97f1-c56d4681866a', paramType: 'forwardZ', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: 
'9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: '872a56b9-ed99-47ea-9957-bda9307fac5b', paramType: 'upX', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 1, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: '4acf61c7-363f-44af-9857-c5e8c8ea5629', paramType: 'upY', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'bb5255e5-5bd3-4290-b714-ecd3ff57be28', paramId: '4b818074-5b96-42c3-b2e6-fcdd350e37bb', paramType: 'upZ', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioNodeCreated, params: { node: { channelCount: 2, channelCountMode: 'max', channelInterpretation: 'speakers', contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', nodeId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76', nodeType: 'Delay', numberOfInputs: 1, numberOfOutputs: 1, }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 1, minValue: 0, nodeId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76', paramId: 'a88ea483-fc15-4c2b-ab0c-597af8e069b9', paramType: 'delayTime', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioNodeCreated, params: { node: { channelCount: 2, channelCountMode: 'max', channelInterpretation: 'speakers', contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', nodeId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f', nodeType: 'Gain', numberOfInputs: 1, numberOfOutputs: 1, }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: 
'9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 1, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f', paramId: '03e13b59-a58f-4883-8479-d7a048ebe80a', paramType: 'gain', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioNodeCreated, params: { node: { channelCount: 2, channelCountMode: 'max', channelInterpretation: 'speakers', contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', nodeId: '78b78fae-b32e-4993-a2b4-7523c08e16c0', nodeType: 'Gain', numberOfInputs: 1, numberOfOutputs: 1, }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 1, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: '78b78fae-b32e-4993-a2b4-7523c08e16c0', paramId: 'b6ea1b98-2dda-43d0-8a52-49492fcafdde', paramType: 'gain', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioNodeCreated, params: { node: { channelCount: 2, channelCountMode: 'max', channelInterpretation: 'speakers', contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', nodeId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f', nodeType: 'Gain', numberOfInputs: 1, numberOfOutputs: 1, }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 1, maxValue: 3.4028234663852886e38, minValue: -3.4028234663852886e38, nodeId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f', paramId: '38ec329f-650c-4c35-805c-32c559b47ea7', paramType: 'gain', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioNodeCreated, params: { node: { channelCount: 2, channelCountMode: 'max', channelInterpretation: 'speakers', contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf', nodeType: 'Oscillator', numberOfInputs: 0, numberOfOutputs: 1, }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: 
'9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 0, maxValue: 153600, minValue: -153600, nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf', paramId: '0b2b73d2-bc98-423b-a19c-1a0651e06d20', paramType: 'detune', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.audioParamCreated, params: { param: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', defaultValue: 440, maxValue: 24000, minValue: -24000, nodeId: '59200b98-60e1-43cf-88f6-d0a33d5643cf', paramId: '42dddc62-c058-473e-9f48-a678a708c001', paramType: 'frequency', rate: 'a-rate', }, }, }, { method: WebAudioDebuggerEvent.nodesConnected, params: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', destinationId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76', destinationInputIndex: 0, sourceId: '61b107eb-24ad-4f11-b811-72b2c5e7e79f', sourceOutputIndex: 0, }, }, { method: WebAudioDebuggerEvent.nodesConnected, params: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', destinationId: '78b78fae-b32e-4993-a2b4-7523c08e16c0', destinationInputIndex: 0, sourceId: 'e5bd5ec5-abb8-426a-bad8-65f723970c76', sourceOutputIndex: 0, }, }, { method: WebAudioDebuggerEvent.nodesConnected, params: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', destinationId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f', destinationInputIndex: 0, sourceId: '59200b98-60e1-43cf-88f6-d0a33d5643cf', sourceOutputIndex: 0, }, }, { method: WebAudioDebuggerEvent.nodeParamConnected, params: { contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62', destinationId: 'a88ea483-fc15-4c2b-ab0c-597af8e069b9', sourceId: 'd8ac44f0-f099-40ff-9cf4-949148fca53f', sourceOutputIndex: 0, }, }, { method: WebAudioDebuggerEvent.contextWillBeDestroyed, params: {contextId: '9d36b0e0-4251-41a6-89cb-876b0fbe1b62'}, }, ]; ================================================ FILE: package.json ================================================ { "name": "audion", "private": true, "version": "3.0.9", "description": "A Chrome DevTools extension traces Web Audio API calls and visualizes in the 
DevTools.", "repository": { "type": "git", "url": "git+https://github.com/GoogleChrome/audion.git" }, "keywords": [], "author": "", "license": "Apache-2.0", "bugs": { "url": "https://github.com/GoogleChrome/audion/issues" }, "homepage": "https://github.com/GoogleChrome/audion#readme", "main": "index.js", "engines": { "node": "18" }, "dependencies": { "@pixi/unsafe-eval": "^7.2.4", "dagre": "^0.8.5", "pixi.js": "^7.2.4", "rxjs": "^7.8.1", "taffydb": "^2.7.3" }, "devDependencies": { "@babel/core": "^7.14.6", "@babel/plugin-proposal-optional-chaining": "^7.16.7", "@babel/plugin-transform-modules-commonjs": "^7.14.5", "@babel/preset-typescript": "^7.16.7", "@pixi/jsdoc-template": "^2.6.0", "@types/dagre": "^0.7.46", "@types/graphlib": "^2.1.8", "babel-jest": "^29.5.0", "copy-webpack-plugin": "^11.0.0", "css-loader": "^6.6.0", "devtools-protocol": "^0.0.924232", "eslint": "^8.40.0", "eslint-config-google": "^0.14.0", "file-loader": "^6.2.0", "husky": ">=6", "jest": "^27.0.6", "jest-puppeteer": "^5.0.4", "jsdoc": "^4.0.2", "lint-staged": ">=10", "mustache": "^4.2.0", "pinst": ">=2", "prettier": "^2.3.2", "puppeteer": "^9.1.1", "raw-loader": "^4.0.2", "rimraf": "^3.0.2", "source-map-loader": "^3.0.0", "style-loader": "^3.2.1", "ts-loader": "^9.2.6", "typescript": "^4.4.3", "webpack": "^5.44.0", "webpack-cli": "^4.7.2", "yazl": "^2.5.1" }, "scripts": { "build:chrome-extension": "node src/build/make-chrome-extension.js", "build:clean": "rimraf build", "build:webpack": "webpack --mode production --config src/webpack.config.js", "build": "npm run build:clean && npm run build:webpack && npm run build:chrome-extension", "clean": "rimraf build docs src/coverage simulations/build", "dev": "webpack --mode development --config src/webpack.config.js && npm run build:chrome-extension", "postinstall": "husky install", "postpublish": "pinst --enable", "prepublishOnly": "pinst --disable", "test:integration:build": "npm run test:integration:clean && npm run test:integration:webpack", 
"test:integration:clean": "rimraf simulations/build", "test:integration:webpack": "webpack --mode development --config simulations/webpack.config.js", "test:integration:run": "JEST_PUPPETEER_CONFIG=test/.jest-puppeteer.config.json jest --config test/.jest.config.json", "test:integration": "npm run build && npm run test:integration:build && npm run test:integration:run", "test:jsdoc": "jsdoc -c .jsdoc.json", "test:lint:eslint": "eslint src/**/*.js", "test:lint:prettier": "prettier --check src/**/*.{js,ts}", "test:lint": "npm run test:lint:eslint && npm run test:lint:prettier", "test:unit": "jest --config src/.jest.config.json", "test": "npm run test:lint && npm run test:jsdoc && npm run test:unit && npm run test:integration" }, "lint-staged": { "*.{js}": "eslint --cache --fix", "*.{js,ts,json,css,md}": "prettier --write" } } ================================================ FILE: simulations/updateGraphRender.html ================================================
================================================ FILE: simulations/updateGraphRender.ts ================================================ import { auditTime, EMPTY, filter, finalize, from, interval, map, pipe, switchMap, take, } from 'rxjs'; import {layoutGraphContext} from '../src/devtools/layoutGraphContext'; import {deserializeGraphContext} from '../src/devtools/deserializeGraphContext'; import {serializeGraphContext} from '../src/devtools/serializeGraphContext'; import {WebAudioRealtimeData} from '../src/devtools/WebAudioRealtimeData'; import {integrateWebAudioGraph} from '../src/devtools/WebAudioGraphIntegrator'; import {updateGraphRender} from '../src/panel/updateGraphRender'; import {AudioGraphRender} from '../src/panel/graph/AudioGraphRender'; import {OSCILLATOR_GAIN_PARAM_EVENTS} from '../fixtures/oscillatorGainParam'; import {updateGraphSizes} from '../src/panel/updateGraphSizes'; function main() { const graphContainer = document.querySelector('.graph') as HTMLElement; const graphRender = new AudioGraphRender({ elementContainer: graphContainer, }); graphRender.init(); graphContainer.appendChild(graphRender.pixiView); const simulation = () => pipe( integrateWebAudioGraph({ pollContext() { return EMPTY; }, } as unknown as WebAudioRealtimeData), auditTime(1), map(serializeGraphContext), filter((graphContext) => graphContext.graph !== null), map(updateGraphSizes(graphRender)), map(deserializeGraphContext), map(layoutGraphContext), map(serializeGraphContext), map(updateGraphRender(graphRender)), ); interval(50) .pipe( take(OSCILLATOR_GAIN_PARAM_EVENTS.length), switchMap((_, i) => from( OSCILLATOR_GAIN_PARAM_EVENTS.slice(-1).concat( OSCILLATOR_GAIN_PARAM_EVENTS.slice( 0, i % (OSCILLATOR_GAIN_PARAM_EVENTS.length - 1), ), OSCILLATOR_GAIN_PARAM_EVENTS.slice( (i + 1) % (OSCILLATOR_GAIN_PARAM_EVENTS.length - 1), OSCILLATOR_GAIN_PARAM_EVENTS.length - 1, ), ), ), ), simulation(), finalize(() => graphContainer.classList.add('complete')), ) .subscribe(); } main(); 
================================================ FILE: simulations/webpack.config.js ================================================ const {resolve} = require('path'); const srcConfig = require('../src/webpack.config'); module.exports = (env, argv) => ({ ...srcConfig(env, argv), entry: { updateGraphRender: resolve(__dirname, './updateGraphRender'), }, output: { path: resolve(__dirname, './build'), }, }); ================================================ FILE: src/.jest.config.json ================================================ { "collectCoverage": true, "injectGlobals": false, "transform": { "\\.[jt]sx?$": "babel-jest" }, "coveragePathIgnorePatterns": ["/chrome/"] } ================================================ FILE: src/build/make-chrome-extension.js ================================================ /** * A nodejs script that copies files, writes a extension manifest, and zips it * all up. * * @namespace makeChromeExtension */ const fs = require('fs').promises; const {createWriteStream} = require('fs'); const path = require('path'); const mustache = require('mustache'); const {ZipFile} = require('yazl'); main(); /** * Copy files, generate extension manifest, and zip the unpacked extension. * * Calls other methods in this script. * * @memberof makeChromeExtension */ async function main() { await Promise.all([ copyFiles({ src: '..', dest: '../../build/audion', files: ['panel.html', 'devtools.html'], }), generateManifest({ view: {version: require('../../package.json').version}, dest: '../../build/audion', }), ]); await zipChromeExtension({ src: '../../build', dir: 'audion', }); } /** * Copy file paths from a src directory to a dest directory. 
* * @param {object} options * @memberof makeChromeExtension */ async function copyFiles({src, dest, files, cwd = __dirname}) { await Promise.all( files.map(async (file) => { await mkdir(path.resolve(cwd, dest, path.dirname(file))); await fs.copyFile( path.resolve(cwd, src, file), path.resolve(cwd, dest, file), ); }), ); } /** * Generate a extension manifest from a template file. * * @param {object} options * @memberof makeChromeExtension */ async function generateManifest({ view, dest, file = 'manifest.json', cwd = __dirname, }) { await mkdir(path.resolve(cwd, dest, path.dirname(file))); await fs.writeFile( path.resolve(cwd, dest, file), mustache.render( await fs.readFile( path.resolve(__dirname, 'manifest.json.mustache'), 'utf8', ), view, ), ); } /** * Zip the unpacked chrome extension. * * @param {object} options * @memberof makeChromeExtension */ async function zipChromeExtension({ src, cwd = __dirname, dir, file = `${dir}.zip`, }) { await unlink(path.resolve(cwd, src, file)); const files = await readdirRecursive(path.resolve(cwd, src, dir)); const output = createWriteStream(path.resolve(cwd, src, file)); const zip = new ZipFile(); const zipDone = new Promise((resolve, reject) => zip.outputStream.pipe(output).on('close', resolve).on('error', reject), ); for (const file of files) { zip.addFile(path.resolve(cwd, src, dir, file), file); } zip.end(); await zipDone; } /** * Read entry names in a directory recursively. * @param {string} dir directory to recursively read * @return {Promise>} array of paths relative to `dir` * @memberof makeChromeExtension */ async function readdirRecursive(dir) { return ( await Promise.all( ( await fs.readdir(dir) ).map(async (file) => { try { return (await readdirRecursive(path.resolve(dir, file))).map( (subfile) => path.join(file, subfile), ); } catch (err) { if (err.code === 'ENOTDIR') { return file; } throw err; } }), ) ).flat(); } /** * Create a directory if it does not already exist. 
* * @param {string} dirpath directory to create * @memberof makeChromeExtension */ async function mkdir(dirpath) { try { await fs.mkdir(dirpath, {recursive: true}); } catch (err) { if (err.code === 'EEXIST') { return; } throw err; } } /** * Unlink a file from the filesystem if it exists. * * @param {string} filepath file to unlink * @memberof makeChromeExtension */ async function unlink(filepath) { try { await fs.unlink(filepath); } catch (err) { if (err.code === 'ENOENT') { return; } throw err; } } ================================================ FILE: src/build/manifest.json.mustache ================================================ { "manifest_version": 3, "name": "Audion", "version": "{{version}}", "description": "Web Audio DevTools Extension (graph visualizer)", "devtools_page": "devtools.html", "options_ui": { "page": "options.html", "open_in_tab": false }, "permissions": [ "debugger" ] } ================================================ FILE: src/chrome/API.js ================================================ /// /// /// /** * Top level chrome extension API type. Contains references of each accessible * extension api. * * @typedef Chrome.API * @property {Chrome.Debugger} debugger * @property {Chrome.DevTools} devtools * @property {Chrome.Runtime} runtime */ ================================================ FILE: src/chrome/Debugger.js ================================================ /// /** * [Chrome extension api][1] to the [Chrome Debugger Protocol][2]. Used by this * extension to access the [Web Audio domain][3]. 
* * [1]: https://developer.chrome.com/docs/extensions/reference/debugger/ * [2]: https://chromedevtools.github.io/devtools-protocol/ * [3]: ChromeDebuggerWebAudioDomain.html * * @typedef Chrome.Debugger * @property {function( * Chrome.DebuggerDebuggee, string, function(): void * ): void} attach * @property {function(Chrome.DebuggerDebuggee, function(): void): void} detach * @property {Chrome.Event} onDetach * @property {Chrome.Event} onEvent * @property {Chrome.DebuggerSendCommand} sendCommand * @see https://developer.chrome.com/docs/extensions/reference/debugger/ * @see https://chromedevtools.github.io/devtools-protocol/ */ /** * @callback Chrome.DebuggerSendCommand * @param {Chrome.DebuggerDebuggee} target * @param {string} method * @param {*} [commandParams] * @param {*} [callback] */ /** * A debuggee identifier. * * Either tabId or extensionId must be specified. * * @typedef Chrome.DebuggerDebuggee * @property {string} [extensionId] * @property {string} [tabId] * @property {string} [targetId] * @see https://developer.chrome.com/docs/extensions/reference/debugger/#type-Debuggee */ /** * Arguments passed to Debugger onEvent listeners. * * @callback Chrome.DebuggerOnEventListener * @param {Chrome.DebuggerDebuggee} source * @param {string} method * @param {*} [params] * @return {void} */ ================================================ FILE: src/chrome/DebuggerPageDomain.ts ================================================ /** * @file * Strings passed to `chrome.debugger.sendCommand` and received from * `chrome.debugger.onEvent` callbacks. 
*/ import {ProtocolMapping} from 'devtools-protocol/types/protocol-mapping'; /** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#methods */ export enum PageDebuggerMethod { disable = 'Page.disable', enable = 'Page.enable', } /** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#events */ export enum PageDebuggerEvent { domContentEventFired = 'Page.domContentEventFired', frameAttached = 'Page.frameAttached', frameDetached = 'Page.frameDetached', frameNavigated = 'Page.frameNavigated', frameRequestedNavigation = 'Page.frameRequestedNavigation', frameStartedLoading = 'Page.frameStartedLoading', frameStoppedLoading = 'Page.frameStoppedLoading', lifecycleEvent = 'Page.lifecycleEvent', loadEventFired = 'Page.loadEventFired', } /** @see https://chromedevtools.github.io/devtools-protocol/tot/Page/#types */ export type PageDebuggerEventParams = ProtocolMapping.Events[Name]; ================================================ FILE: src/chrome/DebuggerWebAudioDomain.ts ================================================ /** * @file * Strings passed to `chrome.debugger.sendCommand` and received from * `chrome.debugger.onEvent` callbacks. 
*/ import {ProtocolMapping} from 'devtools-protocol/types/protocol-mapping'; /** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#methods */ export enum WebAudioDebuggerMethod { disable = 'WebAudio.disable', enable = 'WebAudio.enable', getRealtimeData = 'WebAudio.getRealtimeData', } /** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#events */ export enum WebAudioDebuggerEvent { audioListenerCreated = 'WebAudio.audioListenerCreated', audioListenerWillBeDestroyed = 'WebAudio.audioListenerWillBeDestroyed', audioNodeCreated = 'WebAudio.audioNodeCreated', audioNodeWillBeDestroyed = 'WebAudio.audioNodeWillBeDestroyed', audioParamCreated = 'WebAudio.audioParamCreated', audioParamWillBeDestroyed = 'WebAudio.audioParamWillBeDestroyed', contextChanged = 'WebAudio.contextChanged', contextCreated = 'WebAudio.contextCreated', contextWillBeDestroyed = 'WebAudio.contextWillBeDestroyed', nodeParamConnected = 'WebAudio.nodeParamConnected', nodeParamDisconnected = 'WebAudio.nodeParamDisconnected', nodesConnected = 'WebAudio.nodesConnected', nodesDisconnected = 'WebAudio.nodesDisconnected', } /** @see https://chromedevtools.github.io/devtools-protocol/tot/WebAudio/#types */ export type WebAudioDebuggerEventParams = ProtocolMapping.Events[Name]; ================================================ FILE: src/chrome/DevTools.js ================================================ /// /** * [Chrome extension api][1] to devtool inspector available to a extension's * devtools page specified by the extension manifest's `"devtools_page"`. * * [1]: https://developer.chrome.com/docs/extensions/mv3/devtools/ * * @typedef Chrome.DevTools * @property {Chrome.DevToolsInspectedWindow} inspectedWindow * @property {Chrome.DevtoolsNetwork} network * @property {Chrome.DevToolsPanels} panels */ /** * [Extension api][1] for the tab inspected by this `"devtools_page"` instance. 
* * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_inspectedWindow/ * * @typedef Chrome.DevToolsInspectedWindow * @property {string} tabId */ /** * @typedef Chrome.DevtoolsNetwork * @property {Chrome.Event} onNavigated */ /** * [Extension api][1] to manage panels this extension adds. * * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/ * * @typedef Chrome.DevToolsPanels * @property {Chrome.DevToolsPanelsCreateFunction} create * @property {'default' | 'dark'} themeName */ /** * [`chrome.devtools.panels.create(...)`][1] * * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/#method-create * * @callback Chrome.DevToolsPanelsCreateFunction * @param {string} title * @param {string} icon * @param {string} pageUrl * @param {Chrome.DevToolsPanelsCreateCallback} onPanelCreated * @return {void} */ /** * @callback Chrome.DevToolsPanelsCreateCallback * @param {Chrome.DevToolsPanel} panel * @return {void} */ /** * [Panel][1] created by [`chrome.devtools.panels.create`][2]. * * [1]: https://developer.chrome.com/docs/extensions/reference/devtools_panels/#type-ExtensionPanel * [2]: Chrome.html#.DevToolsPanelsCreateFunction * * @typedef Chrome.DevToolsPanel * @property {Chrome.Event} onHidden * @property {Chrome.Event} onShown */ ================================================ FILE: src/chrome/Runtime.js ================================================ /// /** * [Chrome extension api][1] about the extension the host platform and * communication betwen different extension contexts. 
* * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/ * * @typedef Chrome.Runtime * @property {function(): Chrome.RuntimePort} connect * @property {function(string): string} getURL * @property {Chrome.RuntimeError} lastError * @property {Chrome.Event} onConnect */ /** * @typedef Chrome.RuntimeError * @property {string} [message] * @see https://developer.chrome.com/docs/extensions/reference/runtime/#property-lastError */ /** * Callback passed to [`chrome.runtime.onConnect`][1]. * * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/#event-onConnect * * @callback Chrome.RuntimeOnConnectCallback * @param {Chrome.RuntimePort} port * @return {void} */ /** * [Port][1] to another chrome extension runtime context. * * [1]: https://developer.chrome.com/docs/extensions/reference/runtime/#type-Port * * @typedef Chrome.RuntimePort * @property {function(): void} disconnect * @property {Chrome.Event} onDisconnect * @property {Chrome.Event} onMessage * @property {function(*): void} postMessage */ ================================================ FILE: src/chrome/Types.js ================================================ /** * Types provided by the [chrome extension api][1]. * * [1]: https://developer.chrome.com/docs/extensions/reference/ * * @namespace Chrome */ /** * Generic [event emitter][1] in chrome extension types. * * [1]: https://developer.chrome.com/docs/extensions/reference/events/#type-Event * * @typedef Chrome.Event * @property {Chrome.EventCallback} addListener * @property {Chrome.EventCallback} removeListener * @template {function} T */ /** * Function taking an event listener passed to a {@link Chrome.Event} instance. * * @callback Chrome.EventCallback * @param {T} callback * @template {function} T */ ================================================ FILE: src/chrome/index.js ================================================ /// /// /** * Global chrome extension api instance. 
* * Normally available on the global context `chrome` identifier. Use this export * to assist in testing use of the chrome extension api from inside this * extension. * * @type {Chrome.API} * @memberof Chrome * @alias chrome */ export const chrome = getChrome(); /** * Return a no-operation implementation of Chrome.API. Used in testing. * * @return {Chrome.API} * @memberof Chrome */ function noopChrome() { /** * @return {Chrome.Event<*>} */ function noopEvent() { return {addListener() {}, removeListener() {}}; } return { debugger: { attach() {}, detach() {}, onDetach: noopEvent(), onEvent: noopEvent(), sendCommand() {}, }, devtools: { inspectedWindow: {tabId: 'tab'}, network: {onNavigated: noopEvent()}, panels: {create() {}}, }, runtime: { connect() { return { onDisconnect: noopEvent(), onMessage: noopEvent(), disconnect() {}, postMessage(message) {}, }; }, getURL(url) { return url; }, /** * If a called chrome api method errored, lastError is set to that error * while the provided callback is run. Otherwise lastError is not set. */ lastError: undefined, onConnect: noopEvent(), }, }; } /** * Return the global scope. * * @return {*} * @memberof Chrome */ function getGlobal() { if (typeof window === 'object') return window; if (typeof self === 'object') return self; if (typeof globalThis === 'object') return globalThis; if (typeof global === 'object') return global; if (typeof process === 'object') return process; throw new Error('Cannot find global object'); } /** * Return a {@link Chrome.API} instance. Return a copy from * {@link Chrome.noopChrome} if running under a unit test environment. 
* * @return {Chrome.API} * @memberof Chrome */ function getChrome() { const g = getGlobal(); if ( 'chrome' in g && typeof g.chrome === 'object' && typeof g.chrome.devtools === 'object' ) { return g.chrome; } return noopChrome(); } ================================================ FILE: src/custom.d.ts ================================================ declare module '*.svg' { const content: any; export default content; } declare module '*.css' { const content: any; export default content; } ================================================ FILE: src/devtools/DebuggerAttachEventController.ts ================================================ import { BehaviorSubject, combineLatest, concat, defer, EMPTY, Observable, of, Subject, Subscriber, } from 'rxjs'; import { catchError, delay, distinctUntilChanged, exhaustMap, filter, finalize, map, share, take, } from 'rxjs/operators'; import {chrome} from '../chrome'; import {PageDebuggerMethod} from '../chrome/DebuggerPageDomain'; import {WebAudioDebuggerMethod} from '../chrome/DebuggerWebAudioDomain'; /** * Permission value in regards to calling `chrome.debugger.attach`. * * When the extension calls `chrome.debugger.attach` a notification will display * in devtools that the extension is debugging the tab. Attaching when the user * does not expect it and then see this notification is not desired. The user * needs to grant permission for the extension the privilege to attach, or * reject prior permission. * * Permission could be implied when the extension's panel is opened. * * Permission should be rejected when the debugging notification is canceled or * dismissed. * * Permission could be granted more explicitly by a panel component when the * panel is visible but the extension does not have permission. * * WebAudioEventObserver will be instructed with rules like the above by other * functions outside of this file. */ enum AttachPermission { /** * Initial value. 
* * When WebAudioEventObserver is created, it does not know if permission has * been granted or not and should treat this as **not having** permission. */ UNKNOWN, /** * Permission has been granted by a user action. WebAudioEventObserver may * attach to `chrome.debugger`. */ TEMPORARY, /** * Permission has been rejected. WebAudioEventObserver must not attach to * `chrome.debugger`. */ REJECTED, } /** * Value used to indicate if the `chrome.debugger` attachment and * receiving `chrome.debugger.onEvent` events are "active". */ enum BinaryTransition { DEACTIVATING = 'deactivating', IS_INACTIVE = 'isInactive', ACTIVATING = 'activating', IS_ACTIVE = 'isActive', } export interface DebuggerAttachEventState { permission: AttachPermission; attachInterest: number; attachState: BinaryTransition; pageEventInterest: number; pageEventState: BinaryTransition; webAudioEventInterest: number; webAudioEventState: BinaryTransition; } /** Chrome Devtools Protocol version to attach to. */ const debuggerVersion = '1.3'; /** Chrome tab to attach the debugger to. */ const {tabId} = chrome.devtools.inspectedWindow; export enum ChromeDebuggerAPIEventName { detached = 'ChromeDebuggerAPI.detached', } export interface ChromeDebuggerAPIDetachEventParams { reason: 'canceled_by_user' | 'target_closed'; } export interface ChromeDebuggerAPIDetachEvent { method: ChromeDebuggerAPIEventName.detached; params: ChromeDebuggerAPIDetachEventParams; } export type ChromeDebuggerAPIEvent = ChromeDebuggerAPIDetachEvent; export type ChromeDebuggerAPIEventParams = ChromeDebuggerAPIEvent['params']; /** * Control attachment to chrome.debugger depending on if the user has given * permission and how many parts of the extension need attachment. * * @memberof Audion * @alias DebuggerAttachEventController */ export class DebuggerAttachEventController { /** Does user permit extension to use `chrome.debugger`. */ permission$: PermissionSubject; /** How many subscriptions want to attach to `chrome.debugger`. 
*/ attachInterest$: CounterSubject; attachState$: Observable; /** * How many subscriptions want to receive page events through * `chrome.debugger.onEvent`. */ pageEventInterest$: CounterSubject; pageEventState$: Observable; /** * How many subscriptions want to receive web audio events through * `chrome.debugger.onEvent`. */ webAudioEventInterest$: CounterSubject; webAudioEventState$: Observable; combinedState$: Observable; debuggerEvent$: Observable; constructor() { // Create an interface of subjects to track changes in state with the // `chrome.debugger` api. const debuggerSubject = { // Does the extension have permission from the user to use `chrome.debugger` api. permission: new PermissionSubject(), // How many entities want to attach to the debugger to call `sendCommand` // or listen to `onEvent`. attachInterest: new CounterSubject(0), // attachState must be IS_ACTIVE for `chrome.debugger.sendCommand` to be used. attachState: new BinaryTransitionSubject({ initialState: BinaryTransition.IS_INACTIVE, activateAction: () => attach({tabId}, debuggerVersion), deactivateAction: () => detach({tabId}), }), // How many entities want to listen to page events through `onEvent`. pageEventInterest: new CounterSubject(0), // must be IS_ACTIVE for `onEvent` to receive events. pageEventState: new BinaryTransitionSubject({ initialState: BinaryTransition.IS_INACTIVE, activateAction: () => sendCommand({tabId}, PageDebuggerMethod.enable), deactivateAction: () => sendCommand({tabId}, PageDebuggerMethod.disable), }), // How many entities want to listen to web audio events through `onEvent`. webAudioEventInterest: new CounterSubject(0), // webAudioEventState must be IS_ACTIVE for `onEvent` to receive events. 
webAudioEventState: new BinaryTransitionSubject({ initialState: BinaryTransition.IS_INACTIVE, activateAction: () => sendCommand({tabId}, WebAudioDebuggerMethod.enable), deactivateAction: () => sendCommand({tabId}, WebAudioDebuggerMethod.disable), }), }; this.permission$ = debuggerSubject.permission; this.attachInterest$ = debuggerSubject.attachInterest; this.attachState$ = debuggerSubject.attachState; this.pageEventInterest$ = debuggerSubject.pageEventInterest; this.pageEventState$ = debuggerSubject.pageEventState; this.webAudioEventInterest$ = debuggerSubject.webAudioEventInterest; this.webAudioEventState$ = debuggerSubject.webAudioEventState; // Observable of changes to state derived from debuggerSubject. const debuggerState$ = (this.combinedState$ = // Push objects mapping of keys in debuggerSubject to values pushed from // that debuggerSubject member. combineLatest(debuggerSubject).pipe( // Filter out combined state that is not different from the last value. distinctUntilChanged( (previous, current) => previous.permission === current.permission && previous.attachInterest === current.attachInterest && previous.attachState === current.attachState && previous.pageEventInterest === current.pageEventInterest && previous.pageEventState === current.pageEventState && previous.webAudioEventInterest === current.webAudioEventInterest && previous.webAudioEventState === current.webAudioEventState, ), // Make one subscription debuggerSubject once for many subscribers. share(), )); // The following subscriptions govern debuggerSubject. // Govern attachment to `chrome.debugger`. debuggerState$.subscribe({ next: (state) => { // When debugger state has permission to attach to `chrome.debugger` and // something wants to use `chrome.debugger`, activate the attachment. // Otherwise deactivate the attachment. 
if ( state.permission === AttachPermission.TEMPORARY && state.attachInterest > 0 ) { debuggerSubject.attachState.activate(); } else { debuggerSubject.attachState.deactivate(); } }, }); this.debuggerEvent$ = onDebuggerDetach$.pipe( map(([, reason]) => { return { method: ChromeDebuggerAPIEventName.detached, params: {reason}, } as ChromeDebuggerAPIDetachEvent; }), ); // Govern permission rejection and externally induced detachment. onDebuggerDetach$.subscribe({ next([, reason]) { if (reason === 'canceled_by_user') { // Reject permission to use `chrome.debugger` in this extension. We // understand this event to be an explicit rejection from the // extension's user. debuggerSubject.permission.reject(); } // Immediately go to the inactive state. Detachment was initiated // outside the extension and does not need to be requested. debuggerSubject.attachState.next(BinaryTransition.IS_INACTIVE); }, }); // Govern receiving events through `chrome.debugger.onEvent`. debuggerState$.subscribe( activateEventWhileAttached( debuggerSubject.pageEventState, ({pageEventInterest}) => pageEventInterest > 0, ), ); debuggerState$.subscribe( activateEventWhileAttached( debuggerSubject.webAudioEventState, ({webAudioEventInterest}) => webAudioEventInterest > 0, ), ); } /** * Attach to the debugger if not already, and call chrome.debugger.sendCommand. * @param method Chrome devtools protocol method like 'HeapProfiler.collectGarbage'. 
* @returns observable that completes once done without pushing any values */ sendCommand(method: string): Observable { this.attachInterest$.increment(); return this.attachState$.pipe( filter((state) => state === BinaryTransition.IS_ACTIVE), take(1), exhaustMap(() => sendCommand({tabId}, method)), finalize(() => this.attachInterest$.decrement()), ); } } function activateEventWhileAttached( eventState: BinaryTransitionSubject, interestExists: (state: DebuggerAttachEventState) => boolean, ): Partial> { return { next(state) { if ( state.attachState === BinaryTransition.IS_ACTIVE && interestExists(state) ) { // Start receiving events. The attachemnt is active and some entities // are listening for events. eventState.activate(); } else { if (state.attachState === BinaryTransition.IS_ACTIVE) { // Stop receiving events. The attachment is still active but no // entities are listening for events. eventState.deactivate(); } else { // "Skip" deactivation of receiving events and immediately go to the // inactive state. The process of detachment either requested by the // extension or initiated otherwise has implicitly stopped reception // of events. eventState.next(BinaryTransition.IS_INACTIVE); } } }, }; } /** * Create a function that returns an observable that completes when the api * calls back. * @param method `chrome` api method whose last argument is a callback * @param thisArg `this` inside of the method * @returns observable that completes when the method is done */ function bindChromeCallback

( method: (...args: [...params: P, callback: () => void]) => void, thisArg = null, ) { return (...args: P) => new Observable((subscriber) => { method.call(thisArg, ...args, () => { if (chrome.runtime.lastError) { subscriber.error(chrome.runtime.lastError); } else { subscriber.complete(); } }); }); } /** * Return an observable that pushes events from a `chrome` api event. * @param event `chrome` api event * @returns observable of `chrome` api events */ function fromChromeEvent( event: Chrome.Event<(...args: A) => any>, ) { return new Observable((subscriber) => { const listener = (...args: A) => { subscriber.next(args); }; event.addListener(listener); return () => { event.removeListener(listener); }; }); } /** * Call `chrome.debugger.attach`. * * @see * https://developer.chrome.com/docs/extensions/reference/debugger/#method-attach */ const attach = bindChromeCallback(chrome.debugger.attach, chrome.debugger); /** * Call `chrome.debugger.detach`. * * @see * https://developer.chrome.com/docs/extensions/reference/debugger/#method-detach */ const detach = bindChromeCallback(chrome.debugger.detach, chrome.debugger); /** * Call `chrome.debugger.sendCommand`. * * @see * https://developer.chrome.com/docs/extensions/reference/debugger/#method-sendCommand */ const sendCommand = bindChromeCallback( chrome.debugger.sendCommand as ( target: Chrome.DebuggerDebuggee, method: string, params?, callback?, ) => void, chrome.debugger, ); /** * Observable of `chrome.debugger.onDetach` events. */ const onDebuggerDetach$ = fromChromeEvent< [target: Chrome.DebuggerDebuggee, reason: string] >(chrome.debugger.onDetach); /** * Store if user allows the extension to use `chrome.debugger` api. */ export class PermissionSubject extends BehaviorSubject { constructor() { super(AttachPermission.UNKNOWN); } /** * Permit use of `chrome.debugger`. */ grantTemporary() { if (this.value === AttachPermission.UNKNOWN) { this.next(AttachPermission.TEMPORARY); } } /** * Reject use of `chrome.debugger`. 
*/ reject() { if (this.value !== AttachPermission.REJECTED) { this.next(AttachPermission.REJECTED); } } } /** * Description of a transition in BinaryTransitionSubject. */ interface BinaryTransitionDescription { /** The state the Subject must start in to perform this transition. */ beginningState: BinaryTransition; /** The state the Subject is in while performing this transition. */ intermediateState: BinaryTransition; /** The state the Subject is in after action is successfully. */ successState: BinaryTransition; /** The state the Subject is in after action is unsuccessful. */ errorState: BinaryTransition; /** * Delegate that does some work to modify other application state to the * desired state. */ action: () => Observable; } /** * Control a transition between inactive and active state. To perform a * transition the subject enters a intermediate state and calls a delegate to do * some action. After the action completes successfully the subject enters the * desired state. */ class BinaryTransitionSubject extends BehaviorSubject { private readonly activateTransition: BinaryTransitionDescription; private readonly deactivateTransition: BinaryTransitionDescription; constructor({ initialState, activateAction, deactivateAction, }: { initialState: BinaryTransition; activateAction: () => Observable; deactivateAction: () => Observable; }) { super(initialState); this.activateTransition = { beginningState: BinaryTransition.IS_INACTIVE, intermediateState: BinaryTransition.ACTIVATING, successState: BinaryTransition.IS_ACTIVE, errorState: BinaryTransition.IS_INACTIVE, action: activateAction, }; this.deactivateTransition = { beginningState: BinaryTransition.IS_ACTIVE, intermediateState: BinaryTransition.DEACTIVATING, successState: BinaryTransition.IS_INACTIVE, errorState: BinaryTransition.IS_INACTIVE, action: deactivateAction, }; } /** * Transition to a desired state. 
* * Change the subject value if it is set to beginningState to intermediateState and once action completes successfuly, set to successState. * @param description */ transition(description: BinaryTransitionDescription) { if (this.value === description.beginningState) { concat( of(description.intermediateState), description.action(), defer(() => this.value === description.intermediateState ? of(description.successState) : EMPTY, ), ) .pipe( catchError((err) => { console.error(err); if ( err.message.startsWith('Another debugger is already attached') ) { return this.value === description.intermediateState ? of(description.successState) : EMPTY; } return of( this.value === description.intermediateState ? description.errorState : description.beginningState, ); }), ) .subscribe({next: this.next.bind(this)}); } } /** * If subject is inactive, transition to active. */ activate() { this.transition(this.activateTransition); } /** * If subject is active, transition to inactive. */ deactivate() { this.transition(this.deactivateTransition); } } /** * Observable counting some discrete value. */ export class CounterSubject extends BehaviorSubject { /** * Increase value by 1. */ increment() { this.next(this.value + 1); } /** * Decrease value by 1. */ decrement() { this.next(this.value - 1); } } ================================================ FILE: src/devtools/DebuggerEvents.ts ================================================ import {filter, map, Observable} from 'rxjs'; import {chrome} from '../chrome'; import {fromChromeEvent} from '../utils/rxChrome'; import {DebuggerAttachEventController} from './DebuggerAttachEventController'; import {Audion} from './Types'; type DebuggerDomain = 'page' | 'webAudio'; interface DebuggerEventsOptions { domain: D; } type DebuggerDomainEvent = D extends 'page' ? Audion.PageEvent : D extends 'webAudio' ? 
Audion.WebAudioEvent : never; export class DebuggerEventsObservable< D extends DebuggerDomain, > extends Observable> { constructor( public attachController: DebuggerAttachEventController, public options: DebuggerEventsOptions, ) { super((subscriber) => { attachController.attachInterest$.increment(); attachController[options.domain + 'EventInterest$'].increment(); const subscription = fromChromeEvent(chrome.debugger.onEvent) .pipe( map(([debuggeeId, method, params]) => ({method, params})), filter(({method}) => method.toLowerCase().startsWith(options.domain.toLowerCase()), ), ) .subscribe(subscriber); subscription.add(() => { attachController.attachInterest$.decrement(); attachController[options.domain + 'EventInterest$'].decrement(); }); return subscription; }); } } ================================================ FILE: src/devtools/DevtoolsGraphPanel.test.js ================================================ /// /// /// /// import {beforeEach, describe, expect, it, jest} from '@jest/globals'; import dagre from 'dagre'; import {BehaviorSubject, Observable, partition, Subject} from 'rxjs'; import {map} from 'rxjs/operators'; import {chrome} from '../chrome'; import {DevtoolsGraphPanel} from './DevtoolsGraphPanel'; import {serializeGraphContext} from './serializeGraphContext'; jest.mock('../chrome'); /** * @type {Object<*, Audion.GraphContext>} */ const mockGraphs = { 0: { id: 'context0000', /** @type {ChromeDebuggerWebAudio.BaseAudioContext} */ context: { contextId: 'context0000', contextType: 'realtime', contextState: 'running', sampleRate: 48000, maxOutputChannelCount: 2, callbackBufferSize: 1000, }, graph: new dagre.graphlib.Graph(), nodes: {}, }, 1: { id: 'context0000', /** @type {ChromeDebuggerWebAudio.BaseAudioContext} */ context: { contextId: 'context0000', contextType: 'realtime', contextState: 'suspended', sampleRate: 48000, maxOutputChannelCount: 2, callbackBufferSize: 1000, }, graph: new dagre.graphlib.Graph(), nodes: {}, }, 2: { id: 'context0000', context: 
null, graph: null, nodes: null, }, }; describe('DevtoolsGraphPanel', () => { let nextGraph = (graph) => {}; /** @type {Subject} */ let subject; /** @type {Chrome.RuntimePort} */ let port; beforeEach(() => { jest.resetAllMocks(); subject = new Subject(); nextGraph = (value) => subject.next(value); /** @type {BehaviorSubject} */ const gate = new BehaviorSubject(); const [gateOpen, gateClose] = partition(gate, Boolean).map(map(() => {})); const panel = new DevtoolsGraphPanel( subject.pipe( map(serializeGraphContext), map((graphContext) => ({graphContext})), subscribeWhen(gateOpen, gateClose), ), ); panel.onPanelShown$.pipe(map(() => true)).subscribe(gate); port = mockPort(); }); it('creates a panel with chrome.devtools', () => { expect(chrome.devtools.panels.create).toBeCalled(); simulateCreatePanel(); }); it('subscribes to debugger events only after panel is shown', () => { expect(subject.observed).toBe(false); const panel = simulateCreatePanel(); simulateConnectPort(port); expect(subject.observed).toBe(false); // Send onShown event to panel creation callback. simulateShowPanel(panel); expect(subject.observed).toBe(true); }); it('posts graphs when connected', () => { // Send onShown event to panel creation callback. 
const panel = simulateCreatePanel(); simulateConnectPort(port); simulateShowPanel(panel); nextGraph(mockGraphs[0]); nextGraph(mockGraphs[1]); expect(port.postMessage).toBeCalledTimes(2); expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(` Array [ Object { "graphContext": Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "running", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "graph": Object { "edges": Array [], "nodes": Array [], "options": Object { "compound": false, "directed": true, "multigraph": false, }, }, "id": "context0000", "nodes": Object {}, }, }, ] `); expect(port.postMessage.mock.calls[1]).toMatchInlineSnapshot(` Array [ Object { "graphContext": Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "suspended", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "graph": Object { "edges": Array [], "nodes": Array [], "options": Object { "compound": false, "directed": true, "multigraph": false, }, }, "id": "context0000", "nodes": Object {}, }, }, ] `); }); it('posts null graph when context is destroyed', () => { // Send onShown event to panel creation callback. 
const panel = simulateCreatePanel(); simulateConnectPort(port); simulateShowPanel(panel); nextGraph(mockGraphs[0]); nextGraph(mockGraphs[2]); expect(port.postMessage).toBeCalledTimes(2); expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(` Array [ Object { "graphContext": Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "running", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "graph": Object { "edges": Array [], "nodes": Array [], "options": Object { "compound": false, "directed": true, "multigraph": false, }, }, "id": "context0000", "nodes": Object {}, }, }, ] `); expect(port.postMessage.mock.calls[1]).toMatchInlineSnapshot(` Array [ Object { "graphContext": Object { "context": null, "graph": null, "id": "context0000", "nodes": null, }, }, ] `); }); it('stops posting graphs once disconnected', () => { const panel = simulateCreatePanel(); simulateConnectPort(port); simulateShowPanel(panel); nextGraph(mockGraphs[0]); if (jest.isMockFunction(port.onDisconnect.addListener)) { /** @type {function} */ ( port.onDisconnect.addListener.mock.calls[0][0] )(); } nextGraph(mockGraphs[1]); expect(port.postMessage).toBeCalledTimes(1); expect(port.postMessage.mock.calls[0]).toMatchInlineSnapshot(` Array [ Object { "graphContext": Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "running", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "graph": Object { "edges": Array [], "nodes": Array [], "options": Object { "compound": false, "directed": true, "multigraph": false, }, }, "id": "context0000", "nodes": Object {}, }, }, ] `); }); }); /** * Simulate chrome api as if devtool panel was shown. 
* @param {Chrome.DevToolsPanels} panel panel to simulating showing */ function simulateShowPanel(panel) { const panelOnShownCallback = panel.onShown.addListener.mock.calls[0][0]; panelOnShownCallback(); } /** * Simulate chrome api as if devtool panel was created. * @param {Chrome.DevToolsPanel} [panel] panel to simulate creating * @return {Chrome.DevToolsPanel} created mock panel */ function simulateCreatePanel(panel = mockPanel()) { const panelCreateCallback = chrome.devtools.panels.create.mock.calls[0][3]; panelCreateCallback(panel); return panel; } /** * Simulate chrome api as if runtime port was created. * @param {Chrome.RuntimePort} [port] port to simulate connecting * @return {Chrome.RuntimePort} connected port */ function simulateConnectPort(port = mockPort()) { const runtimeOnConnectCallback = chrome.runtime.onConnect.addListener.mock.calls[0][0]; runtimeOnConnectCallback(port); return port; } /** @return {Chrome.Event<*>} */ function mockEvent() { return {addListener: jest.fn(), removeListener: jest.fn()}; } /** @return {Chrome.RuntimePort} */ function mockPort() { return { onDisconnect: mockEvent(), onMessage: mockEvent(), postMessage: jest.fn(), }; } /** * @return {Chrome.DevToolsPanel} mock version of a devtool panel */ function mockPanel() { return {onHidden: mockEvent(), onShown: mockEvent()}; } /** * @param {Observable} subscribeNotifier * @param {Observable} unsubscribeNotifier * @return {function(Observable): Observable} * @template T */ function subscribeWhen(subscribeNotifier, unsubscribeNotifier) { return (source) => { return new Observable((subscriber) => { let subscription = null; let subscribe = () => { const oldSubscribe = subscribe; subscribe = () => {}; subscription = source.subscribe(subscriber); unsubscribe = () => { unsubscribe = () => {}; subscription.unsubscribe(); subscription = null; subscribe = oldSubscribe; }; }; let unsubscribe = () => {}; const onSubscription = subscribeNotifier.subscribe({ next() { subscribe(); }, }); const 
offSubscription = unsubscribeNotifier.subscribe({ next() { unsubscribe(); }, }); return () => { onSubscription.unsubscribe(); offSubscription.unsubscribe(); unsubscribe(); }; }); }; } ================================================ FILE: src/devtools/DevtoolsGraphPanel.ts ================================================ /** DevTools panel that renders the Web Audio graph and more debugging information. */ import {chrome} from '../chrome'; import {Audion} from './Types'; import {fromEventPattern, Observable, Subject} from 'rxjs'; import {map, takeUntil} from 'rxjs/operators'; function fromChromeEvent( event: Chrome.Event<(msg: T) => void>, ): Observable { return fromEventPattern( (handler) => event.addListener(handler), (handler) => event.removeListener(handler), ); } /** * Manage a devtools panel rendering a graph of a web audio context. */ export class DevtoolsGraphPanel { requests$: Observable; onPanelShown$: Observable; /** * Create a DevtoolsGraphPanel. */ constructor(graphs$: Observable) { const requests$ = (this.requests$ = new Subject()); const onPanelShown$ = (this.onPanelShown$ = new Subject()); chrome.devtools.panels.create('Web Audio', '', 'panel.html', (panel) => { fromChromeEvent(panel.onShown).subscribe(onPanelShown$); }); fromChromeEvent(chrome.runtime.onConnect).subscribe({ next(port) { fromChromeEvent(port.onMessage) .pipe(map(([message]) => message)) .subscribe(requests$); graphs$.pipe(takeUntil(fromChromeEvent(port.onDisconnect))).subscribe({ next(graphs) { port.postMessage(graphs); }, }); }, }); } } ================================================ FILE: src/devtools/Types.ts ================================================ /// import {Protocol} from 'devtools-protocol/types/protocol'; import { PageDebuggerEvent, PageDebuggerEventParams, } from '../chrome/DebuggerPageDomain'; import { WebAudioDebuggerEvent, WebAudioDebuggerEventParams, } from '../chrome/DebuggerWebAudioDomain'; import {Utils} from '../utils/Types'; /** @namespace Audion */ /** * 
@typedef Audion.WebAudioEvent * @property {Method} method * @property {Params} params */ export namespace Audion { export type ContextRealtimeData = Protocol.WebAudio.ContextRealtimeData; export enum GraphEdgeType { NODE = 'node', PARAM = 'param', } export interface GraphNodeEdge { sourceOutputIndex: number; destinationType: GraphEdgeType.NODE; destinationInputIndex: number; } export interface GraphParamEdge { sourceOutputIndex: number; destinationType: GraphEdgeType.PARAM; destinationParamId: string; destinationParamIndex: number; } export type GraphEdge = GraphNodeEdge | GraphParamEdge; export interface GraphlibEdge { v: string; w: string; name: string; value: V; } export interface GraphContext { id: Protocol.WebAudio.GraphObjectId; eventCount: number; context: Protocol.WebAudio.BaseAudioContext; realtimeData: ContextRealtimeData; nodes: {[key: string]: GraphNode}; params: {[key: string]: Protocol.WebAudio.AudioParam}; graph: any; } export interface GraphContextMessage { graphContext: Audion.GraphContext; } export interface GraphContextsById { [key: string]: Audion.GraphContext; } export interface AllGraphsMessage { allGraphs: GraphContextsById; } export type DevtoolsMessage = GraphContextMessage | AllGraphsMessage; export enum DevtoolsRequestType { COLLECT_GARBAGE = 'collectGarbage', } export interface DevtoolsCollectGarbageRequest { type: DevtoolsRequestType.COLLECT_GARBAGE; } export type DevtoolsRequest = DevtoolsCollectGarbageRequest; export interface DevtoolsObserver extends Utils.Observer {} export interface GraphNode { node: Protocol.WebAudio.AudioNode; params: Protocol.WebAudio.AudioParam[]; edges: Protocol.WebAudio.NodesConnectedEvent[]; } export type PageEvent = { method: N; params: PageDebuggerEventParams[0]; }; export type WebAudioEvent< N extends WebAudioDebuggerEvent = WebAudioDebuggerEvent, > = { method: N; params: WebAudioDebuggerEventParams[0]; }; } /** * @typedef Audion.GraphContext * @property {ChromeDebuggerWebAudioDomain.GraphObjectId} id * 
@property {ChromeDebuggerWebAudioDomain.BaseAudioContext} context * @property {Object} nodes * @property {Object} params * @property {object} graph */ /** * @typedef Audion.GraphContextMessage * @property {Audion.GraphContext} graphContext */ /** * @typedef Audion.AllGraphsMessage * @property {Object} allGraphs */ /** * @typedef {Audion.GraphContextMessage * | Audion.AllGraphsMessage * } Audion.DevtoolsMessage */ /** * @typedef {Utils.Observer} Audion.DevtoolsObserver */ ================================================ FILE: src/devtools/WebAudioEventObserver.test.js ================================================ /// import {beforeEach, describe, expect, it, jest} from '@jest/globals'; import {chrome} from '../chrome'; import {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain'; import {DebuggerAttachEventController} from './DebuggerAttachEventController'; import {WebAudioEventObservable} from './WebAudioEventObserver'; jest.mock('../chrome'); describe('WebAudioEventObserver', () => { let webAudioEvents$; beforeEach(() => { jest.clearAllMocks(); const attachController = new DebuggerAttachEventController(); attachController.permission$.grantTemporary(); webAudioEvents$ = new WebAudioEventObservable(attachController); }); it('attaches to chrome.debugger', () => { const sub = webAudioEvents$.subscribe(); expect(chrome.debugger.attach).toBeCalled(); if (jest.isMockFunction(chrome.debugger.attach)) { /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])(); } expect(chrome.debugger.sendCommand).toBeCalled(); expect(chrome.debugger.onDetach.addListener).toBeCalled(); expect(chrome.debugger.onEvent.addListener).toBeCalled(); sub.unsubscribe(); }); it('does not reattach when user triggers detach', () => { const sub = webAudioEvents$.subscribe(); if (jest.isMockFunction(chrome.debugger.attach)) { /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])(); } expect(chrome.debugger.attach).toBeCalledTimes(1); if ( 
jest.isMockFunction(chrome.debugger.onDetach.addListener) && chrome.debugger.onDetach.addListener.mock.calls.length > 0 ) { /** @type {function} */ ( chrome.debugger.onDetach.addListener.mock.calls[0][0] )({tabId: 'tab'}, 'canceled_by_user'); } expect(chrome.debugger.attach).toBeCalledTimes(1); sub.unsubscribe(); }); it('detachs from chrome.debugger on unsubscribe', () => { const sub = webAudioEvents$.subscribe(); if (jest.isMockFunction(chrome.debugger.attach)) { /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])(); } expect(chrome.debugger.sendCommand).toBeCalledTimes(1); if (jest.isMockFunction(chrome.debugger.sendCommand)) { /** @type {function} */ (chrome.debugger.sendCommand.mock.calls[0][2])(); } sub.unsubscribe(); expect(chrome.debugger.detach).toBeCalled(); if (jest.isMockFunction(chrome.debugger.sendCommand)) { /** @type {function} */ (chrome.debugger.sendCommand.mock.calls[1][2])(); } expect(chrome.debugger.sendCommand).toBeCalledTimes(2); if (jest.isMockFunction(chrome.debugger.detach)) { /** @type {function} */ (chrome.debugger.detach.mock.calls[0][1])(); } expect(chrome.debugger.onDetach.removeListener).toBeCalled(); expect(chrome.debugger.onEvent.removeListener).toBeCalled(); }); it('forwards to WebAudio debugger protocol events', () => { const nextMock = jest.fn(); const sub = webAudioEvents$.subscribe(nextMock); if (jest.isMockFunction(chrome.debugger.attach)) { /** @type {function} */ (chrome.debugger.attach.mock.calls[0][2])(); } /** @type {ChromeDebuggerWebAudioDomain.ContextCreatedEvent} */ const contextCreated = { context: { contextId: '0', contextType: 'realtime', contextState: 'running', sampleRate: 48000, callbackBufferSize: 1000, maxOutputChannelCount: 2, }, }; if (jest.isMockFunction(chrome.debugger.onEvent.addListener)) { /** @type {function} */ ( chrome.debugger.onEvent.addListener.mock.calls[0][0] )('tab', WebAudioDebuggerEvent.contextCreated, contextCreated); } expect(nextMock).toBeCalledWith({ method: 
WebAudioDebuggerEvent.contextCreated, params: contextCreated, }); sub.unsubscribe(); }); }); ================================================ FILE: src/devtools/WebAudioEventObserver.ts ================================================ import {chrome} from '../chrome'; import {Audion} from './Types'; import {Observable} from 'rxjs'; import { CounterSubject, DebuggerAttachEventController, PermissionSubject, } from './DebuggerAttachEventController'; import {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain'; /** * @memberof Audion * @alias WebAudioEventObserver */ export class WebAudioEventObservable extends Observable { debuggerAttachController: DebuggerAttachEventController; /** Does user permit extension to use `chrome.debugger`. */ permission$: PermissionSubject; /** How many subscriptions want to attach to `chrome.debugger`. */ attachInterest$: CounterSubject; /** * How many subscriptions want to receive events through * `chrome.debugger.onEvent`. */ webAudioEventInterest$: CounterSubject; constructor(debuggerAttachController: DebuggerAttachEventController) { super((subscriber) => { this.debuggerAttachController = debuggerAttachController; this.permission$ = debuggerAttachController.permission$; this.attachInterest$ = debuggerAttachController.attachInterest$; this.webAudioEventInterest$ = debuggerAttachController.webAudioEventInterest$; const onEvent: Chrome.DebuggerOnEventListener = ( debuggeeId, method: WebAudioDebuggerEvent, params, ) => { subscriber.next({method, params}); }; const onDetach = () => { // TODO: Show a warning if the DevTools are still open and allow the // user to re-attach manually, e.g. by pressing a button. 
// See: https://developer.chrome.com/docs/extensions/reference/debugger/#type-DetachReason }; chrome.debugger.onDetach.addListener(onDetach); chrome.debugger.onEvent.addListener(onEvent); this.attachInterest$.increment(); this.webAudioEventInterest$.increment(); return () => { chrome.debugger.onDetach.removeListener(onDetach); chrome.debugger.onEvent.removeListener(onEvent); this.attachInterest$.decrement(); this.webAudioEventInterest$.decrement(); }; }); } } ================================================ FILE: src/devtools/WebAudioGraphIntegrator.test.js ================================================ /// import {beforeEach, describe, expect, it, jest} from '@jest/globals'; import {EMPTY, from, Observable, Subject, throwError} from 'rxjs'; import {concatWith, filter, takeUntil} from 'rxjs/operators'; import {WebAudioDebuggerEvent} from '../chrome/DebuggerWebAudioDomain'; import {integrateWebAudioGraph} from './WebAudioGraphIntegrator'; // FIX: prettier isn't wrapping this next line. // eslint-disable-next-line max-len import * as oscillatorGainFixture from '../../fixtures/oscillatorGainParam'; // Node.js environment doesn't provide some browser-specific APIs // (e.g. performance.now(), localStorage.getItem() and localStorage.setItem()) // Mocking these ensures no errors are thrown when running tests. 
global.performance = { now: jest.fn(() => Date.now()), }; const localStorageMock = { getItem: jest.fn(), setItem: jest.fn(), }; global.localStorage = localStorageMock; describe('WebAudioGraphIntegrator', () => { let nextWebAudioEvent = (value) => {}; let nextGraphContext = jest.fn(); beforeEach(() => { const subject = new Subject(); nextGraphContext = jest.fn(); nextWebAudioEvent = (value) => subject.next(value); const webAudioRealtime = { pollContext() { return new Observable(); }, }; subject .pipe(integrateWebAudioGraph(webAudioRealtime)) .subscribe(nextGraphContext); }); it('adds new context', () => { nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextCreated, params: MockWebAudioEvents.contextCreated[0], }); expect(nextGraphContext).toBeCalledTimes(1); expect(nextGraphContext.mock.calls[0]).toMatchInlineSnapshot(` Array [ Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "running", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "eventCount": 1, "graph": Graph { "_defaultEdgeLabelFn": [Function], "_defaultNodeLabelFn": [Function], "_edgeLabels": Object {}, "_edgeObjs": Object {}, "_in": Object {}, "_isCompound": false, "_isDirected": true, "_isMultigraph": true, "_label": Object {}, "_nodes": Object {}, "_out": Object {}, "_preds": Object {}, "_sucs": Object {}, }, "id": "context0000", "nodes": Object {}, "params": Object {}, "realtimeData": Object { "callbackIntervalMean": 0, "callbackIntervalVariance": 0, "currentTime": 0, "renderCapacity": 0, }, }, ] `); }); it('changes context', () => { nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextCreated, params: MockWebAudioEvents.contextCreated[0], }); expect(nextGraphContext).toBeCalledTimes(1); nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextChanged, params: MockWebAudioEvents.contextChanged[0], }); expect(nextGraphContext).toBeCalledTimes(2); expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(` Array 
[ Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "suspended", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "eventCount": 2, "graph": Graph { "_defaultEdgeLabelFn": [Function], "_defaultNodeLabelFn": [Function], "_edgeLabels": Object {}, "_edgeObjs": Object {}, "_in": Object {}, "_isCompound": false, "_isDirected": true, "_isMultigraph": true, "_label": Object {}, "_nodes": Object {}, "_out": Object {}, "_preds": Object {}, "_sucs": Object {}, }, "id": "context0000", "nodes": Object {}, "params": Object {}, "realtimeData": Object { "callbackIntervalMean": 0, "callbackIntervalVariance": 0, "currentTime": 0, "renderCapacity": 0, }, }, ] `); }); it('removes old context', () => { nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextCreated, params: MockWebAudioEvents.contextCreated[0], }); expect(nextGraphContext).toBeCalledTimes(1); nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextWillBeDestroyed, params: MockWebAudioEvents.contextWillBeDestroyed[0], }); expect(nextGraphContext).toBeCalledTimes(2); expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(` Array [ Object { "context": null, "eventCount": 2, "graph": null, "id": "context0000", "nodes": null, "params": null, "realtimeData": null, }, ] `); }); it('adds new node', () => { nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextCreated, params: MockWebAudioEvents.contextCreated[0], }); expect(nextGraphContext).toBeCalledTimes(1); nextWebAudioEvent({ method: WebAudioDebuggerEvent.audioNodeCreated, params: MockWebAudioEvents.audioNodeCreated[0], }); expect(nextGraphContext).toBeCalledTimes(2); expect(nextGraphContext.mock.calls[1]).toMatchInlineSnapshot(` Array [ Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "running", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "eventCount": 2, "graph": Graph { "_defaultEdgeLabelFn": 
[Function], "_defaultNodeLabelFn": [Function], "_edgeLabels": Object {}, "_edgeObjs": Object {}, "_in": Object { "node0000": Object {}, }, "_isCompound": false, "_isDirected": true, "_isMultigraph": true, "_label": Object {}, "_nodeCount": 1, "_nodes": Object { "node0000": Object { "color": null, "height": 50, "id": "node0000", "label": "gain", "type": "gain", "width": 150, }, }, "_out": Object { "node0000": Object {}, }, "_preds": Object { "node0000": Object {}, }, "_sucs": Object { "node0000": Object {}, }, }, "id": "context0000", "nodes": Object { "node0000": Object { "edges": Array [], "node": Object { "channelCountMode": "max", "channelInterpretation": "discrete", "contextId": "context0000", "nodeId": "node0000", "nodeType": "gain", "numberOfInputs": 1, "numberOfOutputs": 1, }, "params": Array [], }, }, "params": Object {}, "realtimeData": Object { "callbackIntervalMean": 0, "callbackIntervalVariance": 0, "currentTime": 0, "renderCapacity": 0, }, }, ] `); }); it('removes old node', () => { nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextCreated, params: MockWebAudioEvents.contextCreated[0], }); expect(nextGraphContext).toBeCalledTimes(1); nextWebAudioEvent({ method: WebAudioDebuggerEvent.audioNodeCreated, params: MockWebAudioEvents.audioNodeCreated[0], }); expect(nextGraphContext).toBeCalledTimes(2); nextWebAudioEvent({ method: WebAudioDebuggerEvent.audioNodeWillBeDestroyed, params: MockWebAudioEvents.audioNodeWillBeDestroyed[0], }); expect(nextGraphContext).toBeCalledTimes(3); expect(nextGraphContext.mock.calls[2]).toMatchInlineSnapshot(` Array [ Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "running", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "eventCount": 3, "graph": Graph { "_defaultEdgeLabelFn": [Function], "_defaultNodeLabelFn": [Function], "_edgeLabels": Object {}, "_edgeObjs": Object {}, "_in": Object {}, "_isCompound": false, "_isDirected": true, 
"_isMultigraph": true, "_label": Object {}, "_nodeCount": 0, "_nodes": Object {}, "_out": Object {}, "_preds": Object {}, "_sucs": Object {}, }, "id": "context0000", "nodes": Object {}, "params": Object {}, "realtimeData": Object { "callbackIntervalMean": 0, "callbackIntervalVariance": 0, "currentTime": 0, "renderCapacity": 0, }, }, ] `); }); it('adds new node edge connection', () => { nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextCreated, params: MockWebAudioEvents.contextCreated[0], }); expect(nextGraphContext).toBeCalledTimes(1); nextWebAudioEvent({ method: WebAudioDebuggerEvent.audioNodeCreated, params: MockWebAudioEvents.audioNodeCreated[0], }); expect(nextGraphContext).toBeCalledTimes(2); nextWebAudioEvent({ method: WebAudioDebuggerEvent.audioNodeCreated, params: MockWebAudioEvents.audioNodeCreated[1], }); expect(nextGraphContext).toBeCalledTimes(3); nextWebAudioEvent({ method: WebAudioDebuggerEvent.nodesConnected, params: MockWebAudioEvents.nodesConnected[0], }); expect(nextGraphContext).toBeCalledTimes(4); expect(nextGraphContext.mock.calls[3]).toMatchInlineSnapshot(` Array [ Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "running", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "eventCount": 4, "graph": Graph { "_defaultEdgeLabelFn": [Function], "_defaultNodeLabelFn": [Function], "_edgeCount": 1, "_edgeLabels": Object { "node0001node00000,0": Object { "destinationInputIndex": 0, "destinationType": "node", "sourceOutputIndex": 0, }, }, "_edgeObjs": Object { "node0001node00000,0": Object { "name": "0,0", "v": "node0001", "w": "node0000", }, }, "_in": Object { "node0000": Object { "node0001node00000,0": Object { "name": "0,0", "v": "node0001", "w": "node0000", }, }, "node0001": Object {}, }, "_isCompound": false, "_isDirected": true, "_isMultigraph": true, "_label": Object {}, "_nodeCount": 2, "_nodes": Object { "node0000": Object { "color": null, "height": 50, 
"id": "node0000", "label": "gain", "type": "gain", "width": 150, }, "node0001": Object { "color": null, "height": 50, "id": "node0001", "label": "bufferSource", "type": "bufferSource", "width": 150, }, }, "_out": Object { "node0000": Object {}, "node0001": Object { "node0001node00000,0": Object { "name": "0,0", "v": "node0001", "w": "node0000", }, }, }, "_preds": Object { "node0000": Object { "node0001": 1, }, "node0001": Object {}, }, "_sucs": Object { "node0000": Object {}, "node0001": Object { "node0000": 1, }, }, }, "id": "context0000", "nodes": Object { "node0000": Object { "edges": Array [], "node": Object { "channelCountMode": "max", "channelInterpretation": "discrete", "contextId": "context0000", "nodeId": "node0000", "nodeType": "gain", "numberOfInputs": 1, "numberOfOutputs": 1, }, "params": Array [], }, "node0001": Object { "edges": Array [ Object { "contextId": "context0000", "destinationId": "node0000", "sourceId": "node0001", }, ], "node": Object { "channelCountMode": "max", "channelInterpretation": "discrete", "contextId": "context0000", "nodeId": "node0001", "nodeType": "bufferSource", "numberOfInputs": 0, "numberOfOutputs": 1, }, "params": Array [], }, }, "params": Object {}, "realtimeData": Object { "callbackIntervalMean": 0, "callbackIntervalVariance": 0, "currentTime": 0, "renderCapacity": 0, }, }, ] `); }); it('removes old node edge connection', () => { nextWebAudioEvent({ method: WebAudioDebuggerEvent.contextCreated, params: MockWebAudioEvents.contextCreated[0], }); expect(nextGraphContext).toBeCalledTimes(1); nextWebAudioEvent({ method: WebAudioDebuggerEvent.audioNodeCreated, params: MockWebAudioEvents.audioNodeCreated[0], }); expect(nextGraphContext).toBeCalledTimes(2); nextWebAudioEvent({ method: WebAudioDebuggerEvent.audioNodeCreated, params: MockWebAudioEvents.audioNodeCreated[1], }); expect(nextGraphContext).toBeCalledTimes(3); nextWebAudioEvent({ method: WebAudioDebuggerEvent.nodesConnected, params: MockWebAudioEvents.nodesConnected[0], 
}); expect(nextGraphContext).toBeCalledTimes(4); nextWebAudioEvent({ method: WebAudioDebuggerEvent.nodesDisconnected, params: MockWebAudioEvents.nodesDisconnected[0], }); expect(nextGraphContext).toBeCalledTimes(5); expect(nextGraphContext.mock.calls[4]).toMatchInlineSnapshot(` Array [ Object { "context": Object { "callbackBufferSize": 1000, "contextId": "context0000", "contextState": "running", "contextType": "realtime", "maxOutputChannelCount": 2, "sampleRate": 48000, }, "eventCount": 5, "graph": Graph { "_defaultEdgeLabelFn": [Function], "_defaultNodeLabelFn": [Function], "_edgeCount": 0, "_edgeLabels": Object {}, "_edgeObjs": Object {}, "_in": Object { "node0000": Object {}, "node0001": Object {}, }, "_isCompound": false, "_isDirected": true, "_isMultigraph": true, "_label": Object {}, "_nodeCount": 2, "_nodes": Object { "node0000": Object { "color": null, "height": 50, "id": "node0000", "label": "gain", "type": "gain", "width": 150, }, "node0001": Object { "color": null, "height": 50, "id": "node0001", "label": "bufferSource", "type": "bufferSource", "width": 150, }, }, "_out": Object { "node0000": Object {}, "node0001": Object {}, }, "_preds": Object { "node0000": Object {}, "node0001": Object {}, }, "_sucs": Object { "node0000": Object {}, "node0001": Object {}, }, }, "id": "context0000", "nodes": Object { "node0000": Object { "edges": Array [], "node": Object { "channelCountMode": "max", "channelInterpretation": "discrete", "contextId": "context0000", "nodeId": "node0000", "nodeType": "gain", "numberOfInputs": 1, "numberOfOutputs": 1, }, "params": Array [], }, "node0001": Object { "edges": Array [ Object { "contextId": "context0000", "destinationId": "node0000", "sourceId": "node0001", }, ], "node": Object { "channelCountMode": "max", "channelInterpretation": "discrete", "contextId": "context0000", "nodeId": "node0001", "nodeType": "bufferSource", "numberOfInputs": 0, "numberOfOutputs": 1, }, "params": Array [], }, }, "params": Object {}, "realtimeData": 
/**
 * Canned Chrome DevTools Protocol `WebAudio.*` event payloads used by the
 * integrator tests above. Outer keys are event names; inner numeric keys are
 * fixture indices so tests can reference e.g. `audioNodeCreated[1]`.
 * All fixtures share a single context id ('context0000') and two node ids
 * ('node0000' gain, 'node0001' bufferSource).
 * @type {Object<string, Object<number, Object>>}
 */
const MockWebAudioEvents = {
  audioNodeCreated: {
    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeCreatedEvent} */
    0: {
      node: {
        contextId: 'context0000',
        nodeId: 'node0000',
        nodeType: 'gain',
        channelCountMode: 'max',
        channelInterpretation: 'discrete',
        numberOfInputs: 1,
        numberOfOutputs: 1,
      },
    },
    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeCreatedEvent} */
    1: {
      node: {
        contextId: 'context0000',
        nodeId: 'node0001',
        nodeType: 'bufferSource',
        channelCountMode: 'max',
        channelInterpretation: 'discrete',
        // A bufferSource is a source node: it has outputs but no inputs.
        numberOfInputs: 0,
        numberOfOutputs: 1,
      },
    },
  },
  audioNodeWillBeDestroyed: {
    /** @type {ChromeDebuggerWebAudioDomain.AudioNodeWillBeDestroyedEvent} */
    0: {
      contextId: 'context0000',
      nodeId: 'node0000',
    },
  },
  contextChanged: {
    /** @type {ChromeDebuggerWebAudioDomain.ContextChangedEvent} */
    0: {
      context: {
        contextId: 'context0000',
        contextType: 'realtime',
        // Differs from contextCreated[0] only by state: suspended vs running.
        contextState: 'suspended',
        sampleRate: 48000,
        callbackBufferSize: 1000,
        maxOutputChannelCount: 2,
      },
    },
  },
  contextCreated: {
    /** @type {ChromeDebuggerWebAudioDomain.ContextCreatedEvent} */
    0: {
      context: {
        contextId: 'context0000',
        contextType: 'realtime',
        contextState: 'running',
        sampleRate: 48000,
        callbackBufferSize: 1000,
        maxOutputChannelCount: 2,
      },
    },
  },
  contextWillBeDestroyed: {
    /** @type {ChromeDebuggerWebAudioDomain.ContextWillBeDestroyedEvent} */
    0: {
      contextId: 'context0000',
    },
  },
  nodesConnected: {
    /** @type {ChromeDebuggerWebAudioDomain.NodesConnectedEvent} */
    0: {
      contextId: 'context0000',
      sourceId: 'node0001',
      destinationId: 'node0000',
    },
  },
  nodesDisconnected: {
    // Mirrors nodesConnected[0] so a connect/disconnect pair round-trips.
    /** @type {ChromeDebuggerWebAudioDomain.NodesDisconnectedEvent} */
    0: {
      contextId: 'context0000',
      sourceId: 'node0001',
      destinationId: 'node0000',
    },
  },
};

/**
 * Build a jest-instrumented rxjs observer whose next/complete/error calls can
 * be asserted on.
 * @return {Subscriber} plain object compatible with Observable.subscribe
 */
function mockSubscriber() {
  return {next: jest.fn(), complete: jest.fn(), error: jest.fn()};
}
WebAudioRealtimeDataReason, } from './WebAudioRealtimeData'; import { ChromeDebuggerAPIEventName, ChromeDebuggerAPIEvent, } from './DebuggerAttachEventController'; import { PageDebuggerEvent, PageDebuggerEventParams, } from '../chrome/DebuggerPageDomain'; enum GraphContextDestroyReasonMessage { RECEIVE_WILL_DESTROY_EVENT = `ReceiveWillDestroyEvent`, CANNOT_FIND_REALTIME_DATA = `CannotFindRealtimeData`, } type MutableContexts = { [key: string]: { graphContext: Audion.GraphContext; graphContextDestroyed$: Subject; realtimeDataGraphContext$: Observable; }; }; interface EventHelpers { realtimeData: WebAudioRealtimeData; } type IntegratableEventName = | PageDebuggerEvent | WebAudioDebuggerEvent | ChromeDebuggerAPIEventName; type IntegratableEvent = | Audion.PageEvent | Audion.WebAudioEvent | ChromeDebuggerAPIEvent; type IntegratableEventMapping = { [K in IntegratableEventName]: ProtocolMapping.Events extends { [key in K]: [infer P]; } ? P : ChromeDebuggerAPIEvent extends {method: K; params: infer P} ? 
P : never; }; type EventHandlers = | { readonly [K in IntegratableEventName]: ( helpers: EventHelpers, contexts: MutableContexts, event: IntegratableEventMapping[K], ) => Observable | Audion.GraphContext | void; }; export const getTimestampAsString = () => { return '[' + performance.now().toFixed(2) + '] '; }; const EVENT_HANDLERS: Partial = { [WebAudioDebuggerEvent.audioNodeCreated]: ( helpers, contexts, audioNodeCreated, ) => { const node = audioNodeCreated.node; const {contextId, nodeId, nodeType} = node; const space = contexts[contextId]; if (!space) { return; } DLOG(`A new AudioNode has been created.`, { contextId, nodeId, }); const context = space.graphContext; context.eventCount += 1; if (context.nodes[nodeId]) { DLOG(`Duplicate WebAudio.audioNodeCreated event`, { contextId, nodeId, }); return; } context.nodes[nodeId] = { node, params: [], edges: [], }; context.graph.setNode(nodeId, { id: nodeId, label: nodeType, type: nodeType, color: null, width: 150, height: 50, }); return context; }, [WebAudioDebuggerEvent.audioNodeWillBeDestroyed]: ( helpers, contexts, audioNodeDestroyed, ) => { const {contextId, nodeId} = audioNodeDestroyed; DLOG(`An existing AudioNode has been destroyed.`, { contextId, nodeId, }); const space = contexts[contextId]; if (!space) { return; } const context = space.graphContext; context.eventCount += 1; context.graph.removeNode(nodeId); const node = context.nodes[nodeId]; if (node && node.params) { for (const audioParam of node.params) { delete context.params[audioParam.paramId]; } } delete context.nodes[nodeId]; return context; }, [WebAudioDebuggerEvent.audioParamCreated]: ( helpers, contexts, audioParamCreated, ) => { const {param} = audioParamCreated; const {contextId, nodeId, paramId: paramIdCreated} = param; DLOG(`A new AudioParam has been created.`, { contextId, nodeId, paramIdCreated, }); const space = contexts[contextId]; if (!space) { return; } const context = space.graphContext; context.eventCount += 1; const node = 
context.nodes[nodeId]; if (!node) { return; } if (node.params.some(({paramId}) => paramId === paramIdCreated)) { DLOG(`Duplicate WebAudio.audioParamCreated event`, { contextId, nodeId, paramIdCreated, }); return; } node.params.push(param); context.params[paramIdCreated] = param; return context; }, [WebAudioDebuggerEvent.audioParamWillBeDestroyed]: ( helpers, contexts, audioParamWillBeDestroyed, ) => { const { contextId, nodeId, paramId: paramIdCreated, } = audioParamWillBeDestroyed; DLOG(`An existing AudioParam has been destroyed.`, { contextId, nodeId, paramIdCreated, }); const space = contexts[contextId]; if (!space) { return; } const context = space.graphContext; context.eventCount += 1; const node = context.nodes[nodeId]; if (node && node.params) { removeAll(node.params, ({paramId}) => paramId === paramIdCreated); } delete context.params[paramIdCreated]; return context; }, [WebAudioDebuggerEvent.contextChanged]: ( helpers, contexts, contextChanged, ) => { const {contextId} = contextChanged.context; const space = contexts[contextId]; if (!space) { DLOG( `Unexpected WebAudio.contextChanged event.` + `Did not receive an event when Audio Context was created`, { contextId, }, ); return; } DLOG( `Some properties in BaseAudioContext have changed.` + `properties (id stays the same)`, { contextId, }, ); space.graphContext.context = contextChanged.context; space.graphContext.eventCount += 1; return space.graphContext; }, [WebAudioDebuggerEvent.audioListenerCreated]: ( helpers, contexts, contextChanged, ) => { const {contextId} = contextChanged.listener; DLOG(`An AudioListener has been created.`, { contextId, }); return; }, [WebAudioDebuggerEvent.audioListenerWillBeDestroyed]: ( helpers, contexts, contextChanged, ) => { const {contextId} = contextChanged; DLOG(`An AudioListener will be destroyed.`, { contextId, }); return; }, [WebAudioDebuggerEvent.contextCreated]: ( helpers, contexts, contextCreated, ) => { const {contextId, contextType} = contextCreated.context; if 
(contexts[contextId]) { // Duplicate or out of order context created event. console.warn( getTimestampAsString() + `Duplicate ${WebAudioDebuggerEvent.contextCreated} event.`, contextCreated, ); return; } else { console.debug( getTimestampAsString() + `Audio Context (${contextId.slice(-6)}-${contextType}) created.` + `Adding the context to the tracked set.`, ); } const graph = new dagre.graphlib.Graph({multigraph: true}); graph.setGraph({}); graph.setDefaultEdgeLabel(() => { return {}; }); // Request realtime data for realtime and offline contexts. We use this // information to help confirm the existence of this new context. Events // that normally mark when contexts are destroyed may not arrive and so we // need this extra way to determine when the contexts no longer exist. const realtimeData$ = helpers.realtimeData.pollContext(contextId); const graphContextDestroyed$ = new Subject(); const realtimeDataGraphContext$ = realtimeData$.pipe( map((realtimeData) => { const space = contexts[contextId]; if (space) { space.graphContext = { ...space.graphContext, realtimeData, }; return space.graphContext; } }), filter((context): context is Audion.GraphContext => Boolean(context)), catchError((reason, caught) => { reason = WebAudioRealtimeDataReason.parseReason(reason); if (WebAudioRealtimeDataReason.isCannotFindReason(reason)) { const space = contexts[contextId]; space?.graphContextDestroyed$?.next( GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA, ); if (!space) { DLOG( `Error requesting realtime data for context,` + `Context was likely cleaned up during requests for real time data.`, { reason, contextId, }, ); } return EMPTY; } else if (WebAudioRealtimeDataReason.isRealtimeOnlyReason(reason)) { // Non-realtime/offline contexts do not have realtime data and will // produce this error when that data is requested. 
} else { console.error( getTimestampAsString() + `Unexpected error requesting realtime data for context '${contextId}'.` + `"${WebAudioRealtimeDataReason.toString(reason)}"`, ); } // Redirect back to the caught observable. We want to keep receiving // realtime data values or errors until we receive CANNOT_FIND error. return caught; }), takeUntil(graphContextDestroyed$), ); contexts[contextId] = { graphContext: { id: contextId, eventCount: 1, context: contextCreated.context, realtimeData: INITIAL_CONTEXT_REALTIME_DATA, nodes: {}, params: {}, // TODO: dagre's graphlib typings are inaccurate, which is why we use // graphlib's types. Revert to dagre's types once the issue is fixed: // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/47439 graph: graph as unknown as graphlib.Graph, }, graphContextDestroyed$, realtimeDataGraphContext$, }; return merge( of(contexts[contextId].graphContext), graphContextDestroyed$.pipe( share(), take(1), mergeMap((message) => { if ( message === GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA ) { DLOG( `Audio Context cannot be found. 
` + `Removing the context from the tracked set.`, { contextId, }, ); } else if ( message === GraphContextDestroyReasonMessage.RECEIVE_WILL_DESTROY_EVENT ) { DLOG( `Audio Context will be destroyed.` + `Removing the context from the tracked set.`, { contextId, }, ); } const space = contexts[contextId]; if (space) { delete contexts[contextId]; return of({ id: contextId, eventCount: space.graphContext?.eventCount + 1, context: null, realtimeData: null, nodes: null, params: null, graph: null, }); } else { DLOG( `Audio Context could not be removed from the tracked set.` + `It was not tracked.`, { contextId, }, ); } return EMPTY; }), ), contexts[contextId].realtimeDataGraphContext$, ); }, [WebAudioDebuggerEvent.contextWillBeDestroyed]: ( helpers, contexts, contextDestroyed, ) => { const {contextId} = contextDestroyed; const space = contexts[contextId]; space?.graphContextDestroyed$?.next( GraphContextDestroyReasonMessage.RECEIVE_WILL_DESTROY_EVENT, ); DLOG(`A BaseAudioContext will be destroyed.`, { contextId, }); }, [WebAudioDebuggerEvent.nodeParamConnected]: ( helpers, contexts, nodeParamConnected, ) => { const { contextId, sourceId: sourceNodeId, sourceOutputIndex = 0, destinationId: destinationParamId, } = nodeParamConnected; DLOG(`An AudioNode is connected to an AudioParam.`, { contextId, sourceNodeId, destinationParamId, }); const space = contexts[contextId]; if (!space) { return; } const context = space.graphContext; context.eventCount += 1; const sourceNode = context.nodes[sourceNodeId]; if (!sourceNode) { return; } const destinationParam = context.params[destinationParamId]; if (!destinationParam) { return; } const destinationNodeId = destinationParam.nodeId; const destinationNode = context.nodes[destinationNodeId]; if (!destinationNode) { return; } sourceNode.edges.push(nodeParamConnected); context.graph.setEdge( sourceNodeId, destinationNodeId, { sourceOutputIndex, destinationType: Audion.GraphEdgeType.PARAM, destinationParamId, destinationParamIndex: 
destinationNode.params.findIndex( ({paramId}) => paramId === destinationParamId, ), } as Audion.GraphEdge, sourceOutputIndex.toString(), ); return context; }, [WebAudioDebuggerEvent.nodeParamDisconnected]: ( helpers, contexts, nodesDisconnected, ) => { const { contextId, sourceId: sourceNodeId, sourceOutputIndex = 0, destinationId: destinationParamId, } = nodesDisconnected; DLOG(`An AudioNode is disconnected to an AudioParam.`, { contextId, sourceNodeId, destinationParamId, }); const space = contexts[contextId]; if (!space) { return; } const context = space.graphContext; context.eventCount += 1; const sourceNode = context.nodes[sourceNodeId]; if (!sourceNode) { return; } const {edges} = sourceNode; removeAll( edges, (edge) => edge.destinationId === destinationParamId && edge.sourceOutputIndex === sourceOutputIndex, ); context.graph.removeEdge( sourceNodeId, destinationParamId, sourceOutputIndex.toString(), ); return context; }, [WebAudioDebuggerEvent.nodesConnected]: ( helpers, contexts, nodesConnected, ) => { const { contextId, sourceId, sourceOutputIndex = 0, destinationId, destinationInputIndex = 0, } = nodesConnected; DLOG(`Two AudioNodes are connected.`, { contextId, sourceId, destinationId, }); const space = contexts[contextId]; if (!space) { return; } const context = space.graphContext; context.eventCount += 1; const sourceNode = context.nodes[sourceId]; if (!sourceNode) { return; } const destinationNode = context.nodes[destinationId]; if (!destinationNode) { return; } sourceNode.edges.push(nodesConnected); context.graph.setEdge( sourceId, destinationId, { sourceOutputIndex, destinationType: Audion.GraphEdgeType.NODE, destinationInputIndex, } as Audion.GraphNodeEdge, `${sourceOutputIndex},${destinationInputIndex}`, ); return context; }, [WebAudioDebuggerEvent.nodesDisconnected]: ( helpers, contexts, nodesDisconnected, ) => { const { contextId, sourceId, sourceOutputIndex = 0, destinationId, destinationInputIndex = 0, } = nodesDisconnected; DLOG( `Notifies 
AudioNodes is disconnected. The destination` + `can be null, and it means all the outgoing connections` + `from the source are disconnected.`, {contextId, sourceId, destinationId}, ); const space = contexts[contextId]; if (!space) { return; } const context = space.graphContext; context.eventCount += 1; const sourceNode = context.nodes[sourceId]; if (!sourceNode) { return; } const {edges} = sourceNode; removeAll( edges, (edge) => edge.destinationId === destinationId && edge.sourceOutputIndex === sourceOutputIndex && edge.destinationInputIndex === destinationInputIndex, ); context.graph.removeEdge( sourceId, destinationId, `${sourceOutputIndex},${destinationInputIndex}`, ); return context; }, [PageDebuggerEvent.frameNavigated]: (helpers, contexts) => { console.debug( getTimestampAsString() + `Checking if tracked Audio Contexts (${Object.keys(contexts) .map((contextId) => contextId.slice(-6)) .join(`, `)}) exist after frame navigated.`, ); return ensureContextsExist(contexts, helpers); }, [PageDebuggerEvent.loadEventFired]: (helpers, contexts) => { console.debug( getTimestampAsString() + `Checking if tracked Audio Contexts (${Object.keys(contexts) .map((contextId) => contextId.slice(-6)) .join(`, `)}) exist after load event.`, ); return ensureContextsExist(contexts, helpers); }, [ChromeDebuggerAPIEventName.detached]: ( helpers, contexts, debuggerDetached, ) => { if (debuggerDetached.reason === `target_closed`) { console.debug( getTimestampAsString() + `Checking if tracked Audio Contexts (${Object.keys(contexts) .map((contextId) => contextId.slice(-6)) .join( `, `, )}) exist after debugger detached because target was closed.`, ); return ensureContextsExist(contexts, helpers); } }, }; function ensureContextsExist( contexts: MutableContexts, helpers: EventHelpers, ): void | Audion.GraphContext | Observable { return merge( ...Object.keys(contexts).map((contextId) => helpers.realtimeData.pollContext(contextId).pipe( take(1), ignoreElements(), catchError((reason) => { 
reason = WebAudioRealtimeDataReason.parseReason(reason); if (WebAudioRealtimeDataReason.isCannotFindReason(reason)) { const space = contexts[contextId]; if (space) { space?.graphContextDestroyed$?.next( GraphContextDestroyReasonMessage.CANNOT_FIND_REALTIME_DATA, ); } } else if (WebAudioRealtimeDataReason.isRealtimeOnlyReason(reason)) { // OfflineAudioContexts emit this error if they are still alive. } else { console.error( getTimestampAsString() + `Unexpected error determining if context "${contextId}" is ` + `stale with devtools protocol WebAudio.getRealtimeData.` + `"${WebAudioRealtimeDataReason.toString(reason)}"`, ); } return EMPTY; }), ), ), ); } function removeAll(array: T[], fn: (value: T) => boolean) { if (array) { let index = array.findIndex(fn); while (index >= 0) { array.splice(index, 1); index = array.findIndex(fn); } } } /** * Collect WebAudio debugger events into per context graphs. */ export function integrateWebAudioGraph( webAudioRealtimeData: WebAudioRealtimeData, ): OperatorFunction { const helpers = {realtimeData: webAudioRealtimeData}; const contexts: MutableContexts = {}; return pipe( mergeMap(({method, params}) => { if (EVENT_HANDLERS[method]) { const result = EVENT_HANDLERS[method]?.( helpers, contexts, params as any, ); if (typeof result !== 'object' || result === null) return EMPTY; if (isObservable(result)) { return result; } return of(result); } return EMPTY; }), ); } ================================================ FILE: src/devtools/WebAudioRealtimeData.ts ================================================ import Protocol from 'devtools-protocol'; import {bindCallback, concatMap, interval, Observable} from 'rxjs'; import {map, timeout} from 'rxjs/operators'; import {invariant} from '../utils/error'; import {chrome} from '../chrome'; import {WebAudioDebuggerMethod} from '../chrome/DebuggerWebAudioDomain'; import {Audion} from './Types'; import {bindChromeCallback} from '../utils/rxChrome'; /** * Error messages returned by 
WebAudio.getRealtimeData devtool protocol method. */ export enum RealtimeDataErrorMessage { /** Error returned when a AudioContext cannot be find. */ CANNOT_FIND = 'Cannot find BaseAudioContext with such id.', /** Error returned when realtime data is requested from an OfflineAudioContext. */ REALTIME_ONLY = 'ContextRealtimeData is only avaliable for an AudioContext.', } interface RealtimeDataReason { message: Message; } const {tabId} = chrome.devtools.inspectedWindow; const sendCommand = bindChromeCallback< [{tabId: string}, WebAudioDebuggerMethod.getRealtimeData, any?], [{realtimeData: Protocol.WebAudio.ContextRealtimeData}] >(chrome.debugger.sendCommand, chrome.debugger); export const INITIAL_CONTEXT_REALTIME_DATA = { callbackIntervalMean: 0, callbackIntervalVariance: 0, currentTime: 0, renderCapacity: 0, } as Audion.ContextRealtimeData; export class WebAudioRealtimeData { private readonly intervalMS = 1000; private readonly timeoutMS = 500; private readonly interval$ = interval(this.intervalMS); pollContext(contextId: string) { return this.interval$.pipe( concatMap(() => sendCommand({tabId}, WebAudioDebuggerMethod.getRealtimeData, { contextId, }).pipe( timeout({first: this.timeoutMS}), map((result) => { invariant( result && result !== null, 'ContextRealtimeData not returned for WebAudio context %0.', contextId, ); return result.realtimeData; }), ), ), ); } } export const WebAudioRealtimeDataReason = { parseReason(reason: any) { if (reason && reason.message && !reason.code) { try { reason = JSON.parse(reason.message); } catch (e) {} } return reason; }, toString(reason: any) { return reason && reason.message ? 
reason.message : reason; }, isRealtimeOnlyReason( reason: any, ): reason is RealtimeDataReason { return reason && reason.message === RealtimeDataErrorMessage.REALTIME_ONLY; }, isCannotFindReason( reason: any, ): reason is RealtimeDataReason { return reason && reason.message === RealtimeDataErrorMessage.CANNOT_FIND; }, }; ================================================ FILE: src/devtools/deserializeGraphContext.ts ================================================ import * as graphlib from 'graphlib'; import {Audion} from './Types'; export interface SerializedGraphContext extends Audion.GraphContext { graph: any; } export function deserializeGraphContext( graphContext: SerializedGraphContext, ): Audion.GraphContext { if (graphContext.graph) { return { ...graphContext, // TODO: dagre's graphlib typings are inaccurate, which is why we use // graphlib directly here. Revert to dagre's types once the issue is fixed: // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/47439 graph: graphlib.json.read(graphContext.graph), }; } else { return graphContext; } } ================================================ FILE: src/devtools/layoutGraphContext.ts ================================================ import * as dagre from 'dagre'; import {Audion} from './Types'; export function layoutGraphContext( context: Audion.GraphContext, ): Audion.GraphContext { if (context.context && context.graph) { // TODO: dagre's graphlib typings are inaccurate, which is why we use // graphlib's types. 
// Devtools-page entry point: wires debugger events through the graph
// integrator and serves the results to the panel when it connects.

import {merge} from 'rxjs';
import {
  map,
  scan,
  take,
  shareReplay,
  share,
  mergeMap,
  auditTime,
} from 'rxjs/operators';

import {Audion} from './Types';
import {DebuggerAttachEventController} from './DebuggerAttachEventController';
import {DevtoolsGraphPanel} from './DevtoolsGraphPanel';
import {serializeGraphContext} from './serializeGraphContext';
import {integrateWebAudioGraph} from './WebAudioGraphIntegrator';
import {WebAudioRealtimeData} from './WebAudioRealtimeData';
import {partitionMap} from './partitionMap';
import {DebuggerEventsObservable} from './DebuggerEvents';

const attachController = new DebuggerAttachEventController();

// Two protocol-event streams share one attach controller: Page.* and
// WebAudio.* domains.
const pageEvent$ = new DebuggerEventsObservable(attachController, {
  domain: 'page',
});
const webAudioEvents$ = new DebuggerEventsObservable(attachController, {
  domain: 'webAudio',
});
const webAudioRealtimeData = new WebAudioRealtimeData();

const serializedGraphContext$ = merge(
  pageEvent$,
  webAudioEvents$,
  attachController.debuggerEvent$,
).pipe(
  integrateWebAudioGraph(webAudioRealtimeData),
  // Split graph contexts into an observable for each unique graph context id.
  // A partition completes when its context field goes null (tombstone).
  partitionMap({
    getPartitionId: ({id}) => id,
    isPartitionComplete: ({context}) => context === null,
  }),
  // For each partition, start a timer on the first value in that partition but
  // emit the last value during that timer when the timer completes.
  // (auditTime(16) is an operator, i.e. a function Observable -> Observable,
  // so map() applies it to each inner partition observable: throttle each
  // context's updates to roughly one frame.)
  map(auditTime(16)),
  // Merge all the partitions together.
  mergeMap((source) => source),
  map(serializeGraphContext),
  share(),
);

const allGraphs$ = merge(serializedGraphContext$).pipe(
  // Persistently observe web audio events and integrate events into context
  // objects. Collect those into an object of all current graphs.
  scan(
    (allGraphs, graphContext) => {
      if (graphContext.graph) {
        return {...allGraphs, [graphContext.id]: graphContext};
      }
      // Null graph marks a destroyed context: drop it from the accumulator.
      const {[graphContext.id]: _, ...otherGraphs} = allGraphs;
      return otherGraphs;
    },
    {},
  ),
  shareReplay(),
);

// There must be at least one subscription to keep allGraphs$ up to date if
// panel is connected or otherwise.
allGraphs$.subscribe();

// When the panel is opened it'll connect to the devtools page, immediately send
// the current set of graphs.
const panel = new DevtoolsGraphPanel(
  merge(
    allGraphs$.pipe(
      map((allGraphs) => ({allGraphs})),
      take(1),
    ),
    serializedGraphContext$.pipe(map((graphContext) => ({graphContext}))),
  ),
);

// When the panel is first shown, grant attachController permission to attach to
// the debugger.
panel.onPanelShown$.pipe(take(1)).subscribe({
  next() {
    attachController.permission$.grantTemporary();
  },
});

// Respond to requests from the panel accordingly.
panel.requests$.subscribe({
  next(value) {
    if (value.type === Audion.DevtoolsRequestType.COLLECT_GARBAGE) {
      attachController.sendCommand('HeapProfiler.collectGarbage').subscribe();
    }
  },
});
* * @param config when to create partition observables and complete them * @returns an observable that pushs an observable for each created partition */ export function partitionMap({ getPartitionId, isPartitionComplete, }: PartitionMapConfig): OperatorFunction> { return (source: Observable) => { const partitions = {} as {[key: string]: Subject}; return new Observable>((subscriber) => { return source.subscribe({ next(graphChange) { const key = getPartitionId(graphChange); const isComplete = isPartitionComplete(graphChange); // If the key is not in the partition cache, add a new one for that // key and push it. if (!(key in partitions)) { partitions[key] = new Subject(); subscriber.next(partitions[key]); } // Push the value through the selected partition. partitions[key].next(graphChange); // When completeSelector returns true, complete the partition and // delete it from the cache. if (isComplete) { partitions[key].complete(); delete partitions[key]; } }, // When source completes, all partitions complete. complete: () => subscriber.complete(), // When source errors, all partitions error. 
error: (reason) => subscriber.error(reason), }); }); }; } ================================================ FILE: src/devtools/serializeGraphContext.js ================================================ import dagre from 'dagre'; /** * @param {Audion.GraphContext} graphContext * @return {Audion.GraphContext} */ export function serializeGraphContext(graphContext) { if (graphContext.graph) { return { ...graphContext, graph: dagre.graphlib.json.write(graphContext.graph), }; } return graphContext; } ================================================ FILE: src/devtools/setOptionsToGraphContext.ts ================================================ import * as dagre from 'dagre'; import {Audion} from './Types'; export function setOptionsToGraphContext([context, layoutOptions]: [ Audion.GraphContext, dagre.GraphLabel, ]): Audion.GraphContext { if (context.context && context.graph) { context.graph.setGraph(layoutOptions); } return context; } ================================================ FILE: src/devtools.html ================================================ DevTools: Audion Extension ================================================ FILE: src/extraSettingPage/options.html ================================================ Audion Addition Setting Options ================================================ FILE: src/extraSettingPage/options.js ================================================ // prettier-ignore /** * Initializes the options page by setting up event listeners and * restoring saved options. */ document.addEventListener( 'DOMContentLoaded', /** * Handles the DOMContentLoaded event to set up the options page. 
*/ function() { /** * Function to save the options in storage */ function saveOptions() { const checkboxValue = document.getElementById('showDebugInfo').checked; localStorage.setItem('showExtraDebugLog', checkboxValue); } /** * Function to restore the options from storage */ function restoreOptions() { document.getElementById('showDebugInfo').checked = localStorage.getItem('showExtraDebugLog') === 'true'; } /** * Event listeners */ document .getElementById('showDebugInfo') .addEventListener('change', saveOptions); restoreOptions(); }, ); ================================================ FILE: src/panel/GraphSelector.ts ================================================ import {Observable, combineLatest, BehaviorSubject} from 'rxjs'; import {map, shareReplay, distinctUntilChanged} from 'rxjs/operators'; import {Audion} from '../devtools/Types'; type GraphMap = {[key: string]: Audion.GraphContext}; type GraphMapRX = Observable; const EMPTY_GRAPH = { graph: {value: {width: 0, height: 0}, nodes: [], edges: []}, } as Audion.GraphContext; /** * Control which graph is observed. */ export class GraphSelector { options$: Observable; graphId$: Observable; graph$: Observable; private _graphIdSubject: BehaviorSubject; get graphId(): string { return this._graphIdSubject.value; } /** * Create a GraphSelector. * @param options */ constructor({allGraphs$: allGraphs$}: {allGraphs$: GraphMapRX}) { this.options$ = allGraphs$.pipe( map((allGraphs) => Object.entries(allGraphs) .filter(([key, graphContext]) => graphContext) .map(([key]) => key), ), ); const graphIdSubject = new BehaviorSubject(''); this._graphIdSubject = graphIdSubject; this.graphId$ = graphIdSubject; const props$ = combineLatest({ id: this.graphId$, allGraphs: allGraphs$, }); this.graph$ = props$.pipe( map(({id, allGraphs}) => allGraphs[id] || EMPTY_GRAPH), distinctUntilChanged(), shareReplay(1), ); } /** * Select the graph to observe. 
* @param graphId */ select(graphId: string) { if (graphId !== this.graphId) { this._graphIdSubject.next(graphId); } } } ================================================ FILE: src/panel/Observer.runtime.ts ================================================ import {Observable} from 'rxjs'; import {share} from 'rxjs/operators'; import {chrome} from '../chrome'; /** * Connect to chrome runtime through an observable. * @param requests$ observable of requests to send to devtools extension context * @returns observable of messages recevied from devtools extension context */ export function connect(requests$: Observable): Observable { return new Observable((subscriber) => { const port = chrome.runtime.connect(); // Send values pushed by requests$ to devtools context. const subjectSubscription = requests$.subscribe({ next(value) { port.postMessage(value); }, }); // Publish messages from devtools context through returned observable. const onMessage: (arg0: any, arg1: Chrome.RuntimePort) => void = ( message, ) => { subscriber.next(message); }; const onDisconnect = () => subscriber.error(new Error('chrome.runtime disconnected')); port.onMessage.addListener(onMessage); port.onDisconnect.addListener(onDisconnect); return () => { subjectSubscription.unsubscribe(); port.onMessage.removeListener(onMessage); port.onDisconnect.removeListener(onDisconnect); port.disconnect(); }; }).pipe(share()); } ================================================ FILE: src/panel/Types.ts ================================================ import * as PIXI from 'pixi.js'; /** @namespace AudionPanel */ /** * @typedef AudionPanel.Point * @property {number} x * @property {number} y */ /** * @typedef AudionPanel.Node * @property {AudionPanel.Point} position * @property {AudionPanel.Point} size */ /** * @typedef AudionPanel.Port * @property {AudionPanel.Node} node * @property {AudionPanel.Point} offset * @property {number} radius * @property {Array} edges */ export namespace AudionPanel { export interface Point 
{ x: number; y: number; } export interface Node { position: Point; size: Point; updatePortDisplay(portType: PortType, portIndex: number): void; } export enum PortType { INPUT = 'input', OUTPUT = 'output', PARAM = 'param', } export interface Port { node: Node; offset: Point; radius: number; edges: any[]; updateNodeDisplay(): void; drawConnect(graphics: PIXI.Graphics): void; } } ================================================ FILE: src/panel/components/WholeGraphButton.css ================================================ .wholeGraphButton { position: absolute; top: 5px; left: 5px; cursor: pointer; opacity: 0.8; border-radius: 3px; width: 20px; height: 20px; } ================================================ FILE: src/panel/components/WholeGraphButton.ts ================================================ import {fromEvent} from 'rxjs'; import style from './WholeGraphButton.css'; import wholeGraphButtonImage from './WholeGraphButton.svg'; /** * Render a button. Can be observed for when the button is clicked. */ export class WholeGraphButton { private readonly view = document.createElement('div'); readonly click$ = fromEvent(this.view, 'click'); /** Create a WholeGraphButton. */ constructor() { this.view.className = style.wholeGraphButton; this.view.innerHTML = `Resize to fit`; } /** Render the button. 
*/ render() { return this.view; } } ================================================ FILE: src/panel/components/collectGarbage.css ================================================ :global(.-theme-with-dark-background) .collectIcon { --override-icon-mask-background-color: rgb(145 145 145); } .collectIcon { display: inline-block; -webkit-mask: url('./collectGarbage.svg') no-repeat center; mask: url('./collectGarbage.svg') no-repeat center; width: 28px; height: 24px; background-color: var(--override-icon-mask-background-color); --override-icon-mask-background-color: rgb(110 110 110); } :global(.toolbar-button):hover .collectIcon { background-color: var(--color-text-primary); } ================================================ FILE: src/panel/components/collectGarbage.ts ================================================ import {fromEvent, merge, NEVER, Observable} from 'rxjs'; import {map, startWith, switchMap} from 'rxjs/operators'; import {Audion} from '../../devtools/Types'; import {setElementHTML} from './domUtils'; import style from './collectGarbage.css'; /** * @returns html representation of the collect garbage icon */ function collectGarbageImageHTML(): string { return ``; } /** * @param buttonElement$ observable of html elements to listen to events and * render a icon in * @returns observable of elements when they are modified or actions to be acted * on by the extension's devtools context */ export function renderCollectGarbage( buttonElement$: Observable, ): Observable { // Map clicks to actions to request devtools to collect garbage. const collectGarbageAction$ = buttonElement$.pipe( switchMap((element) => fromEvent(element, 'click')), map( () => ({type: 'collectGarbage'} as Audion.DevtoolsCollectGarbageRequest), ), ); // Observable that pushs the button icon once and never completes. If the // observable completes, setElementHTML will clean up and remove the html. 
const collectGarbageIcon$ = NEVER.pipe(startWith(collectGarbageImageHTML())); return merge( setElementHTML(buttonElement$, collectGarbageIcon$), collectGarbageAction$, ); } ================================================ FILE: src/panel/components/detailPanel.css ================================================ .detailPanel > * { padding: 0 1rem; } .detailPanel h1, .detailPanel h2, .detailPanel h3, .detailPanel h4, .detailPanel h5, .detailPanel h6 { font-weight: normal; } .detailPanel table { font-size: 12px; } .detailPanel th { color: var(--color-text-secondary); font-weight: normal; text-align: left; } .detailPanel th, .detailPanel td { padding: 0.2rem; } ================================================ FILE: src/panel/components/detailPanel.ts ================================================ import {merge, NEVER, Observable} from 'rxjs'; import {distinctUntilChanged, map, startWith, switchMap} from 'rxjs/operators'; import {Audion} from '../../devtools/Types'; import {setElementHTML, toggleElementClassList} from './domUtils'; import style from './detailPanel.css'; const contextTypeNameMap = { realtime: 'AudioContext', offline: 'OfflineAudioContext', }; /** * @param context web audio context's context information * @returns html representation of context information */ function graphContextHTML({ contextType, contextId, contextState, sampleRate, callbackBufferSize, maxOutputChannelCount, }: Audion.GraphContext['context']): string { return `

${contextTypeNameMap[contextType] || contextType}

${contextId.slice(-6)}


State${contextState}
Sample Rate${sampleRate}
Callback Buffer Size${callbackBufferSize}
Max Output Channels${maxOutputChannelCount}
`; } /** * @param node web audio node's node information * @returns html representation of web audio node information */ function graphNodeBaseHTML({ nodeType, nodeId, channelCount, channelCountMode, channelInterpretation, numberOfInputs, numberOfOutputs, }: Audion.GraphNode['node']): string { return `

${nodeType}

${nodeId}


Channel Count${channelCount}
Channel Count Mode${channelCountMode}
Channel Interpretation${channelInterpretation}
Number of Inputs${numberOfInputs}
Number of Outputs${numberOfOutputs}
`; } /** * @param param web audio node's single parameter information * @returns html representation of parameter information */ function graphParamHTML({ paramType, paramId, rate, defaultValue, minValue, maxValue, }: Audion.GraphNode['params'][number]): string { return `

${paramType}

${paramId}


Automation Rate${rate}
Default Value${defaultValue}
Minimum Value${minValue}
Maximum Value${maxValue}
`; } /** * @param node web audio node * @returns html representation of a node's node and parameters information */ function graphNodeHTML({node, params}: Audion.GraphNode): string { return `${graphNodeBaseHTML(node)} ${ params.length ? `

Parameters:

${params.map(graphParamHTML).join('')}` : '' } `; } /** * @param element$ observable of html element to render detail panel into * @param contextData$ observable of context data to render * @param nodeData$ observable of node data to render * @returns observable of html elements as they are modified */ export function renderDetailPanel( element$: Observable, contextData$: Observable, nodeData$: Observable, ): Observable { return merge( toggleElementClassList( element$, NEVER.pipe(startWith([style.detailPanel])), ), setElementHTML( element$, contextData$.pipe( distinctUntilChanged((previous, current) => previous && previous.context && current && current.context ? previous.context.contextId === current.context.contextId : false, ), switchMap((graphContext) => nodeData$.pipe( distinctUntilChanged((previous, current) => previous && previous.node && current && current.node ? previous.node.nodeId === current.node.nodeId : false, ), map((graphNode) => graphNode && graphNode.node ? graphNodeHTML(graphNode) : graphContext && graphContext.context ? graphContextHTML(graphContext.context) : '(no recordings)', ), ), ), ), ), ); } ================================================ FILE: src/panel/components/domUtils.ts ================================================ import {defer, Observable, of} from 'rxjs'; import {finalize, map, scan, switchMap} from 'rxjs/operators'; /** * Create a factory that modifies the most latest element from an observable of elements to value from an observable of other values. 
* @param property html element property * @returns factory that modifies a latest element with the latest data */ export function setElementProperty< E extends HTMLElement, K extends keyof E, T extends E[K], >(property: K) { return function (element$: Observable, data$: Observable) { return element$.pipe( switchMap((view) => data$.pipe( map((value) => { if (view) { view[property] = value; } return view; }), finalize(() => { if (view) { view[property] = null; } }), ), ), ); }; } /** * Set that values can be added to and removed from. */ interface PropertySet { add(value: T): any; remove(value: T): any; } /** * Description of a change to a PropertySet. */ interface PropertySetChange { /** Items to remove from the PropertySet. */ deleteItems: string[]; /** Items to add to the PropertySet. */ addItems: string[]; /** All items to remove if the element changes or finalizes. */ allItems: string[]; } /** * Create a factory that adds and removes the items contained in a observable of * array values to the latest element. * @param property html element property * @returns factory that adds and removes items on an elements property */ export function toggleElementPropertySet< E extends HTMLElement, K extends { [key in keyof E]: E[key] extends PropertySet ? 
key : never; }[any], T extends string[], >(property: K) { return function (element$: Observable, data$: Observable) { const valueDiff$ = data$.pipe( scan( ([previous], current) => { const allItems = current; const deleteItems = previous.filter( (value) => !current.includes(value), ); const addItems = allItems.filter( (value) => !previous.includes(value), ); return [current, {deleteItems, addItems, allItems}] as [ T, PropertySetChange, ]; }, [[], {deleteItems: [], addItems: []}] as [T, PropertySetChange], ), map(([, change]) => change), ); return element$.pipe( switchMap((view) => valueDiff$.pipe( map((diff) => { if (view) { for (const value of diff.deleteItems) { (view[property] as PropertySet).remove(value); } for (const value of diff.addItems) { (view[property] as PropertySet).add(value); } } return view; }), finalize(() => {}), ), ), ); }; } /** * Change to a html element property's map structure. */ interface PropertyMapChange { /** Keys to remove from the property's map. */ deleteKeys: string[]; /** Keys to change to a given value. */ setKeys: [string, any][]; /** All keys. Used to remove all keys when the element changes or finalizes. 
*/ allKeys: string[]; } export function assignElementProperty< E extends HTMLElement, K extends keyof E, T extends {[key in keyof E[K]]?: E[K][key]}, >(property: K) { return function (element$: Observable, data$: Observable) { const valueDiff$ = data$.pipe( scan( ([previous], current) => { const allKeys = Object.keys(current); const deleteKeys = Object.keys(previous).filter( (key) => !(key in current), ); const setKeys = allKeys .filter((key) => current[key] !== previous[key]) .map((key) => [key, current[key]]); return [current, {deleteKeys, setKeys, allKeys}] as [ T, PropertyMapChange, ]; }, [{}, {deleteKeys: [], setKeys: []}] as [T, PropertyMapChange], ), map(([, change]) => change), ); return element$.pipe( switchMap((view) => { let finalizeKeys = []; return valueDiff$.pipe( map((diff) => { if (view) { for (const key of diff.deleteKeys) { view[property][key] = undefined; } for (const [key, value] of diff.setKeys) { view[property][key] = value; } finalizeKeys = diff.allKeys; } return view; }), finalize(() => { if (view) { for (const key of finalizeKeys) { view[property][key] = undefined; } } }), ); }), ); }; } /** * Set latest element's innerText property to latest data string value. */ export const setElementText = setElementProperty('innerText'); /** * Set latest element's innerHTML property to latest data string value. */ export const setElementHTML = setElementProperty('innerHTML'); /** * Set latest element's className property to latest data string value. */ export const setElementClassName = setElementProperty('className'); /** * Add and remove latest data string array to latest element's classList set * property. */ export const toggleElementClassList = toggleElementPropertySet('classList'); /** * Set and delete changes keys of latest data object to latest element's style * object map property. 
*/ export const assignElementStyle = assignElementProperty('style'); /** * @param query css query selector to find an element for * @param dom document to query * @returns observable of a html element matching the query */ export function querySelector( query: string, dom: {querySelector(...args: any): any} = document, ): Observable { return defer(() => of(dom.querySelector(query))); } ================================================ FILE: src/panel/components/realtimeSummary.ts ================================================ import {map, Observable} from 'rxjs'; import {Audion} from '../../devtools/Types'; import {setElementHTML} from './domUtils'; /** * Format web audio context performance data in html. * @param realtimeData realtime performance data for a web audio context * @returns rendered html summary of performance data */ export function realtimeSummaryHTML(realtimeData: Audion.ContextRealtimeData) { if (!realtimeData) return ''; const currentTime = realtimeData.currentTime.toFixed(3); const callbackIntervalMean = ( realtimeData.callbackIntervalMean * 1000 ).toFixed(3); const callbackIntervalVariance = ( Math.sqrt(realtimeData.callbackIntervalVariance) * 1000 ).toFixed(3); const renderCapacity = (realtimeData.renderCapacity * 100).toFixed(3); return realtimeData ? `Current Time: ${currentTime} s    Callback Interval: μ = ${callbackIntervalMean} ms σ = ${callbackIntervalVariance} ms    Render Capacity: ${renderCapacity} %` : ''; } /** * Render a summary of web audio context performance. 
* @param element$ current html element to render summary into * @param data$ current performance data * @returns an element pushed to renderRealtimeSummary after its content is modified */ export function renderRealtimeSummary( element$: Observable, data$: Observable, ) { const realtimeHTML$ = data$.pipe(map(realtimeSummaryHTML)); return setElementHTML(element$, realtimeHTML$); } ================================================ FILE: src/panel/components/selectGraph.css ================================================ .dropdownOption { display: flex; height: 2rem; align-items: center; cursor: pointer; padding: 0 0.2rem; } .dropdownOption:hover, .dropdownButtonActive { background: var(--color-background-elevation-2); } ================================================ FILE: src/panel/components/selectGraph.ts ================================================ import { BehaviorSubject, combineLatest, fromEvent, merge, Observable, of, } from 'rxjs'; import { distinctUntilChanged, filter, map, switchMap, tap, } from 'rxjs/operators'; import {Audion} from '../../devtools/Types'; import { assignElementStyle, setElementClassName, setElementHTML, setElementText, toggleElementClassList, } from './domUtils'; import style from './selectGraph.css'; /** * Title of the dropdown toggle button when no graphs are selected or available * to select. */ const NO_GRAPHS_AVAILABLE_TITLE = '(no recordings)'; /** * Render title for an audio graph with only the graphId. * @param graphId unique graph identifier * @returns rendered graph title */ function graphIdTitle(graphId: string) { return `unknown (${graphId.slice(-6)})`; } /** * Render title for an audio graph. * @param graph * @returns rendered graph title */ function graphTitle(graph: Audion.GraphContext) { return `${graph.context.contextType} (${graph.id.slice(-6)})`; } /** * Create a map of graph IDs to rendered graph titles. 
* @param allGraphs map of graph IDs to graph contexts * @returns map of graph IDs to rendered graph titles */ function graphTitles(allGraphs: Audion.GraphContextsById): { [key: string]: string; } { return Object.entries(allGraphs) .map(([id, graph]) => [id, graphTitle(graph)]) .reduce((accum, [id, title]) => { accum[id] = title; return accum; }, {} as {[key: string]: string}); } /** * Render current graph title or some copy to indicate no graph is selected or * no graph is available. * @param param currently selected graph ID and ID to title map * @returns rendered button title text */ function buttonTitle([graphId, graphTitles]) { return graphId ? graphTitles[graphId] || graphIdTitle(graphId) : NO_GRAPHS_AVAILABLE_TITLE; } /** * Render html list of graph options to select from. * @param graphTitles graph ID to title map * @returns html list of graph titles to select from */ const dropdownListHTML = function (graphTitles: { [graphId: string]: string; }): string { return Object.entries(graphTitles) .map( ([graphId, title]) => `
${title}
`, ) .join(''); }; /** * Test if two maps of graph titles are equivalent. * * Used to reduce further processing of graph title information like updating * the dom with new html for the new set of titles. * * @param previousTitles map of graph titles * @param currentTitles map of graph titles * @returns true if maps match */ function equalTitles( previousTitles: {[graphId: string]: string}, currentTitles: {[graphId: string]: string}, ) { const previousEntries = Object.entries(previousTitles); const currentEntries = Object.entries(currentTitles); return ( previousEntries.length === currentEntries.length && previousEntries.every(([previousKey, previousValue], index) => { const [currentKey, currentValue] = currentEntries[index]; return previousKey === currentKey && previousValue === currentValue; }) ); } /** * Render a widget displaying the current selected graph title. When clicked * show a list of currently available graphs to select from. * * @param titleElement$ current html element to render dropdown button * title into * @param dropdownListElement$ current html element to render dropdown * list into * @param buttonElement$ current html element that when clicked opens the dropdown * @param graphId$ currently selected graph id * @param allGraphs$ current map of graph ids to graph contexts * @returns an element pushed to renderSelectGraph after its content is modified */ export function renderSelectGraph( titleElement$: Observable, dropdownListElement$: Observable, buttonElement$: Observable, graphId$: Observable, allGraphs$: Observable, ) { const distinctGraphId$ = graphId$.pipe(distinctUntilChanged()); const graphTitles$ = allGraphs$.pipe( map(graphTitles), distinctUntilChanged(equalTitles), ); const graphIdAndTitles$ = combineLatest([distinctGraphId$, graphTitles$]); const dropdownVisible$ = new BehaviorSubject(false); const body$ = of(document.body); const bodyClick$ = body$.pipe( switchMap((element) => fromEvent(element, 'click')), ); const openDropdownAction$ 
= buttonElement$.pipe( switchMap((element) => fromEvent(element, 'click')), tap(() => dropdownVisible$.next(!dropdownVisible$.value)), filter(() => false), map(() => {}), ); const closeDropdownAction$ = combineLatest([ buttonElement$, dropdownListElement$, ]).pipe( switchMap(([buttonElement, dropdownElement]) => bodyClick$.pipe( filter( (ev) => ev.target instanceof Element && !( buttonElement.contains(ev.target) || dropdownElement.contains(ev.target) ), ), ), ), tap(() => dropdownVisible$.next(false)), filter(() => false), map(() => {}), ); const eventAction$ = merge(openDropdownAction$, closeDropdownAction$); const titleText$ = graphIdAndTitles$.pipe(map(buttonTitle)); const buttonClassName$ = dropdownVisible$.pipe( map((visible) => (visible ? [style.dropdownButtonActive] : [])), ); const dropdownListHTML$ = graphTitles$.pipe(map(dropdownListHTML)); const dropdownListIdSelected$ = dropdownListElement$.pipe( switchMap((element) => fromEvent(element, 'click')), map((clickEvent) => { let {target} = clickEvent; if (target instanceof HTMLElement) { const optionElement = target.closest('[data-option]'); if (optionElement instanceof HTMLElement) { const graphId = optionElement.dataset['option']; if (graphId) { return {type: 'selectGraph', graphId}; } } } }), filter(Boolean), tap(() => dropdownVisible$.next(false)), ); const dropdownClassName$ = dropdownVisible$.pipe( map( (visible) => `web-audio-select-graph-dropdown ${visible ? '' : 'hidden'}`, ), ); const dropdownPositionStyle$ = buttonElement$.pipe( switchMap((buttonElement) => dropdownVisible$.pipe( map((visible) => { const rect = buttonElement.getBoundingClientRect(); return visible ? 
{ top: `${rect.bottom}px`, left: `${rect.left}px`, } : {}; }), ), ), ); return merge( setElementText(titleElement$, titleText$), toggleElementClassList(buttonElement$, buttonClassName$), setElementHTML(dropdownListElement$, dropdownListHTML$), setElementClassName(dropdownListElement$, dropdownClassName$), assignElementStyle(dropdownListElement$, dropdownPositionStyle$), dropdownListIdSelected$, eventAction$, ); } ================================================ FILE: src/panel/graph/AudioEdgeArrowGraphics.ts ================================================ import * as PIXI from 'pixi.js'; import {GraphColor} from './graphStyle'; const ARROW_LENGTH = 16; const ARROW_HEIGHT = 8; const ARROW_ANGLE_ROUNDING = 32; export class EdgeArrowGraphics { geometryCache = new Array(ARROW_ANGLE_ROUNDING * 2 + 1).fill(null); drawFromPoint( pointOnLine: PIXI.Point, end: PIXI.Point, graphics: PIXI.Graphics, ) { const arrowMagnitude = Math.hypot( pointOnLine.y - end.y, pointOnLine.x - end.x, ); const arrowUnitX = (pointOnLine.x - end.x) / arrowMagnitude; const arrowUnitY = (pointOnLine.y - end.y) / arrowMagnitude; this.drawFromUnit(arrowUnitX, arrowUnitY, end, graphics); } drawFromUnit( arrowUnitX: number, arrowUnitY: number, end: PIXI.Point, graphics: PIXI.Graphics, ) { graphics.beginFill(GraphColor.INPUT_OUTPUT); graphics.drawPolygon([ new PIXI.Point( end.x + arrowUnitX * ARROW_LENGTH + arrowUnitY * ARROW_HEIGHT, end.y + arrowUnitY * ARROW_LENGTH - arrowUnitX * ARROW_HEIGHT, ), new PIXI.Point( end.x + arrowUnitX * ARROW_LENGTH - arrowUnitY * ARROW_HEIGHT, end.y + arrowUnitY * ARROW_LENGTH + arrowUnitX * ARROW_HEIGHT, ), new PIXI.Point(end.x, end.y), ]); graphics.endFill(); } getGeometry(pointOnLine: PIXI.Point, end: PIXI.Point) { const magnitude = Math.hypot(pointOnLine.x - end.x, pointOnLine.y - end.y); const unitX = (pointOnLine.x - end.x) / magnitude; const unitY = (pointOnLine.y - end.y) / magnitude; const angle = Math.atan2(unitY, unitX); const angleSliceIndex = Math.round( 
(angle / Math.PI) * ARROW_ANGLE_ROUNDING, ); const cacheIndex = angleSliceIndex + ARROW_ANGLE_ROUNDING; if (this.geometryCache[cacheIndex] === null) { const graphics = new PIXI.Graphics(); const angleRounded = (angleSliceIndex / ARROW_ANGLE_ROUNDING) * Math.PI; this.drawFromUnit( Math.cos(angleRounded), Math.sin(angleRounded), new PIXI.Point(Math.cos(angleRounded) * 4, Math.sin(angleRounded) * 4), graphics, ); this.geometryCache[cacheIndex] = graphics.geometry; } return this.geometryCache[cacheIndex]; } createGraphics(pointOnLine: PIXI.Point, end: PIXI.Point) { const graphics = new PIXI.Graphics(this.getGeometry(pointOnLine, end)); graphics.position.set(end.x, end.y); return graphics; } } ================================================ FILE: src/panel/graph/AudioEdgeCurvedLineGraphics.ts ================================================ import * as PIXI from 'pixi.js'; import {AudionPanel} from '../Types'; import {GraphColor} from './graphStyle'; const STEP_RATIO = 1 / 10; const LINE_COEFF = createLineCoefficients(); interface LineCoefficients { ax: number; ay: number; bx: number; by: number; cx: number; cy: number; dx: number; dy: number; } export class EdgeCurvedLineGraphics { geometryCache: PIXI.GraphicsGeometry[][] = []; getGeometry(a: PIXI.Point, d: PIXI.Point) { const i = Math.floor(Math.abs(d.x - a.x)); const j = Math.floor(Math.abs(d.y - a.y)); if (i > 100 || j > 100) { const graphics = new PIXI.Graphics(); this.drawCurvedLine( new PIXI.Point(), new PIXI.Point(i / 2, j / 3), new PIXI.Point(i / 2, (j * 2) / 3), new PIXI.Point(i, j), graphics, new PIXI.Point(), ); return graphics.geometry; } if (!this.geometryCache[i]) { this.geometryCache[i] = []; } if (!this.geometryCache[i][j]) { const b0 = new PIXI.Point(i / 2, j / 3); const c0 = new PIXI.Point(i / 2, (j * 2) / 3); const d0 = new PIXI.Point(i, j); const graphics = new PIXI.Graphics(); this.drawCurvedLine( new PIXI.Point(), b0, c0, d0, graphics, new PIXI.Point(), ); this.geometryCache[i][j] = 
graphics.geometry; } return this.geometryCache[i][j]; } createGraphics(a: PIXI.Point, d: PIXI.Point) { const graphics = new PIXI.Graphics(this.getGeometry(a, d)); graphics.position.set(a.x, a.y); const x = d.x - a.x; const y = d.y - a.y; graphics.scale.set( x === 0 ? 1 : x / Math.abs(x), y === 0 ? 1 : y / Math.abs(y), ); return graphics; } /** * Draw a curved line with 3 points to control its shape. * @param a * @param b * @param c * @param graphics * @param pointOnLine */ drawCurvedLine( a: AudionPanel.Point, b: AudionPanel.Point, c: AudionPanel.Point, d: AudionPanel.Point, graphics: PIXI.Graphics, pointOnLine: AudionPanel.Point, ) { const lineCoeffs = buildLineCoefficients(a, b, c, d, LINE_COEFF); const lineMagnitudeEstimate = Math.hypot(a.y - d.y, a.x - d.x); const steps = Math.max(2, Math.ceil(lineMagnitudeEstimate * STEP_RATIO)); graphics.lineStyle(2, GraphColor.INPUT_OUTPUT); graphics.moveTo(a.x, a.y); for (let i = 1; i < steps; i++) { interpolateCoefficients(lineCoeffs, i / steps, pointOnLine); graphics.lineTo(pointOnLine.x, pointOnLine.y); } graphics.lineStyle(0); graphics.closePath(); } /** * Adjust a point along a line by amount radius. * @param end * @param destination * @param radius */ adjustPoint( end: AudionPanel.Point, destination: AudionPanel.Point, radius: number, ) { const magnitude = Math.hypot(end.y - destination.y, end.x - destination.x); destination.x += ((end.x - destination.x) / magnitude) * radius; destination.y += ((end.y - destination.y) / magnitude) * radius; } } /** * Create a LineCoefficients object. * @return */ function createLineCoefficients(): LineCoefficients { return {ax: 0, ay: 0, bx: 0, by: 0, cx: 0, cy: 0, dx: 0, dy: 0}; } /** * Interpolate a line from 4 points: a, b, c, d. 
* @param a * @param b * @param c * @param d * @param coeff * @return */ function buildLineCoefficients( a: AudionPanel.Point, b: AudionPanel.Point, c: AudionPanel.Point, d: AudionPanel.Point, coeff = createLineCoefficients(), ): LineCoefficients { const {x: ax, y: ay} = a; const {x: bx, y: by} = b; const {x: cx, y: cy} = c; const {x: dx, y: dy} = d; coeff.ax = dx - 3 * cx + 3 * bx - ax; coeff.ay = dy - 3 * cy + 3 * by - ay; coeff.bx = 3 * cx - 6 * bx + 3 * ax; coeff.by = 3 * cy - 6 * by + 3 * ay; coeff.cx = 3 * bx - 3 * ax; coeff.cy = 3 * by - 3 * ay; coeff.dx = ax; coeff.dy = ay; return coeff; } /** * @param coeff * @param t number between 0 and 1 inclusive * @param destination * @return */ function interpolateCoefficients( coeff: LineCoefficients, t: number, destination: AudionPanel.Point = new PIXI.Point(), ): AudionPanel.Point { const t2 = t * t; const t3 = t2 * t; destination.x = coeff.ax * t3 + coeff.bx * t2 + coeff.cx * t + coeff.dx; destination.y = coeff.ay * t3 + coeff.by * t2 + coeff.cy * t + coeff.dy; return destination; } ================================================ FILE: src/panel/graph/AudioEdgeRender.ts ================================================ import * as PIXI from 'pixi.js'; import type {AudionPanel} from '../Types'; import {EdgeArrowGraphics} from './AudioEdgeArrowGraphics'; import {EdgeCurvedLineGraphics} from './AudioEdgeCurvedLineGraphics'; import {GraphColor} from './graphStyle'; const ARROW_LENGTH = 12; const ARROW_HEIGHT = 4; const STEP_RATIO = 1 / 10; const LINE_COEFF = createLineCoefficients(); export interface AudioEdgeKey { v: string; w: string; name: string; } /** * Render a line between AudionNodes and their inputs, outputs, and parameters. 
*/ export class AudioEdgeRender { key: AudioEdgeKey; source: AudionPanel.Port; destination: AudionPanel.Port; parent: PIXI.Container; graphics: PIXI.Graphics; container: PIXI.Container; /** * @param options */ constructor({ key, source, destination, }: { key: AudioEdgeKey; source: AudionPanel.Port; destination: AudionPanel.Port; }) { this.key = key; this.source = source; this.destination = destination; this.parent = null; this.graphics = new PIXI.Graphics(); this.container = new PIXI.Container(); this.source.edges.push(this); this.destination.edges.push(this); } /** * @param parent */ setPIXIParent(parent: PIXI.Container) { this.parent = parent; parent.addChild(this.container); } /** * Remove the PIXI DisplayObject from the rendered hierarchy. */ remove() { this.container.parent.removeChild(this.container); this.source.edges.splice(this.source.edges.indexOf(this), 1); this.destination.edges.splice(this.destination.edges.indexOf(this), 1); } /** * @param line */ draw( line: AudionPanel.Point[], { edgeArrowGraphics: arrowGraphics, edgeCurvedLineGraphics: curvedLineGraphics, }: { edgeArrowGraphics: EdgeArrowGraphics; edgeCurvedLineGraphics: EdgeCurvedLineGraphics; }, ) { const { offset: start, node: {position: sourcePosition}, } = this.source; const { offset: end, node: {position: destinationPosition}, } = this.destination; const a = new PIXI.Point( sourcePosition.x + start.x, sourcePosition.y + start.y, ); const d = new PIXI.Point( destinationPosition.x + end.x, destinationPosition.y + end.y, ); this.container.removeChildren(); this.container.addChild(arrowGraphics.createGraphics(a, d)); this.container.addChild(curvedLineGraphics.createGraphics(a, d)); } /** * Draw an arrow. 
* @param pointOnLine * @param end * @param graphics */ drawArrow( pointOnLine: AudionPanel.Point, end: AudionPanel.Point, graphics: PIXI.Graphics, ) { const arrowMagnitude = Math.hypot( pointOnLine.y - end.y, pointOnLine.x - end.x, ); const arrowUnitX = (pointOnLine.x - end.x) / arrowMagnitude; const arrowUnitY = (pointOnLine.y - end.y) / arrowMagnitude; graphics.beginFill(GraphColor.INPUT_OUTPUT); graphics.lineTo( end.x + arrowUnitX * ARROW_LENGTH + arrowUnitY * ARROW_HEIGHT, end.y + arrowUnitY * ARROW_LENGTH - arrowUnitX * ARROW_HEIGHT, ); graphics.lineTo( end.x + arrowUnitX * ARROW_LENGTH - arrowUnitY * ARROW_HEIGHT, end.y + arrowUnitY * ARROW_LENGTH + arrowUnitX * ARROW_HEIGHT, ); graphics.lineTo(end.x, end.y); graphics.endFill(); } /** * Draw a curved line with 3 points to control its shape. * @param a * @param b * @param c * @param graphics * @param pointOnLine */ drawCurvedLine( a: AudionPanel.Point, b: AudionPanel.Point, c: AudionPanel.Point, graphics: PIXI.Graphics, pointOnLine: AudionPanel.Point, ) { const lineCoeffs = lineCoefficients(a, b, c, LINE_COEFF); const lineMagnitudeEstimate = Math.hypot(a.y - c.y, a.x - c.x); const steps = Math.max(2, Math.ceil(lineMagnitudeEstimate * STEP_RATIO)); graphics.lineStyle(2, GraphColor.INPUT_OUTPUT); graphics.moveTo(a.x, a.y); for (let i = 1; i < steps; i++) { interpolateCoefficients(lineCoeffs, i / steps, pointOnLine); graphics.lineTo(pointOnLine.x, pointOnLine.y); } graphics.lineTo(c.x, c.y); } /** * Adjust a point along a line by amount radius. * @param end * @param destination * @param radius */ adjustPoint( end: AudionPanel.Point, destination: AudionPanel.Point, radius: number, ) { const magnitude = Math.hypot(end.y - destination.y, end.x - destination.x); destination.x += ((end.x - destination.x) / magnitude) * radius; destination.y += ((end.y - destination.y) / magnitude) * radius; } } /** * Create a LineCoefficients object. 
* @return */ function createLineCoefficients(): LineCoefficients { return {ax: 0, ay: 0, bx: 0, by: 0, cx: 0, cy: 0}; } /** * Interpolate a line from 3 points: a, b, c. * @param a * @param b * @param c * @param coeff * @return */ function lineCoefficients( a: AudionPanel.Point, b: AudionPanel.Point, c: AudionPanel.Point, coeff = createLineCoefficients(), ): LineCoefficients { const {x: ax, y: ay} = a; const {x: bx, y: by} = b; const {x: cx, y: cy} = c; const cbx = cx - bx; const bax = bx - ax; const cby = cy - by; const bay = by - ay; coeff.ax = cbx - bax; coeff.ay = cby - bay; coeff.bx = 2 * bax; coeff.by = 2 * bay; coeff.cx = ax; coeff.cy = ay; return coeff; } /** * @param coeff * @param t number between 0 and 1 inclusive * @param destination * @return */ function interpolateCoefficients( coeff: LineCoefficients, t: number, destination: AudionPanel.Point = new PIXI.Point(), ): AudionPanel.Point { destination.x = coeff.ax * t * t + coeff.bx * t + coeff.cx; destination.y = coeff.ay * t * t + coeff.by * t + coeff.cy; return destination; } /** * @typedef LineCoefficients * @property {number} ax * @property {number} ay * @property {number} bx * @property {number} by * @property {number} cx * @property {number} cy */ interface LineCoefficients { ax: number; ay: number; bx: number; by: number; cx: number; cy: number; } ================================================ FILE: src/panel/graph/AudioGraphRender.ts ================================================ /// import * as PIXI from 'pixi.js'; import {BehaviorSubject} from 'rxjs'; import {Audion} from '../../devtools/Types'; import {AudioEdgeKey, AudioEdgeRender} from './AudioEdgeRender'; import {AudioNodeRender} from './AudioNodeRender'; import {Camera} from './Camera'; import {GraphicsCache} from './GraphicsCache'; type AnimationFrameId = ReturnType; /** * Render a graph of nodes and edges. 
*/ export class AudioGraphRender { nodeMap: Map; edgeIdMap: Map>>; edgeMap: Map; camera: Camera; elementContainer: HTMLElement; pixiApplication: PIXI.Application | null; pixiView: HTMLCanvasElement | null; pixiNodeContainer: PIXI.Container | null; pixiEdgeContainer: PIXI.Container | null; renderFrameId: AnimationFrameId | null; graphicsCache: GraphicsCache; selectedNode$: BehaviorSubject; /** * Create an AudioGraphRender. * @param options */ constructor({elementContainer}: {elementContainer: HTMLElement}) { this.nodeMap = new Map(); this.edgeIdMap = new Map(); this.edgeMap = new Map(); this.camera = new Camera(); this.elementContainer = elementContainer; this.pixiView = null; this.pixiApplication = null; this.pixiNodeContainer = null; this.pixiEdgeContainer = null; this.renderFrameId = null; this.graphicsCache = null; this._render = this._render.bind(this); this.selectedNode$ = new BehaviorSubject(null); } /** Initialize. */ init() { const app = (this.pixiApplication = new PIXI.Application( { backgroundColor: 0xffffff, resizeTo: this.elementContainer, antialias: true, autoDensity: true, resolution: window.devicePixelRatio, }, )); this.pixiView = app.view; this.graphicsCache = new GraphicsCache(); const nodeContainer = (this.pixiNodeContainer = new PIXI.Container()); app.stage.addChild(nodeContainer); const edgeContainer = (this.pixiEdgeContainer = new PIXI.Container()); app.stage.addChild(edgeContainer); this.initEvents(); this.camera.viewportObserver.observe((viewport) => { const {x, y, width, height} = this.camera.viewport; app.stage.setTransform(-x / width, -y / height, 1 / width, 1 / height); this.requestRender(); }); } /** Render the graph. */ requestRender() { if (this.renderFrameId === null) { this.renderFrameId = requestAnimationFrame(this._render); } } _render() { this.renderFrameId = null; const {pixiApplication: app} = this; this.camera.setScreenSize(app.screen.width, app.screen.height); app.render(); } /** Stop rendering. 
*/ stop() { cancelAnimationFrame(this.renderFrameId); } /** * @param message */ updateGraphSizes(message: Audion.GraphContext): Audion.GraphContext { if (message.graph) { message.graph.nodes.forEach(({v: nodeId, value: node}) => { if (node) { const nodeRender = this.createNodeRender( nodeId, message.nodes[nodeId], ); node.width = nodeRender.size.x; node.height = nodeRender.size.y; } }); } else { for (const nodeId of this.nodeMap.keys()) { this.destroyNodeRender(nodeId); } for (const edgeId of this.edgeMap.keys()) { this.destroyEdgeRender(edgeId); } } return message; } /** * @param message */ update(message: Audion.GraphContext) { this.camera.setGraphSize( message.graph.value.width, message.graph.value.height, ); const previousNodeRenders = new Set(this.nodeMap.values()); for (let i = 0; i < message.graph.nodes.length; i++) { const nodeKeyValue = message.graph.nodes[i]; const nodeId = nodeKeyValue.v; const node = nodeKeyValue.value; if (node) { const nodeRender = this.createNodeRender(nodeId, message.nodes[nodeId]); nodeRender.container.visible = true; nodeRender.position.set( node.x - nodeRender.size.x / 2, node.y - nodeRender.size.y / 2, ); previousNodeRenders.delete(nodeRender); } else { this.destroyNodeRender(nodeId); } } for (const nodeRender of previousNodeRenders) { this.destroyNodeRender(nodeRender.id); } const previousEdgeRenders = new Set(this.edgeMap.values()); for (let i = 0; i < message.graph.edges.length; i++) { const edgeKeyValue = message.graph.edges[i]; const edge = edgeKeyValue.value; if (edge) { const edgeRender = this.createEdgeRender(edgeKeyValue, message); if (edgeRender) { edgeRender.draw(edge.points, this.graphicsCache); } previousEdgeRenders.delete(edgeRender); } } for (const edgeRender of previousEdgeRenders) { this.destroyEdgeRender(edgeRender.key); } this.requestRender(); } getNodeAtViewportPoint(viewportPoint: {x: number; y: number}) { const screenPoint = new PIXI.Point( viewportPoint.x * this.camera.screen.width, viewportPoint.y * 
this.camera.screen.height, ); return this.getNodeAtScreenPoint(screenPoint); } getNodeAtScreenPoint(screenPoint: {x: number; y: number}) { for (const nodeRender of this.nodeMap.values()) { if ( nodeRender.container.getBounds().contains(screenPoint.x, screenPoint.y) ) { return nodeRender.node; } } return null; } /** Initialize event handling. */ initEvents() { const {pixiApplication: app} = this; app.stage.eventMode = 'dynamic'; let lastPoint = null; app.stage.addListener('mousemove', (e) => { if (lastPoint && e.buttons) { this.camera.move(lastPoint.x - e.globalX, lastPoint.y - e.globalY); } lastPoint = e.global.clone(); }); app.view.onclick = ({offsetX, offsetY}) => { const {clientWidth, clientHeight} = app.view; const viewportPoint = new PIXI.Point( offsetX / clientWidth, offsetY / clientHeight, ); const lastSelectedNode = this.selectedNode$.value; const selectedNode = this.getNodeAtViewportPoint(viewportPoint); this.nodeMap.get(lastSelectedNode?.node?.nodeId)?.setHighlight(false); this.nodeMap.get(selectedNode?.node?.nodeId)?.setHighlight(true); this.requestRender(); this.selectedNode$.next(selectedNode); }; app.view.onwheel = (e) => { this.camera.zoom( e.clientX - app.view.clientLeft, e.clientY - app.view.clientTop, e.deltaY / 1000, ); }; } /** * Create the rendering for an audio node. * @param nodeId * @param node * @returns */ createNodeRender(nodeId: string, node: Audion.GraphNode): AudioNodeRender { let nodeRender = this.nodeMap.get(nodeId); if (!nodeRender) { if (node.node && node.node.nodeType) { nodeRender = new AudioNodeRender(nodeId).init(node, this.graphicsCache); nodeRender.setPixiParent(this.pixiNodeContainer); this.nodeMap.set(nodeId, nodeRender); } } return nodeRender; } /** * Destroy the rendering for an audio node. 
   * @param nodeId id of the node whose render is removed; clears the
   *     selection when it was the selected node
   */
  destroyNodeRender(nodeId: any) {
    const nodeRender = this.nodeMap.get(nodeId);
    if (nodeRender) {
      nodeRender.remove();
      this.nodeMap.delete(nodeId);
      if (nodeId === this.selectedNode$.value?.node?.nodeId) {
        this.selectedNode$.next(null);
      }
    }
  }

  /**
   * Total ordering over edge keys: compare by v, then w, then name.
   * Returns -1/0/1 like a standard comparator.
   */
  compareEdgeKey(left: AudioEdgeKey, right: AudioEdgeKey) {
    if (left.v < right.v) {
      return -1;
    } else if (left.v > right.v) {
      return 1;
    }
    if (left.w < right.w) {
      return -1;
    } else if (left.w > right.w) {
      return 1;
    }
    if (left.name < right.name) {
      return -1;
    } else if (left.name > right.name) {
      return 1;
    }
    return 0;
  }

  /**
   * Intern an edge key: returns the one canonical {v, w, name} object for this
   * triple so it can be used as a Map key by identity.
   */
  createEdgeId({v, w, name}: Audion.GraphlibEdge) {
    if (!this.edgeIdMap.has(v)) {
      this.edgeIdMap.set(v, new Map());
    }
    const edgeIdVMap = this.edgeIdMap.get(v);
    if (!edgeIdVMap.has(w)) {
      edgeIdVMap.set(w, new Map());
    }
    const edgeIdVWMap = edgeIdVMap.get(w);
    if (!edgeIdVWMap.has(name)) {
      edgeIdVWMap.set(name, {v, w, name});
    }
    return edgeIdVWMap.get(name);
  }

  /**
   * Remove an interned edge key, pruning any nested maps left empty.
   */
  destroyEdgeId(edgeId: AudioEdgeKey) {
    if (this.edgeIdMap.has(edgeId.v)) {
      const edgeIdVMap = this.edgeIdMap.get(edgeId.v);
      if (edgeIdVMap.has(edgeId.w)) {
        const edgeIdVWMap = edgeIdVMap.get(edgeId.w);
        if (edgeIdVWMap.has(edgeId.name)) {
          edgeIdVWMap.delete(edgeId.name);
        }
        if (edgeIdVWMap.size === 0) {
          edgeIdVMap.delete(edgeId.w);
        }
      }
      if (edgeIdVMap.size === 0) {
        this.edgeIdMap.delete(edgeId.v);
      }
    }
  }

  /**
   * Create (or fetch) the rendering for an edge. Returns undefined when
   * either endpoint's node render or port does not exist yet.
   * @param edge graphlib edge record
   * @param context graph context providing the endpoint node data
   * @return the edge's render, or undefined
   */
  createEdgeRender(
    edge: Audion.GraphlibEdge,
    context: Audion.GraphContext,
  ): AudioEdgeRender {
    const edgeId = this.createEdgeId(edge);
    let edgeRender = this.edgeMap.get(edgeId);
    if (!edgeRender) {
      const sourceData = context.nodes[edge.v];
      const destinationData = context.nodes[edge.w];
      if (sourceData && destinationData) {
        const sourceNode = this.nodeMap.get(sourceData.node.nodeId);
        const destinationNode = this.nodeMap.get(destinationData.node.nodeId);
        if (sourceNode && destinationNode) {
          const {sourceOutputIndex, destinationType} = edge.value;
          const sourceNodePort = sourceNode.output[sourceOutputIndex];
          // NODE edges land on an input port; otherwise on an AudioParam port.
          const destinationNodePort =
            destinationType === Audion.GraphEdgeType.NODE
              ? destinationNode.input[edge.value.destinationInputIndex]
              : destinationNode.param[edge.value.destinationParamIndex];
          if (sourceNodePort && destinationNodePort) {
            edgeRender = new AudioEdgeRender({
              key: edgeId,
              source: sourceNodePort,
              destination: destinationNodePort,
            });
            edgeRender.setPIXIParent(this.pixiEdgeContainer);
            // Connected ports become visible once an edge attaches.
            edgeRender.source.updateNodeDisplay();
            edgeRender.destination.updateNodeDisplay();
            this.edgeMap.set(edgeId, edgeRender);
          }
        }
      }
    }
    return edgeRender;
  }

  /**
   * Destroy the rendering for an edge and release its interned key.
   * @param edgeId canonical edge key
   */
  destroyEdgeRender(edgeId: AudioEdgeKey) {
    const edgeRender = this.edgeMap.get(edgeId);
    if (edgeRender) {
      edgeRender.remove();
      edgeRender.source.updateNodeDisplay();
      edgeRender.destination.updateNodeDisplay();
      this.edgeMap.delete(edgeId);
      this.destroyEdgeId(edgeId);
    }
  }
}

================================================ FILE: src/panel/graph/AudioGraphText.ts ================================================

import * as PIXI from 'pixi.js';

/**
 * A rendered text string: wraps a PIXI.Text to expose its measured bounds and
 * texture so sprites can share one rasterization.
 */
export class AudioGraphText {
  bounds: PIXI.Rectangle;
  content: string;
  text: PIXI.Text;
  textStyle: PIXI.TextStyle;
  texture: PIXI.Texture;
  constructor(textStyle: PIXI.TextStyle, content: string) {
    this.textStyle = textStyle;
    this.content = content;
    this.text = new PIXI.Text(content, this.textStyle);
    this.bounds = this.text.getLocalBounds(new PIXI.Rectangle());
    this.texture = this.text.texture;
  }
  /** Create a new sprite sharing this text's texture. */
  createSprite() {
    return new PIXI.Sprite(this.texture);
  }
}

================================================ FILE: src/panel/graph/AudioGraphTextCacheGroup.ts ================================================

import * as PIXI from 'pixi.js';

import {AudioGraphText} from './AudioGraphText';
import {GraphTextStyle} from './graphStyle';

/** Cache of AudioGraphText rasterizations for one text style. */
export class AudioGraphTextCache {
  textStyle: PIXI.TextStyle;
  // NOTE(review): Map type arguments appear lost in extraction; presumably
  // Map<string, AudioGraphText> — confirm against the original source.
  cache: Map = new Map();
  constructor({textStyle}: {textStyle: PIXI.TextStyle}) {
    this.textStyle = textStyle;
  }
  /** Get (creating on first use) the cached text for `content`. */
  getText(content: string) {
    if (!this.cache.has(content)) {
      const newText = new
AudioGraphText(this.textStyle, content);
      this.cache.set(content, newText);
    }
    return this.cache.get(content);
  }
  /** Measured bounds for `content`, rasterizing it on first use. */
  getTextBounds(content: string) {
    return this.getText(content).bounds;
  }
}

/** One text cache per text style used by the graph (titles and params). */
export class AudioGraphTextCacheGroup {
  paramText: AudioGraphTextCache;
  titleText: AudioGraphTextCache;
  constructor() {
    this.paramText = new AudioGraphTextCache({
      textStyle: new PIXI.TextStyle(GraphTextStyle.PARAM),
    });
    this.titleText = new AudioGraphTextCache({
      textStyle: new PIXI.TextStyle(GraphTextStyle.TITLE),
    });
  }
}

================================================ FILE: src/panel/graph/AudioNodeBackground.ts ================================================

import * as PIXI from 'pixi.js';

import {Audion} from '../../devtools/Types';
import {AudionPanel} from '../Types';
import {AudioGraphTextCacheGroup} from './AudioGraphTextCacheGroup';
import {AudioNodePort} from './AudioNodePort';
import {
  GraphColor,
  colorFromNodeType,
  GraphPortStyle,
  GraphNodeStyle,
} from './graphStyle';

/** Style toggle shared by plain and highlighted background renders. */
export interface AudioNodeBackgroundStyle {
  isHighlighted: boolean;
}

/** Measured text bounds for a node's title and each of its param labels. */
export class AudioNodeTextMetrics {
  title: PIXI.Rectangle = null;
  param: PIXI.Rectangle[] = [];
  static from(
    node: Audion.GraphNode,
    textCacheGroup: AudioGraphTextCacheGroup,
  ) {
    const metrics = new AudioNodeTextMetrics();
    metrics.title = textCacheGroup.titleText.getTextBounds(node.node.nodeType);
    for (let i = 0; i < node.params.length; i++) {
      metrics.param.push(
        textCacheGroup.paramText.getTextBounds(node.params[i].paramType),
      );
    }
    return metrics;
  }
}

/**
 * Everything needed to size a node background: its type, text metrics, and
 * port counts.
 */
export class AudioNodeMetrics {
  nodeType: string;
  text: AudioNodeTextMetrics;
  numberOfInputs: number;
  numberOfOutputs: number;
  numberOfParams: number;
  static from(
    node: Audion.GraphNode,
    textCacheGroup: AudioGraphTextCacheGroup,
  ) {
    const metrics = new AudioNodeMetrics();
    metrics.nodeType = node.node.nodeType;
    metrics.text = AudioNodeTextMetrics.from(node, textCacheGroup);
    metrics.numberOfInputs = node.node.numberOfInputs;
    metrics.numberOfOutputs = node.node.numberOfOutputs;
    metrics.numberOfParams = node.params.length;
    return metrics;
  }
}

/**
 * Geometry of one node background shared across all nodes of a type: overall
 * size plus template port positions (input ports down the left edge, output
 * ports down the right edge, param ports below the inputs).
 */
export class AudioNodeBackground {
  metrics: AudioNodeMetrics;
  input: AudioNodePort[] = [];
  output: AudioNodePort[] = [];
  param: AudioNodePort[] = [];
  size: PIXI.Point = new PIXI.Point();
  /** Padding around input ports. */
  static get INPUT_GROUP_MARGIN() {
    return GraphPortStyle.INPUT_GROUP_MARGIN;
  }
  /** Height of input output ports. */
  static get INPUT_HEIGHT() {
    return GraphPortStyle.INPUT_HEIGHT;
  }
  /** Radius of the visible port icon. */
  static get INPUT_RADIUS() {
    return GraphPortStyle.INPUT_RADIUS;
  }
  /** Padding around the group of params. */
  static get PARAM_GROUP_MARGIN() {
    return GraphPortStyle.PARAM_GROUP_MARGIN;
  }
  /** Height of audio parameter ports. */
  static get PARAM_HEIGHT() {
    return GraphPortStyle.PARAM_HEIGHT;
  }
  /** Radius of visible port icon. */
  static get PARAM_RADIUS() {
    return GraphPortStyle.PARAM_RADIUS;
  }
  /** Compute size and template port positions from node metrics. */
  init(metrics: AudioNodeMetrics) {
    this.metrics = metrics;
    const {numberOfInputs, numberOfOutputs, numberOfParams} = metrics;
    const {input, output, param, size} = this;
    this._getSize(metrics, size);
    // Input ports on the left edge, evenly stacked.
    for (let i = input.length; i < numberOfInputs; i++) {
      input[i] = new AudioNodePort({
        node: null,
        portType: AudionPanel.PortType.INPUT,
        portIndex: i,
        point: new PIXI.Point(
          0,
          AudioNodeBackground.INPUT_GROUP_MARGIN +
            (i + 0.5) * AudioNodeBackground.INPUT_HEIGHT,
        ),
        radius: AudioNodeBackground.INPUT_RADIUS,
        color: GraphColor.INPUT_OUTPUT,
      });
    }
    // Output ports on the right edge (x = node width).
    for (let i = output.length; i < numberOfOutputs; i++) {
      output[i] = new AudioNodePort({
        node: null,
        portType: AudionPanel.PortType.OUTPUT,
        portIndex: i,
        point: new PIXI.Point(
          size.x,
          AudioNodeBackground.INPUT_GROUP_MARGIN +
            (i + 0.5) * AudioNodeBackground.INPUT_HEIGHT,
        ),
        radius: AudioNodeBackground.INPUT_RADIUS,
        color: GraphColor.INPUT_OUTPUT,
      });
    }
    // Param ports on the left edge, below the title/input group.
    const paramYStart = this._getParamYStart(metrics);
    for (let i = 0; i < numberOfParams; i++) {
      param[i] = new AudioNodePort({
        node: null,
        portType: AudionPanel.PortType.PARAM,
        portIndex: i,
        point: new PIXI.Point(
          0,
          paramYStart + (i + 0.5) * AudioNodeBackground.PARAM_HEIGHT,
        ),
        radius: AudioNodeBackground.PARAM_RADIUS,
        color: GraphColor.AUDIO_PARAM,
      });
    }
  }
  /** Y offset where the param port group starts: below whichever is taller,
   * the title or the input port group. */
  private _getParamYStart({
    text: textMetrics,
    numberOfInputs,
  }: AudioNodeMetrics) {
    return Math.max(
      textMetrics.title.height + GraphNodeStyle.TITLE_PADDING,
      AudioNodeBackground.INPUT_GROUP_MARGIN +
        numberOfInputs * AudioNodeBackground.INPUT_HEIGHT +
        Math.max(
          AudioNodeBackground.INPUT_GROUP_MARGIN,
          AudioNodeBackground.PARAM_GROUP_MARGIN,
        ),
    );
  }
  /** Compute the node's overall size into `size` (mutated in place).
   * Width fits the widest of title/param text; height fits title-or-inputs
   * plus params on one side, or the outputs on the other, whichever is
   * taller. */
  private _getSize(
    {
      text: textMetrics,
      numberOfInputs,
      numberOfOutputs,
      numberOfParams,
    }: AudioNodeMetrics,
    size: PIXI.Point,
  ) {
    const maxParamTextSize = new PIXI.Point();
    for (let i = 0; i < numberOfParams; i++) {
      const param = textMetrics.param[i];
      maxParamTextSize.x = Math.max(maxParamTextSize.x, param.width);
      maxParamTextSize.y = Math.max(maxParamTextSize.y, param.height);
    }
    size.set(
      Math.max(textMetrics.title.width, maxParamTextSize.x) +
        2 * GraphNodeStyle.PADDING,
      Math.max(
        Math.max(
          textMetrics.title.height + 2 * GraphNodeStyle.TITLE_PADDING,
          AudioNodeBackground.INPUT_GROUP_MARGIN +
            AudioNodeBackground.INPUT_HEIGHT * numberOfInputs +
            Math.max(
              AudioNodeBackground.INPUT_GROUP_MARGIN,
              AudioNodeBackground.PARAM_GROUP_MARGIN,
            ),
        ) +
          AudioNodeBackground.PARAM_HEIGHT * numberOfParams +
          AudioNodeBackground.PARAM_GROUP_MARGIN,
        AudioNodeBackground.INPUT_GROUP_MARGIN +
          AudioNodeBackground.INPUT_HEIGHT * numberOfOutputs +
          AudioNodeBackground.INPUT_GROUP_MARGIN,
      ),
    );
  }
}

/**
 * Draws one node-type's background (rounded rect plus port sockets) and caches
 * the resulting geometry so every node of that type can share it.
 */
export class AudioNodeBackgroundRender {
  background: AudioNodeBackground;
  style: AudioNodeBackgroundStyle;
  geometry: PIXI.GraphicsGeometry = null;
  material: PIXI.MeshMaterial;
  constructor(
    background: AudioNodeBackground,
    style: AudioNodeBackgroundStyle,
    material: PIXI.MeshMaterial,
  ) {
    this.background = background;
    this.style = style;
    this.material = material;
  }
  /** Draw the background into `graphics`, honoring the highlight style. */
  draw(graphics: PIXI.Graphics) {
    graphics.clear();
    if (this.style.isHighlighted) {
      graphics.lineStyle({
        width:
GraphNodeStyle.HIGHLIGHT_STROKE_WIDTH,
        color: GraphNodeStyle.HIGHLIGHT_STROKE_COLOR,
      });
    } else {
      graphics.lineStyle(0);
    }
    graphics.beginFill(colorFromNodeType(this.background.metrics.nodeType));
    graphics.drawRoundedRect(
      0,
      0,
      this.background.size.x,
      this.background.size.y,
      GraphNodeStyle.CORNER_RADIUS,
    );
    graphics.endFill();
    // Draw each port socket at its template position.
    for (let i = 0; i < this.background.input.length; i++) {
      this.background.input[i].drawSocket(graphics);
    }
    for (let i = 0; i < this.background.output.length; i++) {
      this.background.output[i].drawSocket(graphics);
    }
    for (let i = 0; i < this.background.param.length; i++) {
      this.background.param[i].drawSocket(graphics);
    }
  }
  /** Lazily draw once and keep the resulting geometry for reuse. */
  getGeometry() {
    if (this.geometry === null) {
      const graphics = new PIXI.Graphics();
      this.draw(graphics);
      this.geometry = graphics.geometry;
    }
    return this.geometry;
  }
  /** New Graphics instance sharing the cached geometry. */
  createMesh() {
    return new PIXI.Graphics(this.getGeometry());
  }
}

================================================ FILE: src/panel/graph/AudioNodeBackgroundRenderCacheGroup.ts ================================================

import * as PIXI from 'pixi.js';
import {MeshMaterial} from 'pixi.js';

import {Audion} from '../../devtools/Types';
import {AudioGraphTextCacheGroup} from './AudioGraphTextCacheGroup';
import {
  AudioNodeBackground,
  AudioNodeBackgroundRender,
  AudioNodeBackgroundStyle,
  AudioNodeMetrics,
} from './AudioNodeBackground';

/** Cache of AudioNodeBackground geometry, keyed by node type. */
export class AudioNodeBackgroundCache {
  textCacheGroup: AudioGraphTextCacheGroup;
  // NOTE(review): Map type arguments appear lost in extraction; presumably
  // Map<string, AudioNodeBackground> — confirm against the original source.
  cache: Map = new Map();
  constructor(textCacheGroup: AudioGraphTextCacheGroup) {
    this.textCacheGroup = textCacheGroup;
  }
  /** Get (creating on first use) the shared background for `node`'s type. */
  getBackground(node: Audion.GraphNode) {
    if (!this.cache.has(node.node.nodeType)) {
      const background = new AudioNodeBackground();
      background.init(AudioNodeMetrics.from(node, this.textCacheGroup));
      this.cache.set(node.node.nodeType, background);
    }
    return this.cache.get(node.node.nodeType);
  }
}

/** Cache of AudioNodeBackgroundRender instances for one style, keyed by node
 * type; wraps a shared AudioNodeBackgroundCache. */
export class AudioNodeBackgroundRenderCache {
  material: PIXI.MeshMaterial;
  textCacheGroup: AudioGraphTextCacheGroup;
  background: AudioNodeBackgroundCache;
  style: AudioNodeBackgroundStyle;
  // NOTE(review): Map type arguments appear lost in extraction; presumably
  // Map<string, AudioNodeBackgroundRender>.
  cache: Map = new Map();
  constructor({
    background,
    style,
    material,
  }: {
    background: AudioNodeBackgroundCache;
    style: AudioNodeBackgroundStyle;
    material: PIXI.MeshMaterial;
  }) {
    this.material = material;
    this.background = background;
    this.style = style;
  }
  /** Get (creating on first use) the styled render for `node`'s type. */
  getBackground(node: Audion.GraphNode) {
    if (!this.cache.has(node.node.nodeType)) {
      const background = this.background.getBackground(node);
      const backgroundRender = new AudioNodeBackgroundRender(
        background,
        this.style,
        this.material,
      );
      this.cache.set(node.node.nodeType, backgroundRender);
    }
    return this.cache.get(node.node.nodeType);
  }
}

/** Pair of background render caches — plain and highlighted — sharing one
 * geometry cache and material. */
export class AudioNodeBackgroundRenderCacheGroup {
  textCacheGroup: AudioGraphTextCacheGroup;
  defaultMaterial: PIXI.MeshMaterial;
  plain: AudioNodeBackgroundRenderCache;
  highlight: AudioNodeBackgroundRenderCache;
  constructor({textCacheGroup}: {textCacheGroup: AudioGraphTextCacheGroup}) {
    this.textCacheGroup = textCacheGroup;
    const material = (this.defaultMaterial = new MeshMaterial(
      PIXI.Texture.EMPTY,
    ));
    const background = new AudioNodeBackgroundCache(textCacheGroup);
    this.plain = new AudioNodeBackgroundRenderCache({
      background,
      style: {isHighlighted: false},
      material,
    });
    this.highlight = new AudioNodeBackgroundRenderCache({
      background,
      style: {isHighlighted: true},
      material,
    });
  }
}

================================================ FILE: src/panel/graph/AudioNodePort.ts ================================================

import * as PIXI from 'pixi.js';

import {AudionPanel} from '../Types';
import {GraphPortStyle} from './graphStyle';

// Shared origin used when no offset is supplied to drawSocket.
const ZERO_POINT = new PIXI.Point();

export enum AudioNodePortType {
  INPUT = 'input',
  OUTPUT = 'output',
  PARAM = 'param',
}

/**
 * Port.
 * One connection point on a node: an input, an output, or an AudioParam.
 */
export class AudioNodePort {
  node: AudionPanel.Node;
  portType: AudionPanel.PortType;
  portIndex: number;
  /** Position of the port relative to the node's origin. */
  offset: AudionPanel.Point;
  radius: number;
  color: number;
  /** Edges currently attached to this port. */
  edges: any[];
  /** Radius of the visible port icon.
*/
  static get INPUT_RADIUS() {
    return GraphPortStyle.INPUT_RADIUS;
  }
  /** Radius of visible port icon. */
  static get PARAM_RADIUS() {
    return GraphPortStyle.PARAM_RADIUS;
  }
  /**
   * Create a port.
   * @param options port owner, kind, index, position, and appearance
   */
  constructor({
    node,
    portType,
    portIndex,
    point,
    radius,
    color,
  }: {
    node: AudionPanel.Node;
    portType: AudionPanel.PortType;
    portIndex: number;
    point: AudionPanel.Point;
    radius: number;
    color: number;
  }) {
    this.node = node;
    this.portType = portType;
    this.portIndex = portIndex;
    this.offset = point;
    this.radius = radius;
    this.color = color;
    this.edges = [];
  }
  /** Ask the owning node to refresh this port's connected/disconnected look. */
  updateNodeDisplay() {
    this.node.updatePortDisplay(this.portType, this.portIndex);
  }
  /**
   * Draw the port circle (stroked in the port color, filled with `fill`).
   * @param graphics target graphics object
   */
  drawSocket(
    graphics: PIXI.Graphics,
    fill: number = GraphPortStyle.DISCONNECTED_FILL_COLOR,
    offset: AudionPanel.Point = ZERO_POINT,
  ) {
    graphics.lineStyle(GraphPortStyle.STROKE_WIDTH, this.color);
    graphics.beginFill(fill);
    graphics.drawCircle(
      offset.x + this.offset.x,
      offset.y + this.offset.y,
      this.radius,
    );
    graphics.endFill();
  }
  /**
   * Draw the port as connected: solid fill, positioned at the node.
   * @param graphics target graphics object
   */
  drawConnect(graphics: PIXI.Graphics) {
    this.drawSocket(graphics, this.color, this.node.position);
  }
}

================================================ FILE: src/panel/graph/AudioNodeRender.ts ================================================

import * as PIXI from 'pixi.js';

import {Audion} from '../../devtools/Types';
import {
  GraphColor,
  colorFromNodeType,
  GraphNodeStyle,
  GraphPortStyle,
  GraphTextStyle,
} from './graphStyle';
import {AudioNodePort, AudioNodePortType} from './AudioNodePort';
import {AudioGraphTextCacheGroup} from './AudioGraphTextCacheGroup';
import {AudioNodeBackgroundRenderCacheGroup} from './AudioNodeBackgroundRenderCacheGroup';
import {AudioPortCacheGroup} from './AudioPortCacheGroup';
import {AudionPanel} from '../Types';

/**
 * Manage the rendered representation of a WebAudio node.
*/
export class AudioNodeRender {
  id: string;
  node: Audion.GraphNode;
  backgroundCacheGroup: AudioNodeBackgroundRenderCacheGroup;
  parent: PIXI.Container;
  container: PIXI.Container;
  title: PIXI.DisplayObject;
  labelContainer: PIXI.Container;
  background: PIXI.DisplayObject;
  portContainer: PIXI.Container;
  inputPortDisplays: PIXI.DisplayObject[];
  outputPortDisplays: PIXI.DisplayObject[];
  paramPortDisplays: PIXI.DisplayObject[];
  size: PIXI.Point;
  position: PIXI.Point;
  input: AudioNodePort[];
  output: AudioNodePort[];
  param: AudioNodePort[];
  isHighlighted: boolean;
  /**
   * Create a AudioNodeRender instance.
   * Fields stay null/empty until init() is called with node data.
   * @param id graph node id this render represents
   */
  constructor(id: string) {
    this.id = id;
    this.node = null;
    this.parent = null;
    this.container = null;
    this.title = null;
    this.labelContainer = null;
    this.background = null;
    this.backgroundCacheGroup = null;
    this.inputPortDisplays = [];
    this.outputPortDisplays = [];
    this.paramPortDisplays = [];
    this.size = new PIXI.Point();
    this.position = null;
    this.input = [];
    this.output = [];
    this.param = [];
  }
  /** Padding around input ports. */
  static get INPUT_GROUP_MARGIN() {
    return GraphPortStyle.INPUT_GROUP_MARGIN;
  }
  /** Height of input output ports. */
  static get INPUT_HEIGHT() {
    return GraphPortStyle.INPUT_HEIGHT;
  }
  /** Radius of the visible port icon. */
  static get INPUT_RADIUS() {
    return GraphPortStyle.INPUT_RADIUS;
  }
  /** Padding around the group of params. */
  static get PARAM_GROUP_MARGIN() {
    return GraphPortStyle.PARAM_GROUP_MARGIN;
  }
  /** Height of audio parameter ports. */
  static get PARAM_HEIGHT() {
    return GraphPortStyle.PARAM_HEIGHT;
  }
  /** Radius of visible port icon. */
  static get PARAM_RADIUS() {
    return GraphPortStyle.PARAM_RADIUS;
  }
  /**
   * Build the display hierarchy (background, labels, title, ports) from node
   * data. A no-op when already initialized with the same param count.
   * @param node node data to render
   * @return this, for chaining
   */
  init(
    node: Audion.GraphNode,
    {
      textCacheGroup,
      backgroundCacheGroup,
      portCacheGroup,
    }: {
      textCacheGroup: AudioGraphTextCacheGroup;
      backgroundCacheGroup: AudioNodeBackgroundRenderCacheGroup;
      portCacheGroup: AudioPortCacheGroup;
    },
  ): AudioNodeRender {
    if (this.node && node.params.length === Object.keys(this.param).length) {
      return this;
    }
    this.node = node;
    this.backgroundCacheGroup = backgroundCacheGroup;
    const container = (this.container = new PIXI.Container());
    this.position = container.position;
    // Hidden until the first layout update positions it.
    container.visible = false;
    const title = (this.title = textCacheGroup.titleText
      .getText(node.node.nodeType)
      .createSprite());
    title.position.set(GraphNodeStyle.PADDING, GraphNodeStyle.TITLE_PADDING);
    const background = (this.background = backgroundCacheGroup.plain
      .getBackground(node)
      .createMesh());
    const labelContainer = (this.labelContainer = new PIXI.Container());
    const portContainer = (this.portContainer = new PIXI.Container());
    container.addChild(background);
    container.addChild(labelContainer);
    container.addChild(title);
    container.addChild(portContainer);
    this.initSize(textCacheGroup);
    this.initPorts(portCacheGroup);
    this.draw();
    return this;
  }
  /**
   * Attach this node's container to a parent container.
   * @param parent container to attach to
   */
  setPixiParent(parent: PIXI.Container) {
    this.parent = parent;
    parent.addChild(this.container);
  }
  /**
   * Remove from the rendering hierarchy.
   */
  remove() {
    this.container.parent.removeChild(this.container);
  }
  /** Determine the size of the node.
*/
  initSize(textCacheGroup: AudioGraphTextCacheGroup) {
    const {node, title} = this;
    const localBounds = new PIXI.Rectangle();
    // Rebuild the param label sprites while measuring the widest/tallest one.
    this.labelContainer.removeChildren();
    const maxParamTextSize = new PIXI.Point();
    for (let i = 0; i < node.params.length; i++) {
      const param = node.params[i];
      const label = textCacheGroup.paramText
        .getText(param.paramType)
        .createSprite();
      this.labelContainer.addChild(label);
      label.getLocalBounds(localBounds);
      maxParamTextSize.x = Math.max(maxParamTextSize.x, localBounds.width);
      maxParamTextSize.y = Math.max(maxParamTextSize.y, localBounds.height);
    }
    title.getLocalBounds(localBounds);
    // Width fits the widest text; height fits title-or-inputs plus params on
    // the left side, or the outputs on the right side, whichever is taller.
    this.size.set(
      Math.max(localBounds.width, maxParamTextSize.x) +
        2 * GraphNodeStyle.PADDING,
      Math.max(
        Math.max(
          localBounds.height + 2 * GraphNodeStyle.TITLE_PADDING,
          AudioNodeRender.INPUT_GROUP_MARGIN +
            AudioNodeRender.INPUT_HEIGHT * node.node.numberOfInputs +
            Math.max(
              AudioNodeRender.INPUT_GROUP_MARGIN,
              AudioNodeRender.PARAM_GROUP_MARGIN,
            ),
        ) +
          AudioNodeRender.PARAM_HEIGHT * node.params.length +
          AudioNodeRender.PARAM_GROUP_MARGIN,
        AudioNodeRender.INPUT_GROUP_MARGIN +
          AudioNodeRender.INPUT_HEIGHT * node.node.numberOfOutputs +
          AudioNodeRender.INPUT_GROUP_MARGIN,
      ),
    );
  }
  /**
   * Initialize ports.
*/
  initPorts(portCacheGroup: AudioPortCacheGroup) {
    const {
      input,
      node,
      output,
      param,
      inputPortDisplays,
      outputPortDisplays,
      paramPortDisplays,
    } = this;
    // Input ports stacked down the left edge.
    for (let i = input.length; i < node.node.numberOfInputs; i++) {
      const inputPoint = new PIXI.Point(
        0,
        AudioNodeRender.INPUT_GROUP_MARGIN +
          (i + 0.5) * AudioNodeRender.INPUT_HEIGHT,
      );
      input[i] = new AudioNodePort({
        node: this,
        portType: AudionPanel.PortType.INPUT,
        portIndex: i,
        point: inputPoint,
        radius: AudioNodeRender.INPUT_RADIUS,
        color: GraphColor.INPUT_OUTPUT,
      });
      inputPortDisplays[i] =
        portCacheGroup.inputOutput.createGraphics(inputPoint);
      this.portContainer.addChild(inputPortDisplays[i]);
    }
    // Output ports stacked down the right edge (x = node width).
    for (let i = output.length; i < node.node.numberOfOutputs; i++) {
      const outputPoint = new PIXI.Point(
        this.size.x,
        AudioNodeRender.INPUT_GROUP_MARGIN +
          (i + 0.5) * AudioNodeRender.INPUT_HEIGHT,
      );
      output[i] = new AudioNodePort({
        node: this,
        portType: AudionPanel.PortType.OUTPUT,
        portIndex: i,
        point: outputPoint,
        radius: AudioNodeRender.INPUT_RADIUS,
        color: GraphColor.INPUT_OUTPUT,
      });
      outputPortDisplays[i] =
        portCacheGroup.inputOutput.createGraphics(outputPoint);
      this.portContainer.addChild(outputPortDisplays[i]);
    }
    // Param ports start below whichever is taller: the title or the inputs.
    const localBounds = new PIXI.Rectangle();
    this.title.getLocalBounds(localBounds);
    const paramYStart = Math.max(
      localBounds.height + GraphNodeStyle.TITLE_PADDING,
      AudioNodeRender.INPUT_GROUP_MARGIN +
        input.length * AudioNodeRender.INPUT_HEIGHT +
        Math.max(
          AudioNodeRender.INPUT_GROUP_MARGIN,
          AudioNodeRender.PARAM_GROUP_MARGIN,
        ),
    );
    for (let i = 0; i < node.params.length; i++) {
      const paramPoint = new PIXI.Point(
        0,
        paramYStart + (i + 0.5) * AudioNodeRender.PARAM_HEIGHT,
      );
      const paramPort = (param[i] = new AudioNodePort({
        node: this,
        portType: AudionPanel.PortType.PARAM,
        portIndex: i,
        point: paramPoint,
        radius: AudioNodeRender.PARAM_RADIUS,
        color: GraphColor.AUDIO_PARAM,
      }));
      paramPortDisplays[i] = portCacheGroup.param.createGraphics(paramPoint);
      this.portContainer.addChild(paramPortDisplays[i]);
      // Vertically center the i-th param label (built in initSize) on its port.
      const label = this.labelContainer.getChildAt(i);
      label.position.set(
        GraphNodeStyle.PADDING,
        paramPort.offset.y - 0.5 * GraphTextStyle.PARAM.fontSize,
      );
    }
  }
  /** Toggle the highlighted (selected) appearance and redraw. */
  setHighlight(isHighlighted: boolean) {
    this.isHighlighted = isHighlighted;
    this.draw();
  }
  /** Show a port's icon only while it has at least one attached edge. */
  updatePortDisplay(portType: AudionPanel.PortType, index: number) {
    if (portType === AudionPanel.PortType.INPUT) {
      this.inputPortDisplays[index].visible =
        this.input[index].edges.length > 0;
    } else if (portType === AudionPanel.PortType.OUTPUT) {
      this.outputPortDisplays[index].visible =
        this.output[index].edges.length > 0;
    } else if (portType === AudionPanel.PortType.PARAM) {
      this.paramPortDisplays[index].visible =
        this.param[index].edges.length > 0;
    }
  }
  /**
   * Update the rendering.
   * Swaps in the plain or highlighted cached background mesh.
   */
  draw() {
    const newBackground = (
      this.isHighlighted
        ? this.backgroundCacheGroup.highlight
        : this.backgroundCacheGroup.plain
    )
      .getBackground(this.node)
      .createMesh();
    this.container.removeChild(this.background);
    this.container.addChildAt(newBackground, 0);
    this.background = newBackground;
  }
}

================================================ FILE: src/panel/graph/AudioPortCacheGroup.ts ================================================

import * as PIXI from 'pixi.js';

import {AudionPanel} from '../Types';
import {AudioNodePort} from './AudioNodePort';
import {GraphColor} from './graphStyle';

/** Caches one template port's filled-socket geometry for reuse. */
export class AudioPortCache {
  port: AudioNodePort;
  geometry: PIXI.GraphicsGeometry = null;
  constructor(port: AudioNodePort) {
    this.port = port;
  }
  /** Lazily draw the socket once and keep the geometry. */
  getGeometry() {
    if (this.geometry === null) {
      const graphics = new PIXI.Graphics();
      this.port.drawSocket(graphics, this.port.color);
      this.geometry = graphics.geometry;
    }
    return this.geometry;
  }
  /** New (initially hidden) Graphics sharing the cached geometry. */
  createGraphics(position = new PIXI.Point()) {
    const graphics = new PIXI.Graphics(this.getGeometry());
    graphics.position.set(position.x, position.y);
    graphics.visible = false;
    return graphics;
  }
}

/** Template port caches: one for input/output sockets, one for param sockets. */
export class AudioPortCacheGroup {
  inputOutput: AudioPortCache;
  param: AudioPortCache;
  constructor() {
    this.inputOutput = new
AudioPortCache(
      new AudioNodePort({
        node: null,
        portType: AudionPanel.PortType.INPUT,
        portIndex: -1,
        point: new PIXI.Point(),
        radius: AudioNodePort.INPUT_RADIUS,
        color: GraphColor.INPUT_OUTPUT,
      }),
    );
    this.param = new AudioPortCache(
      new AudioNodePort({
        node: null,
        portType: AudionPanel.PortType.PARAM,
        portIndex: -1,
        point: new PIXI.Point(),
        radius: AudioNodePort.PARAM_RADIUS,
        color: GraphColor.AUDIO_PARAM,
      }),
    );
  }
}

================================================ FILE: src/panel/graph/Camera.js ================================================

import {Rectangle} from '@pixi/math';

import {Observer} from '../../utils/Observer';
import {trunc, clamp} from '../../utils/math';

// Minimum viewport scale factor (viewport.width doubles as zoom level).
const MIN_ZOOM = 0.5;

/**
 * Camera.
 * Tracks a pannable/zoomable viewport over the graph bounds and notifies an
 * observer whenever the viewport changes.
 */
export class Camera {
  /** Create a Camera. */
  constructor() {
    /** Area that can be viewed. */
    this.bounds = new Rectangle(-50, -50, 100, 100);
    this.screen = new Rectangle();
    // viewport.width/height hold the zoom factor, not pixel dimensions.
    this.viewport = new Rectangle(-50, -50, 1, 1);
    /** @type {Observer} */
    this.viewportObserver = new Observer((onNext) => {
      // While observed, update() forwards the viewport to the observer.
      this.update = () => {
        onNext(this.viewport);
      };
      return () => {};
    });
  }
  /** Update. No-op until viewportObserver is subscribed. */
  update() {}
  /**
   * Move the viewport.
   * Pan deltas are given in screen pixels and scaled by the zoom factor;
   * the result is clamped to the graph bounds.
   * @param {number} dx
   * @param {number} dy
   */
  move(dx, dy) {
    const zoomFactor = this.viewport.width;
    const {x, y} = this.viewport;
    this.viewport.x = trunc(
      clamp(
        x + dx * zoomFactor,
        this.bounds.x,
        Math.max(
          this.bounds.x,
          this.bounds.x + this.bounds.width - this.screen.width * zoomFactor,
        ),
      ),
      -2,
    );
    this.viewport.y = trunc(
      clamp(
        y + dy * zoomFactor,
        this.bounds.y,
        Math.max(
          this.bounds.y,
          this.bounds.y + this.bounds.height - this.screen.height * zoomFactor,
        ),
      ),
      -2,
    );
    this.update();
  }
  /**
   * Zoom in or out by `delta`.
   * Zooms about the given screen position: the viewport origin shifts so the
   * point under the cursor stays fixed, clamped to bounds.
   * @param {number} screenX
   * @param {number} screenY
   * @param {number} zoomDelta
   */
  zoom(screenX, screenY, zoomDelta) {
    // Upper zoom limit: the whole bounds just fits the screen.
    const maxScaleX = this.bounds.width / this.screen.width;
    const maxScaleY = this.bounds.height / this.screen.height;
    const maxScale = Math.max(1, maxScaleX, maxScaleY);
    const zoomFactor = this.viewport.width;
    const newZoom = trunc(
      clamp(zoomFactor + zoomDelta, MIN_ZOOM, maxScale),
      -2,
    );
    const {x, y} = this.viewport;
    this.viewport.x = trunc(
      clamp(
        x + screenX * (zoomFactor - newZoom),
        this.bounds.x,
        Math.max(
          this.bounds.x,
          this.bounds.x + this.bounds.width - this.screen.width * newZoom,
        ),
      ),
      -2,
    );
    this.viewport.y = trunc(
      clamp(
        y + screenY * (zoomFactor - newZoom),
        this.bounds.y,
        Math.max(
          this.bounds.y,
          this.bounds.y + this.bounds.height - this.screen.height * newZoom,
        ),
      ),
      -2,
    );
    this.viewport.width = newZoom;
    this.viewport.height = newZoom;
    this.update();
  }
  /**
   * Fit the viewport to the whole bounds.
   */
  fitToScreen() {
    this.zoom(0, 0, Infinity);
  }
  /**
   * Set graph bounds with and height.
   * Adds a 50-unit margin on every side.
   * @param {number} width
   * @param {number} height
   */
  setGraphSize(width, height) {
    this.bounds.x = -50;
    this.bounds.y = -50;
    this.bounds.width = width + 100;
    this.bounds.height = height + 100;
  }
  /**
   * Set screen size.
   * @param {number} width
   * @param {number} height
   */
  setScreenSize(width, height) {
    this.screen.width = width;
    this.screen.height = height;
  }
}

================================================ FILE: src/panel/graph/GraphicsCache.ts ================================================

import {EdgeArrowGraphics} from './AudioEdgeArrowGraphics';
import {EdgeCurvedLineGraphics} from './AudioEdgeCurvedLineGraphics';
import {AudioGraphTextCacheGroup} from './AudioGraphTextCacheGroup';
import {AudioNodeBackgroundRenderCacheGroup} from './AudioNodeBackgroundRenderCacheGroup';
import {AudioPortCacheGroup} from './AudioPortCacheGroup';

/** Bundle of every shared graphics cache used to render the graph. */
export class GraphicsCache {
  textCacheGroup: AudioGraphTextCacheGroup = new AudioGraphTextCacheGroup();
  backgroundCacheGroup: AudioNodeBackgroundRenderCacheGroup =
    new AudioNodeBackgroundRenderCacheGroup({
      textCacheGroup: this.textCacheGroup,
    });
  portCacheGroup: AudioPortCacheGroup = new AudioPortCacheGroup();
  edgeArrowGraphics: EdgeArrowGraphics = new EdgeArrowGraphics();
  edgeCurvedLineGraphics: EdgeCurvedLineGraphics = new EdgeCurvedLineGraphics();
}

================================================ FILE: src/panel/graph/graphStyle.js ================================================

/** @enum {number} Palette for node categories and graph chrome. */
export const Color = {
  PROCESSOR: 0x64b5f6,
  MEDIA: 0xba68c8,
  SOURCE: 0x81c784,
  DESTINATION: 0x90a4ad,
  ANALYSER: 0xf48fb1,
  AUDIO_WORKLET: 0x9fa8da,
  DEPRECATED: 0xe0e0e0,
  AUDIO_PARAM: 0xffa726,
  TEXT: 0x263238,
  INPUT_OUTPUT: 0x455a63,
  EDGE: 0x455a63,
};

/** Text styles for node titles and param labels. */
export const TextStyle = {
  TITLE: {
    fill: Color.TEXT,
    fontSize: 16,
  },
  PARAM: {
    fill: Color.TEXT,
    fontSize: 9,
  },
};

/** Port sizing and appearance constants. */
export const PortStyle = {
  /** Stroke width around port radius. */
  STROKE_WIDTH: 2,
  /** Inner color for ports without connecting edges. */
  DISCONNECTED_FILL_COLOR: 0xffffff,
  /** Padding around input ports. */
  INPUT_GROUP_MARGIN: 2,
  /** Height of input output ports. */
  INPUT_HEIGHT: 20,
  /** Radius of the visible port icon. */
  INPUT_RADIUS: 6,
  /** Padding around the group of params.
/** Styling for node backgrounds. */
export const NodeStyle = {
  /** Padding above and below title text. */
  TITLE_PADDING: 4,
  /** Stroke width around node when highlighted. */
  HIGHLIGHT_STROKE_WIDTH: 5,
  /** Stroke color around node when highlighted. */
  HIGHLIGHT_STROKE_COLOR: 0x000000,
  /** Node background corner radius. */
  CORNER_RADIUS: 3,
  /** Node background padding around contained text. */
  PADDING: 10,
};

/**
 * Category color for each recognized AudioNode type, grouped by purpose.
 * AudioDestination is handled separately because its color depends on
 * whether the owning context is offline.
 */
const NODE_TYPE_COLOR = {
  AudioBufferSource: Color.SOURCE,
  ConstantSource: Color.SOURCE,
  Oscillator: Color.SOURCE,
  Analyser: Color.ANALYSER,
  BiquadFilter: Color.PROCESSOR,
  Convolver: Color.PROCESSOR,
  Delay: Color.PROCESSOR,
  DynamicsCompressor: Color.PROCESSOR,
  IIRFilter: Color.PROCESSOR,
  Panner: Color.PROCESSOR,
  StereoPanner: Color.PROCESSOR,
  WaveShaper: Color.PROCESSOR,
  Gain: Color.PROCESSOR,
  ChannelMerger: Color.PROCESSOR,
  ChannelSplitter: Color.PROCESSOR,
  MediaElementAudioSource: Color.MEDIA,
  MediaStreamAudioDestination: Color.MEDIA,
  MediaStreamAudioSource: Color.MEDIA,
  AudioWorklet: Color.AUDIO_WORKLET,
  ScriptProcessor: Color.DEPRECATED,
};

/**
 * Look up the background color for an AudioNode type.
 * @param {string} nodeType
 * @param {boolean} [isOffline]
 * @return {number}
 */
export const colorFromNodeType = (nodeType, isOffline = false) => {
  if (nodeType === 'AudioDestination') {
    // The destination nodes of OfflineAudioContexts are brown. Those of
    // "non-offline" AudioContexts are a dark grey.
    return isOffline ? 0x5d4037 : Color.DESTINATION;
  }
  // Own-property check so inherited keys (e.g. 'toString') do not match.
  if (Object.prototype.hasOwnProperty.call(NODE_TYPE_COLOR, nodeType)) {
    return NODE_TYPE_COLOR[nodeType];
  }
  // Nothing matched. Odd. Highlight this node in dark red.
  return 0xc62828;
};
GraphColor.OFFLINE_DESTINATION : GraphColor.DESTINATION; case 'AudioBufferSource': case 'ConstantSource': case 'Oscillator': return GraphColor.SOURCE; case 'Analyser': return GraphColor.ANALYSER; case 'BiquadFilter': case 'Convolver': case 'Delay': case 'DynamicsCompressor': case 'IIRFilter': case 'Panner': case 'StereoPanner': case 'WaveShaper': case 'Gain': case 'ChannelMerger': case 'ChannelSplitter': return GraphColor.PROCESSOR; case 'MediaElementAudioSource': case 'MediaStreamAudioDestination': case 'MediaStreamAudioSource': return GraphColor.MEDIA; case 'AudioWorklet': return GraphColor.AUDIO_WORKLET; case 'ScriptProcessor': return GraphColor.DEPRECATED; } // Nothing matched. Odd. Highlight this node in dark red. return GraphColor.UNKNOWN; }; ================================================ FILE: src/panel/main.ts ================================================ /// // This module disable's pixi.js use of new Function to optimize rendering. import '@pixi/unsafe-eval'; import {merge, Subject} from 'rxjs'; import {catchError, filter, map, scan, shareReplay, tap} from 'rxjs/operators'; import {chrome} from '../chrome'; import {Audion} from '../devtools/Types'; import {mapThruWorker} from '../utils/mapThruWorker'; import {WholeGraphButton} from './components/WholeGraphButton'; import {querySelector} from './components/domUtils'; import {renderRealtimeSummary} from './components/realtimeSummary'; import {renderSelectGraph} from './components/selectGraph'; import {renderDetailPanel} from './components/detailPanel'; import {renderCollectGarbage} from './components/collectGarbage'; import {connect} from './Observer.runtime'; import {AudioGraphRender} from './graph/AudioGraphRender'; import {GraphSelector} from './GraphSelector'; import {updateGraphRender} from './updateGraphRender'; import {updateGraphSizes} from './updateGraphSizes'; if (chrome.devtools.panels.themeName === 'dark') { document.querySelector('html').className = '-theme-with-dark-background'; } const 
devtoolsRequestSubject$ = new Subject(); const devtoolsObserver$ = connect< Audion.DevtoolsRequest, Audion.DevtoolsMessage >(devtoolsRequestSubject$); const allGraphsObserver$ = devtoolsObserver$.pipe( scan((allGraphs, message) => { if ('allGraphs' in message) { return message.allGraphs; } else if ('graphContext' in message) { if ( message.graphContext.graph && message.graphContext.context.contextState !== 'closed' ) { return { ...allGraphs, [message.graphContext.id]: message.graphContext, }; } else { allGraphs = {...allGraphs}; delete allGraphs[message.graphContext.id]; return allGraphs; } } return allGraphs; }, {} as Audion.GraphContextsById), shareReplay({bufferSize: 1, refCount: true}), ); const graphSelector = new GraphSelector({ allGraphs$: allGraphsObserver$, }); graphSelector.options$.subscribe((options) => { if ( // Select a graph automatically if one is not selected. graphSelector.graphId === '' || // Select a graph automatically if current selected graph is no longer available. !options.includes(graphSelector.graphId) ) { // Select the newest graph (the last in the list). 
graphSelector.select(options[options.length - 1] || ''); } }); const graphContainer = /** @type {HTMLElement} */ document.getElementsByClassName( 'web-audio-graph', )[0] as HTMLElement; const graphRender = new AudioGraphRender({elementContainer: graphContainer}); graphRender.init(); const layoutWorker = new Worker('audion-panelWorker.js'); graphSelector.graph$ .pipe( map(updateGraphSizes(graphRender)), map((graphContext) => ({graphContext})), mapThruWorker(layoutWorker), map(updateGraphRender(graphRender)), catchError((reason, caught) => { console.error( 'An error handling the latest audio graph context occured:', reason, ); return caught; }), ) .subscribe(); const wholeGraphButton = new WholeGraphButton(); wholeGraphButton.click$.subscribe(() => { graphRender.camera.fitToScreen(); }); graphContainer.appendChild(graphRender.pixiView); graphContainer.appendChild(wholeGraphButton.render()); merge( renderCollectGarbage(querySelector('.toolbar-garbage-button')).pipe( tap((action) => { if (action && 'type' in action && action.type === 'collectGarbage') { devtoolsRequestSubject$.next(action); } }), filter(isHTMLElement), ), renderSelectGraph( querySelector('.web-audio-toolbar-container .dropdown-title'), querySelector('.web-audio-select-graph-dropdown'), querySelector('.web-audio-toolbar-container .toolbar-dropdown'), graphSelector.graphId$, allGraphsObserver$, ).pipe( tap((action) => { if (action && 'type' in action && action.type === 'selectGraph') { graphSelector.select(action.graphId); } }), filter(isHTMLElement), ), renderRealtimeSummary( querySelector('.web-audio-status'), graphSelector.graph$.pipe(map(({realtimeData}) => realtimeData)), ), renderDetailPanel( querySelector('.web-audio-detail-panel'), graphSelector.graph$, graphRender.selectedNode$, ), ) // Observe elements as they are changed. 
.subscribe(); document.getElementsByClassName('web-audio-loading')[0].classList.add('hidden'); /** * @param value * @returns value is a HTMLElement */ function isHTMLElement(value: unknown): value is HTMLElement { return value && value instanceof HTMLElement; } ================================================ FILE: src/panel/updateGraphRender.ts ================================================ import {Audion} from '../devtools/Types'; import {AudioGraphRender} from './graph/AudioGraphRender'; export function updateGraphRender( graphRender: AudioGraphRender, ): (value: Audion.GraphContext) => void { return (graphContext) => graphRender.update(graphContext); } ================================================ FILE: src/panel/updateGraphSizes.ts ================================================ import {Audion} from '../devtools/Types'; import {AudioGraphRender} from './graph/AudioGraphRender'; export function updateGraphSizes( graphRender: AudioGraphRender, ): (value: Audion.GraphContext, index: number) => Audion.GraphContext { return (graphContext) => graphRender.updateGraphSizes(graphContext); } ================================================ FILE: src/panel/worker.ts ================================================ import * as dagre from 'dagre'; import {fromEvent, Observable} from 'rxjs'; import { auditTime, distinctUntilChanged, filter, map, startWith, withLatestFrom, } from 'rxjs/operators'; import {serializeGraphContext} from '../devtools/serializeGraphContext'; import { deserializeGraphContext, SerializedGraphContext, } from '../devtools/deserializeGraphContext'; import {setOptionsToGraphContext} from '../devtools/setOptionsToGraphContext'; import {layoutGraphContext} from '../devtools/layoutGraphContext'; interface LayoutOptionsMessage { layoutOptions: dagre.GraphLabel; } interface GraphContextMessage { graphContext: SerializedGraphContext; } type PanelMessage = MessageEvent; const messages$ = fromEvent(self, 'message').pipe( map((message) => message.data), ); 
const layoutOptions$: Observable = messages$.pipe( filter((msg): msg is LayoutOptionsMessage => 'layoutOptions' in msg), map((message) => message.layoutOptions), startWith({rankdir: 'LR'}), ); messages$ .pipe( filter((msg): msg is GraphContextMessage => 'graphContext' in msg), map((message) => message.graphContext), distinctUntilChanged( (a, b) => a?.id === b?.id && a?.eventCount === b?.eventCount, ), auditTime(16), map((graphContext) => deserializeGraphContext(graphContext)), withLatestFrom(layoutOptions$), map(setOptionsToGraphContext), map(layoutGraphContext), map(serializeGraphContext), ) .subscribe((context) => { self.postMessage(context); }); ================================================ FILE: src/panel.html ================================================

Loading ...

import {Observer} from './Observer';

/**
 * Wrap a DOM-style message emitter in an Observer that yields each
 * message event's `data` payload.
 * @param {Utils.DataEmitter} emitter
 * @return {Utils.Observer}
 * @template T
 */
export function observeMessageEvents(emitter) {
  return new Observer(function subscribe(onNext) {
    /** Forward the event payload to the observer. */
    function forwardData(event) {
      onNext(event.data);
    }
    emitter.addEventListener('message', forwardData);
    return function unsubscribe() {
      emitter.removeEventListener('message', forwardData);
    };
  });
}

/**
 * Post every value produced by `observer` through `poster`.
 * @param {Utils.Observer} observer
 * @param {Utils.Poster} poster
 * @return {function(): void} stop posting observations
 * @template T
 */
export function postObservations(observer, poster) {
  return observer.observe(function post(value) {
    poster.postMessage(value);
  });
}
subscribeMock = jest.fn(); const o = new Observer(subscribeMock); expect(subscribeMock).toBeCalledTimes(0); o.observe(() => {}); expect(subscribeMock).toBeCalledTimes(1); o.observe(() => {}); expect(subscribeMock).toBeCalledTimes(1); }); it('unsubscribes when last observer unsubscribes', () => { const unsubscribeMock = jest.fn(); const o = new Observer(jest.fn().mockReturnValue(unsubscribeMock)); expect(unsubscribeMock).toBeCalledTimes(0); const unsubscribe1 = o.observe(() => {}); expect(unsubscribeMock).toBeCalledTimes(0); const unsubscribe2 = o.observe(() => {}); expect(unsubscribeMock).toBeCalledTimes(0); unsubscribe2(); expect(unsubscribeMock).toBeCalledTimes(0); unsubscribe1(); expect(unsubscribeMock).toBeCalledTimes(1); }); }); describe('Observer.throttle', () => { it('must throw when observing non-object or null', () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const o = Observer.throttle(new Observer(subscribeMock)); o.observe(nextMock); expect(() => { /** @type {function} */ (subscribeMock.mock.calls[0][0])('value'); }).toThrowError(InvariantError); expect(() => { /** @type {function} */ (subscribeMock.mock.calls[0][0])('value'); }).toThrowError( 'Observer.throttle must observe non-null objects. Received: string', ); expect(() => { /** @type {function} */ (subscribeMock.mock.calls[0][0])(null); }).toThrowError(InvariantError); expect(() => { /** @type {function} */ (subscribeMock.mock.calls[0][0])(null); }).toThrowError( 'Observer.throttle must observe non-null objects. 
Received: null', ); }); it('immediately sends first value when no throttle is running', () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const o = Observer.throttle(new Observer(subscribeMock), { timeout: () => Promise.resolve(), }); o.observe(nextMock); const value = {}; /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); expect(nextMock).toBeCalledWith(value); }); it('calls key option', async () => { const subscribeMock = jest.fn(); const keyMock = jest.fn(({key}) => key); const o = Observer.throttle(new Observer(subscribeMock), { key: keyMock, }); o.observe(() => {}); const value = {key: 'key'}; /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); expect(keyMock).toBeCalledWith(value); expect(keyMock).toReturnWith(value.key); }); it('calls timeout option', async () => { const subscribeMock = jest.fn(); const timeoutMock = jest.fn().mockImplementation(() => Promise.resolve()); const o = Observer.throttle(new Observer(subscribeMock), { timeout: timeoutMock, }); o.observe(() => {}); const value = {}; /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); expect(timeoutMock).toBeCalledTimes(1); }); it('sends a second value after a throttle timer', async () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const o = Observer.throttle(new Observer(subscribeMock), { timeout: () => Promise.resolve(), }); o.observe(nextMock); const value = {}; /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); expect(nextMock).toBeCalledTimes(1); expect(nextMock).nthCalledWith(1, value); await retry(() => expect(nextMock).toBeCalledTimes(2)); expect(nextMock).nthCalledWith(2, value); }); it('calls default timeout option', async () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const o = Observer.throttle(new Observer(subscribeMock)); o.observe(nextMock); const value = {}; /** @type {function} */ 
(subscribeMock.mock.calls[0][0])(value); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); expect(nextMock).toBeCalledTimes(1); await retry(() => expect(nextMock).toBeCalledTimes(2), { timeout: () => new Promise((resolve) => setTimeout(resolve, 5)), }); expect(nextMock).nthCalledWith(2, value); }); it('skips second and sends a third value during a timer', async () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const o = Observer.throttle(new Observer(subscribeMock), { timeout: () => Promise.resolve(), }); o.observe(nextMock); const value = {}; /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); expect(nextMock).toBeCalledTimes(1); expect(nextMock).nthCalledWith(1, value); await retry(() => expect(nextMock).toBeCalledTimes(2)); expect(nextMock).nthCalledWith(2, value); }); it('throttles per object value', async () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const o = Observer.throttle(new Observer(subscribeMock), { timeout: () => Promise.resolve(), }); o.observe(nextMock); const value1 = {}; const value2 = {}; /** @type {function} */ (subscribeMock.mock.calls[0][0])(value1); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value2); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value1); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value2); expect(nextMock).toBeCalledTimes(2); expect(nextMock).nthCalledWith(1, value1); expect(nextMock).nthCalledWith(2, value2); await retry(() => expect(nextMock).toBeCalledTimes(4)); expect(nextMock).nthCalledWith(3, value1); expect(nextMock).nthCalledWith(4, value2); }); it('flushes most recent messages before completing', () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const completeMock = jest.fn(); const o = Observer.throttle(new Observer(subscribeMock), { timeout: () => 
Promise.resolve(), }); o.observe(nextMock, completeMock); const value = {}; /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); expect(nextMock).toBeCalledTimes(1); expect(nextMock).nthCalledWith(1, value); /** @type {function} */ (subscribeMock.mock.calls[0][1])(); expect(nextMock).toBeCalledTimes(2); expect(nextMock).nthCalledWith(2, value); expect(completeMock).toBeCalledTimes(1); }); it('flushes most recent messages before error', () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const errorMock = jest.fn(); const o = Observer.throttle(new Observer(subscribeMock), { timeout: () => Promise.resolve(), }); o.observe(nextMock, () => {}, errorMock); const value = {}; /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); /** @type {function} */ (subscribeMock.mock.calls[0][0])(value); expect(nextMock).toBeCalledTimes(1); expect(nextMock).nthCalledWith(1, value); /** @type {function} */ (subscribeMock.mock.calls[0][2])('reason'); expect(nextMock).toBeCalledTimes(2); expect(nextMock).nthCalledWith(2, value); expect(errorMock).toBeCalledTimes(1); expect(errorMock).toBeCalledWith('reason'); }); }); describe('Observer.transform', () => { it('observes value returned by transform', () => { const subscribeMock = jest.fn(); const nextMock = jest.fn(); const o = Observer.transform( new Observer(subscribeMock), (value) => 'key: ' + value, ); o.observe(nextMock); /** @type {function} */ (subscribeMock.mock.calls[0][0])('value'); expect(nextMock).toBeCalledWith('key: value'); }); }); ================================================ FILE: src/utils/Observer.ts ================================================ import {Utils} from './Types'; import {invariant} from './error'; /* istanbul ignore next */ /** * Do nothing. 
* @param args * @memberof Utils.Observer */ function noop(...args: any) {} /** * @param promise * @memberof Utils * @alias makeCancelable */ function makeCancelable(promise: Promise): Utils.CancelablePromise { let cancel = noop; const cancelablePromise = Promise.race([ promise.then((value) => ({value, canceled: false})), new Promise((resolve) => (cancel = resolve)).then(() => ({canceled: true})), ]); return { promise: cancelablePromise, cancel, }; } /** * Implementation of the observer idiom. * * @memberof Utils * @alias Observer */ export class Observer implements Utils.Observer { subscribe: Utils.SubscribeCallback; _unsubscribeParent: (...args: any) => any; handles: {onNext; onComplete; onError}[]; constructor(subscribe: Utils.SubscribeCallback) { this.subscribe = subscribe; /** @type {function | null} */ this._unsubscribeParent = null; this.handles = []; this._onNext = this._onNext.bind(this); this._onComplete = this._onComplete.bind(this); this._onError = this._onError.bind(this); } static transform( target: Utils.Observer, onTransform: (value: T1) => T2, ): Utils.Observer { return new Observer((onNext, ...args) => { return target.observe((value) => { onNext(onTransform(value)); }, ...args); }); } static filter( target: Utils.Observer, testFunc: (value: T) => boolean, ): Utils.Observer { return new Observer((onNext, ...args) => { return target.observe((value) => { if (testFunc(value)) { onNext(value); } }); }); } static reduce( target: Utils.Observer, reducer: (accum: R, value: T) => R, initial: R, ): Utils.Observer { let latest = initial; return new Observer((onNext, ...args) => { return target.observe((value) => { latest = reducer(latest, value); onNext(latest); }, ...args); }); } static throttle( target: Utils.Observer, options?: Utils.ThrottleObserverOptions, ): Utils.Observer { return new ThrottleObserver(target, options); } /** * Immediately observe a value to any new subscribe. 
* @param {Observer} target * @param {function(): T2} onSubscribe * @return {Observer} * @template T1 * @template T2 */ static onSubscribe( target: Utils.Observer, onSubscribe: () => T2, ): Utils.Observer { return new SubscribeImmediateObserver(target, onSubscribe); } static props( props: {[key in keyof T]: Utils.Observer}, latest: T, ): Utils.Observer { return new Observer((onNext, onComplete, onError) => { const unsubscribes = []; for (const [key, prop] of Object.entries(props)) { unsubscribes.push( prop.observe( (value) => { latest = {...latest, [key]: value}; onNext(latest); }, onComplete, onError, ), ); } return () => { for (const unsubscribe of unsubscribes) { unsubscribe(); } }; }); } observe( onNext: (value: T) => void, onComplete: () => void = noop, onError: (error: Error) => void = noop, ): () => void { this._subscribeToParent(); const handles = {onNext, onComplete, onError}; this.handles.push(handles); return () => { this.handles.splice(this.handles.indexOf(handles), 1); this._unsubscribeFromParent(); }; } protected _onNext(message: T): void { for (let i = 0; i < this.handles.length; i++) { this.handles[i].onNext(message); } } protected _onComplete(): void { for (let i = 0; i < this.handles.length; i++) { this.handles[i].onComplete(); } } protected _onError(reason: any): void { for (let i = 0; i < this.handles.length; i++) { this.handles[i].onError(reason); } } /** * Subscribe to parent. */ protected _subscribeToParent(): void { if (this.handles.length === 0) { this._unsubscribeParent = this.subscribe( this._onNext, this._onComplete, this._onError, ); } } /** * Unsubscribe from parent. */ protected _unsubscribeFromParent(): void { if (this.handles.length === 0) { this._unsubscribeParent(); this._unsubscribeParent = null; } } } /** * Throttle repeated observed messages. * @memberof Utils * @alias ThrottleObserver */ export class ThrottleObserver extends Observer { private _timerMap: Map; /** * Create a ThrottleObserver. 
*/ constructor( target: Utils.Observer, { key = (obj) => obj, timeout = () => new Promise((resolve) => setTimeout(resolve, 16)), } = {} as Utils.ThrottleObserverOptions, ) { const timerMap = new Map() as Map< any, {cancel(): void; active: boolean; value: T} >; super((onNext, onComplete, onError) => { /** * @param {T} message */ const onThrottleNext = (message: T) => { invariant( typeof message === 'object' && message !== null, 'Observer.throttle must observe non-null objects. Received: %0', message === null ? 'null' : typeof message, ); const timerKey = key(message); if (timerMap.has(timerKey)) { const timer = timerMap.get(timerKey); timer.active = true; timer.value = message; } else { const timer = {cancel: noop, active: false, value: null}; (async () => { timerMap.set(timerKey, timer); const {promise, cancel} = makeCancelable(timeout()); timer.cancel = cancel; const {canceled} = await promise; timerMap.delete(timerKey); if (!canceled && timer.active) { onThrottleNext(timer.value); } })(); onNext(message); } }; return target.observe( onThrottleNext, () => { this._flush(); onComplete(); }, (reason) => { this._flush(); onError(reason); }, ); }); this._timerMap = timerMap; } /** * Flush remaining timers. */ private _flush() { for (const timer of this._timerMap.values()) { invariant( timer.active, 'Observer throttle timer must be active when flushing', ); this._onNext(timer.value); timer.cancel(); } this._timerMap.clear(); } } /** * Immediately observe a value to any new subscriber. */ export class SubscribeImmediateObserver extends Observer { onSubscribe: () => T2; /** * Create an SubscribeImmediateObserver. 
*/ constructor(target: Utils.Observer, onSubscribe: () => T2) { super((onNext, onComplete, onError) => target.observe(onNext, onComplete, onError), ); this.onSubscribe = onSubscribe; } observe( onNext: (value: T1 | T2) => void, onComplete?: () => void, onError?: (error: Error) => void, ): () => void { onNext(this.onSubscribe()); return super.observe(onNext, onComplete, onError); } } ================================================ FILE: src/utils/Types.ts ================================================ /** @namespace Utils */ /** * An abstraction of the observer idiom. * * @typedef Utils.Observer * @property {Utils.ObserverObserveMethod} observe * @template T */ /** * Install callbacks for each value observed, when the observer completes, if it * does, or if the observer errors. * * @callback Utils.ObserverObserveMethod * @param {Utils.SubscribeOnNext} onNext called with each observed value * @param {function(): void} [onComplete] called when the observer completes, if * it does * @param {function(*): void} [onError] called when the observer produces an * error, if it does * @return {function(): void} function to unsubscribe from this installation * @template T */ /** * @callback Utils.SubscribeCallback * @param {Utils.SubscribeOnNext} onNext * @param {function(): void} onComplete * @param {function(*): void} onError * @return {function(): void} * @template T * @alias SubscribeCallback */ /** * @callback Utils.SubscribeOnNext * @param {T} value * @return {void} * @template T * @alias SubscribeOnNext */ /** * @typedef Utils.Cancelable * @property {T} [value] * @property {boolean} canceled * @template T * @alias Cancelable */ /** * @typedef Utils.CancelablePromise * @property {Promise>} promise * @property {function(): void} cancel * @template T * @alias CancelablePromise */ /** * @typedef Utils.ThrottleObserverOptions * @property {function(T): *} [key] * @property {function(): Promise} [timeout] * @alias ThrottleObserverOptions * @template T */ /** * @typedef 
Utils.RetryOptions * @property {function(): Promise} [timeout] * @property {number} [times=10] */ /** * @callback Utils.DataEventListener * @param {{data: T}} event * @return {void} * @template T */ /** * @callback Utils.ModifyDataEventListeners * @param {string} eventName * @param {Utils.DataEventListener} listener * @return {void} * @template T */ /** * @typedef Utils.DataEmitter * @property {Utils.ModifyDataEventListeners} addEventListener * @property {Utils.ModifyDataEventListeners} removeEventListener * @template T */ /** * @typedef Utils.Poster * @property {function(T): void} postMessage * @template T */ export namespace Utils { export interface Observer { /** * @param next called with each observed value * @param complete called when the observer completes, if it does * @param error called when the observer produces an error, if it does * @return function to unsubscribe from this installation */ observe( next: (value: T) => void, complete?: () => void, error?: (error: Error) => void, ): () => void; } export interface SubscribeCallback { ( onNext: (value: T) => void, complete: () => void, error: (error: Error) => void, ): () => void; } export interface SubscribeOnNext { (value: T): void; } export interface Cancelable { value?: T; canceled: boolean; } export interface CancelablePromise { promise: Promise>; cancel(): void; } export interface ThrottleObserverOptions { key?(value: T): any; timeout?(): Promise; } export interface RetryOptions { timeout?(): Promise; number?: number; } } ================================================ FILE: src/utils/dlog.js ================================================ import {getTimestampAsString} from '../devtools/WebAudioGraphIntegrator'; // prettier-ignore /** * Send console logging to inspect window * @param {String} message The description of the debug event * @param {Object} properties The properties * of audio element for debugging */ export function DLOG(message, properties) { const SHOW_EXTRA_DEBUG_LOG = 
localStorage.getItem('showExtraDebugLog') === 'true'; if (SHOW_EXTRA_DEBUG_LOG) { let debugMessage = getTimestampAsString(); if (message) { debugMessage += message + '\n'; } for (const property in properties) { if (properties[property]) { switch (property) { case 'contextId': debugMessage += ` context ID = ${properties[property]} \n`; break; case 'sourceNodeId': debugMessage += ` source node ID = ${properties[property]} \n`; break; case 'nodeId': debugMessage += ` node ID = ${properties[property]} \n`; break; case 'destinationNodeId': debugMessage += ` destination node ID = ${properties[property]} \n`; break; case 'destinationParamId': debugMessage += ` destination param ID = ${properties[property]} \n`; break; case 'paramId': debugMessage += ` audio param ID = ${properties[property]} \n`; break; case 'reason': debugMessage += ` Error reason is ${properties[property]} \n`; break; default: break; } } } console.debug(debugMessage); } } ================================================ FILE: src/utils/error.js ================================================ /** * An error caused by a falsifiable assumption shown to be false. * @memberof Utils * @alias InvariantError */ export class InvariantError extends Error { /** * Create an InvariantError. 
/**
 * An error caused by a falsifiable assumption shown to be false.
 *
 * The message is a format string: `%N` is replaced by the N-th entry of
 * `args`, and `%%` produces a literal `%`.
 * @memberof Utils
 * @alias InvariantError
 */
export class InvariantError extends Error {
  /**
   * Create an InvariantError.
   * @param {string} message format string (`%N` placeholders, `%%` escape)
   * @param {Array} args values substituted for `%N` placeholders
   */
  constructor(message, args) {
    super();
    this._message = message;
    this._args = args;
  }

  /**
   * The formatted message with placeholders substituted.
   * @type {string}
   */
  get message() {
    // Use the regex capture group rather than match[1] (the second
    // *character* of the match): the old code resolved multi-digit
    // placeholders such as `%10` to args[1] instead of args[10].
    return this._message.replace(/%(%|\d+)/g, (match, token) => {
      if (token === '%') {
        return '%';
      }
      return this._args[Number(token)];
    });
  }
}

/**
 * Throw an InvariantError when `test` is falsy.
 * @param {boolean} test condition that must hold
 * @param {string} message format string for the error
 * @param {Array} args values substituted into the format string
 * @memberof Utils
 * @alias invariant
 */
export function invariant(test, message, ...args) {
  if (!test) {
    throw new InvariantError(message, args);
  }
}
/**
 * Invoke `fn`, retrying after each failure until it succeeds or the attempt
 * budget is exhausted. `timeout` is awaited between attempts (so it runs
 * `times - 1` times at most); the last failure's error is rethrown.
 * @param {function(): PromiseLike<T> | T} fn
 * @param {Utils.RetryOptions} options
 * @return {Promise<T>} resolves with the first successful result
 * @template T
 * @memberof Utils
 * @alias retry
 */
export async function retry(
  fn,
  {timeout = () => Promise.resolve(), times = 10} = {},
) {
  let attemptsLeft = times;
  for (;;) {
    try {
      return await fn();
    } catch (err) {
      // Out of attempts: surface the most recent error.
      if (attemptsLeft <= 1) {
        throw err;
      }
      attemptsLeft -= 1;
      await timeout();
    }
  }
}
).resolves.toBe('answer 1'); }); it('returns last error if no attempt succeeds', async () => { let i = 0; const func = jest.fn(() => { throw new Error(`reason ${i++}`); }); await expect(retry(func)).rejects.toMatchObject({message: 'reason 9'}); expect(func).toBeCalledTimes(10); }); it('tries after a timeout', async () => { let i = 0; const func = jest.fn(() => { throw new Error(`reason ${i++}`); }); const timeout = jest.fn(() => Promise.resolve()); const retryPromise = retry(func, {timeout}); expect(timeout).toBeCalledTimes(1); await expect(retryPromise).rejects.toMatchObject({message: 'reason 9'}); expect(timeout).toBeCalledTimes(9); }); it('tries n times', async () => { let i = 0; const func = jest.fn(() => { throw new Error(`reason ${i++}`); }); await expect(retry(func, {times: 2})).rejects.toMatchObject({ message: 'reason 1', }); expect(func).toBeCalledTimes(2); }); }); ================================================ FILE: src/utils/rxChrome.ts ================================================ import {fromEventPattern, Observable} from 'rxjs'; import {chrome} from '../chrome'; import {ChromeDebuggerAPIEvent} from '../devtools/DebuggerAttachEventController'; /** * Create a function that returns an observable that completes when the api * calls back. * @param method `chrome` api method whose last argument is a callback * @param thisArg `this` inside of the method * @returns observable that completes when the method is done */ export function bindChromeCallback

( method: (...args: [...params: P, callback: (...values: R) => void]) => void, thisArg = null, ) { return (...args: P) => new Observable( (subscriber) => { method.call(thisArg, ...args, (...returnValues: R) => { if (chrome.runtime.lastError) { subscriber.error(chrome.runtime.lastError); } else { if (returnValues.length === 0) { subscriber.next(); } else if (returnValues.length === 1) { subscriber.next(returnValues[0]); } else if (returnValues.length > 1) { subscriber.next(returnValues as any); } subscriber.complete(); } }); }, ); } export const fromChromeEvent = any>( onEvent: Chrome.Event, ) => fromEventPattern< Parameters extends infer T1 ? T1 extends [] ? void : T1 extends [infer T2] ? T2 : T1 : never >(onEvent.addListener.bind(onEvent), onEvent.removeListener.bind(onEvent)); ================================================ FILE: src/utils/rxInterop.ts ================================================ import {Observable} from 'rxjs'; import {Observer} from './Observer'; import {Utils} from './Types'; /** * Wrap a `Utils.Observer` instance with `rxjs.Observable` instance. * * This is a workaround so `rxjs.Observable` can use `Utils.Observer` as a source * until said `Observer` instances can be replaced with `Observable` instances. 
* * @param observer observer to wrap * @returns observable wrapping an observer */ export function toRX(observer: Utils.Observer): Observable { return new Observable((subscriber) => observer.observe( (value) => subscriber.next(value), () => subscriber.complete(), (err) => subscriber.error(err), ), ); } export function toUtilsObserver( observable: Observable, ): Utils.Observer { return new Observer((next, complete, error) => { const subscription = observable.subscribe({next, complete, error}); return () => { subscription.unsubscribe(); }; }); } ================================================ FILE: src/webpack.config.js ================================================ const {resolve} = require('path'); const CopyPlugin = require('copy-webpack-plugin'); module.exports = (env, argv) => ({ context: __dirname, entry: { 'audion-devtools': './devtools/main', 'audion-panel': './panel/main', 'audion-panelWorker': './panel/worker', }, output: { path: resolve(__dirname, '../build/audion'), }, devtool: argv.mode === 'development' ? 
'source-map' : false, resolve: { extensions: ['', '.webpack.js', '.web.js', '.ts', '.tsx', '.js'], }, plugins: [ new CopyPlugin({ patterns: [ {from: './extraSettingPage/options.html', to: 'options.html'}, {from: './extraSettingPage/options.js', to: 'options.js'}, ], }), ], module: { rules: [ { test: /\.css$/, use: ['style-loader', {loader: 'css-loader', options: {modules: true}}], }, {test: /\.tsx?$/, loader: 'ts-loader'}, {test: /\.js$/, loader: 'source-map-loader'}, { test: /\.(png|jpe?g|gif|svg|eot|ttf|woff|woff2)$/i, // More information here https://webpack.js.org/guides/asset-modules/ type: 'asset', }, ], }, }); ================================================ FILE: test/.jest-puppeteer.config.json ================================================ { "launch": { "headless": false, "devtools": true, "args": [ "--no-sandbox", "--disable-extensions-except=build/audion", "--load-extension=build/audion" ] } } ================================================ FILE: test/.jest.config.json ================================================ { "preset": "jest-puppeteer", "injectGlobals": false, "transform": { "\\.[jt]sx?$": "babel-jest" }, "testMatch": ["!**/.*", "**/*.js"] } ================================================ FILE: test/README.md ================================================ A directory of integration tests. 
/* global browser */

import {it} from '@jest/globals';

/**
 * Smoke test: the puppeteer-launched browser exposes a top-level "browser"
 * target, showing the launch with the extension flags succeeded.
 */
it('browser launches with extension', async () => {
  const allTargets = await browser.targets();
  const topLevelTarget = allTargets.find(
    (candidate) => candidate.type() === 'browser',
  );
  await topLevelTarget.browser();
});