Repository: bbc/r-audio Branch: master Commit: 742fc268242b Files: 52 Total size: 70.2 KB Directory structure: gitextract_r_8nlj7g/ ├── .eslintrc.json ├── .gitignore ├── .npmignore ├── LICENSE ├── README.md ├── examples/ │ ├── README.md │ ├── assets/ │ │ └── js/ │ │ └── bit-crusher.js │ ├── audio-worklet.js │ ├── buffers-channels.js │ ├── complex-effects-graph.js │ ├── custom-nodes.js │ ├── delay-lines.js │ ├── examples.js │ ├── gain-matrix.js │ ├── index.html │ ├── index.js │ ├── media-element.js │ ├── media-stream.js │ └── mutation.js ├── index.js ├── package.json ├── src/ │ ├── audio-nodes/ │ │ ├── analyser.js │ │ ├── audio-worklet.js │ │ ├── biquad-filter.js │ │ ├── buffer-source.js │ │ ├── channel-merger.js │ │ ├── channel-splitter.js │ │ ├── constant-source.js │ │ ├── convolver.js │ │ ├── delay.js │ │ ├── dynamics-compressor.js │ │ ├── gain.js │ │ ├── iir-filter.js │ │ ├── index.js │ │ ├── media-element-source.js │ │ ├── media-stream-source.js │ │ ├── oscillator.js │ │ ├── panner.js │ │ ├── stereo-panner.js │ │ └── wave-shaper.js │ ├── base/ │ │ ├── audio-context.js │ │ ├── audio-node.js │ │ ├── component.js │ │ ├── connectable-node.js │ │ └── scheduled-source.js │ └── graph/ │ ├── cycle.js │ ├── extensible.js │ ├── pipeline.js │ ├── split-channels.js │ ├── split.js │ └── utils.js └── webpack.config.js ================================================ FILE CONTENTS ================================================ ================================================ FILE: .eslintrc.json ================================================ { "extends": [ "standard", "eslint:recommended", "plugin:react/recommended" ], "env": { "browser": true }, "rules": { "react/prop-types": "off", "react/no-deprecated": "off", "no-unused-vars": "off", "space-before-function-paren": "off", "no-return-assign": "off", "semi": [ "error", "always" ] } } ================================================ FILE: .gitignore ================================================ .DS_Store *.tex 
evaluation native_test* node_modules dist ================================================ FILE: .npmignore ================================================ examples/ evaluation/ .eslintrc.json .DS_Store ================================================ FILE: LICENSE ================================================ Copyright (c) 2018-present British Broadcasting Corporation All rights reserved (http://www.bbc.co.uk) and r-audio Contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: README.md ================================================ # r-audio A library of React components for building [Web Audio](https://www.w3.org/TR/webaudio/) graphs. 
## Objectives 👉 make Web Audio graph code more readable and representative of the graph shape 👉 make it easier to create reusable graphs 👉 make state management easier with React's one-way data bindings and single source of state 👉 represent any arbitrary directed graphs in JSX 👉 support all non-deprecated audio nodes including `AudioWorklet` 👉 allow interspersed HTML components in audio components ## Installation ```bash npm install r-audio ``` ## Usage example Stereo waveshaper + amplitude modulation on a WAV loop ```jsx this.audioContext = ctx}> ``` ## Useful links - [Full usage examples](https://github.com/bbc/r-audio/tree/master/examples) - [API Reference](https://github.com/bbc/r-audio/wiki/API-Reference) ## Development setup ```bash npm install npm run dev ``` The demo page will be served at `localhost:8080`. Use a recent version of Chrome or Firefox for the best experience. Firefox Web Audio developer tool is especially handy (bear in mind Firefox does not support AudioWorklet as of 17 April 2018). ================================================ FILE: examples/README.md ================================================ # r-audio examples The files in this directory constitute a demo web app where you can test the examples and experiment. To launch the demo app, run `npm run dev`. Every example has notes/explanations embedded as HTML along the `r-audio` components. ================================================ FILE: examples/assets/js/bit-crusher.js ================================================ // Copyright (c) 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. /** * A AudioWorklet-based BitCrusher demo from the spec example. 
* * @class BitCrusher * @extends AudioWorkletProcessor * @see https://webaudio.github.io/web-audio-api/#the-bitcrusher-node */ class BitCrusher extends AudioWorkletProcessor { static get parameterDescriptors() { return [ {name: 'bitDepth', defaultValue: 12, minValue: 1, maxValue: 16}, { name: 'frequencyReduction', defaultValue: 0.5, minValue: 0, maxValue: 1, }, ]; } constructor(options) { super(options); this.phase_ = 0; this.lastSampleValue_ = 0; } process(inputs, outputs, parameters) { let input = inputs[0]; let output = outputs[0]; let bitDepth = parameters.bitDepth; let frequencyReduction = parameters.frequencyReduction; for (let channel = 0; channel < input.length; ++channel) { let inputChannel = input[channel]; let outputChannel = output[channel]; for (let i = 0; i < inputChannel.length; ++i) { let step = Math.pow(0.5, bitDepth[i]); this.phase_ += frequencyReduction[i]; if (this.phase_ >= 1.0) { this.phase_ -= 1.0; this.lastSampleValue_ = step * Math.floor(inputChannel[i] / step + 0.5); } outputChannel[i] = this.lastSampleValue_; } } return true; } } registerProcessor('bit-crusher', BitCrusher); ================================================ FILE: examples/audio-worklet.js ================================================ import React from 'react'; import { render } from 'react-dom'; import { RAnalyser, RAudioContext, RAudioWorklet, RDelay, RGain, RMediaStreamSource, RPipeline, RSplitChannels } from '../index.js'; export default class AudioWorkletExample extends React.Component { constructor() { super(); this.state = { stream: null, ready: false }; } loadWorkletAndStream(ctx) { const streamPromise = navigator.mediaDevices.getUserMedia({ audio: true, video: false }) .then(stream => this.setState({ stream })); const workletPromise = ctx.audioWorklet .addModule('/assets/js/bit-crusher.js'); Promise.all([ streamPromise, workletPromise ]) .then(() => this.setState({ ready: true })); } render() { return ( this.loadWorkletAndStream.bind(this)(ctx)}>

Audio Worklet

This example demonstrates how to use an AudioWorklet in r-audio. It also shows an RAnalyser in action.

Notice that the graph only renders after both the media stream and the worklet have been initialised.

{ this.state.ready ? ( { proxy => { const data = new Float32Array(proxy.frequencyBinCount); // when this function first runs, there will be no data yet // so wait a bit // in reality one might want to save the `proxy` object and call it independently // for instance, inside a `requestAnimationFrame` call setTimeout(() => { proxy.getFloatFrequencyData(data); console.log(data); // eslint-disable-line no-console }, 3000); } } ) : null }
); } } ================================================ FILE: examples/buffers-channels.js ================================================ import React from 'react'; import { render } from 'react-dom'; import { RAudioContext, RBufferSource, RConstantSource, RConvolver, RDynamicsCompressor, RGain, ROscillator, RPipeline, RSplitChannels, RWaveShaper } from '../index.js'; export default class BuffersAndChannels extends React.Component { constructor() { super(); this.state = { buffer: null }; } componentDidMount() { fetch('/assets/audio/b.wav') .then(res => res.arrayBuffer()) .then(ab => this.audioContext.decodeAudioData(ab)) .then(buffer => this.setState({ buffer })); } makeDistortionCurve(amount) { var k = typeof amount === 'number' ? amount : 50, n_samples = 44100, curve = new Float32Array(n_samples), deg = Math.PI / 180, i = 0, x; for (; i < n_samples; ++i) { x = i * 2 / n_samples - 1; curve[i] = (3 + k) * x * 20 * deg / (Math.PI + k * Math.abs(x)); } return curve; } render() { return ( this.audioContext = ctx}>

Buffers and Channels

This example demonstrates initialising an RBufferSource with a decoded AudioBuffer.

It also shows how to process channels separately.

); } } ================================================ FILE: examples/complex-effects-graph.js ================================================ import React from 'react'; import { render } from 'react-dom'; import { RAudioContext, RBiquadFilter, RGain, ROscillator, RPipeline, RSplit, RStereoPanner } from '../index.js'; const pipeline = (detune, gain, filterFreq, pan) => (

Complex effects graph

This example demonstrates how r-audio handles various graph configurations, including non-connectable nodes in pipelines and deeply nested parallel/serial connections.

It also shows how to create ‘dead-end’ branches using the disconnected attribute.

); export default class ComplexGraph extends React.Component { constructor(props) { super(props); this.state = { detune: 50, gain: 0.4, filterFreq: 600, pan: 0 }; setInterval(() => { this.setState({ detune: Math.random() * 100, gain: Math.random() / 2 + 0.5, filterFreq: Math.random() * 3000 + 200, pan: Math.random() * 2 - 1 }); }, 2000); } render() { return pipeline(this.state.detune, this.state.gain, this.state.filterFreq, this.state.pan); } } ================================================ FILE: examples/custom-nodes.js ================================================ import React from 'react'; import { render } from 'react-dom'; import { RAudioContext, RCycle, RDelay, RExtensible, RGain, RMediaElementSource, RPipeline, RSplit } from '../index.js'; class DelayLine extends RExtensible { renderGraph() { return ( ); } } export default class CustomNodeExample extends React.Component { constructor(props) { super(props); this.audio = new Audio('/assets/audio/clarinet.mp3'); this.audio.autoplay = true; this.audio.loop = true; } render() { return (

Creating custom nodes

This example demonstrates how to create custom r-audio nodes. This can be done by extending RExtensible, which is itself an extension of RPipeline. We define the contents of our custom node by overriding the renderGraph method, which simply returns a bit of JSX, just like React components' render method.

); } } ================================================ FILE: examples/delay-lines.js ================================================ /** **/ import React from 'react'; import { render } from 'react-dom'; import { RAudioContext, RBiquadFilter, RCycle, RDelay, RGain, ROscillator, RPipeline, RSplit, RStereoPanner } from '../index.js'; export default class DelayLineExample extends React.Component { constructor(props) { super(props); this.state = { periodicWave: null, start: 0, stop: 3 }; // a simple waveform can be created with a series of periodically repeating numbers const realComponents = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1]; // imaginary components can all be 0 for demo purposes const imagComponents = realComponents.map(() => 0); this.onContextInit = ctx => { this.setState({ periodicWave: ctx.createPeriodicWave( Float32Array.from(realComponents), Float32Array.from(imagComponents), { disableNormalization: true } ) }); // schedule restart of the oscillator after 6 seconds setInterval(() => this.setState({ start: ctx.currentTime, stop: ctx.currentTime + 3 }), 6000); }; } render() { return (

Delay lines & scheduling

This example demonstrates how one can create feedback delay lines using the RCycle component. It also shows how scheduling works.

Make sure to always include a RGain with gain < 1 to avoid infinite feedback.

); } } ================================================ FILE: examples/examples.js ================================================ import React from 'react'; import AudioWorkletExample from './audio-worklet.js'; import DelayLineExample from './delay-lines.js'; import ComplexGraph from './complex-effects-graph.js'; import BuffersAndChannels from './buffers-channels.js'; import MediaElementSourceExample from './media-element.js'; import MediaStreamSourceExample from './media-stream.js'; import Mutation from './mutation.js'; import GainMatrixExample from './gain-matrix.js'; import CustomNodeExample from './custom-nodes.js'; const examples = { 'audio-worklet': , 'delay-lines-scheduling': , 'complex-effects-graph': , 'buffers-channels': , 'media-element': , 'media-stream': , 'mutation': , 'gain-matrix': , 'custom-node': , }; export default examples; ================================================ FILE: examples/gain-matrix.js ================================================ import React from 'react'; import { render } from 'react-dom'; import { RAudioContext, RBufferSource, RExtensible, RGain, RPipeline, RSplit, RSplitChannels } from '../index.js'; class GainMatrix extends RExtensible { constructor(props) { super(props); const gains = (new Array(props.channelCount || 2)) .fill((new Array(props.channelCount || 2)).fill(1)); this.state = { gains }; this.makeRow = this.makeRow.bind(this); } onGainInput(e) { const [x, y] = e.target.name.split('').map(v => parseInt(v)); const gains = this.state.gains.slice().map(arr => arr.slice()); gains[x][y] = e.target.value; this.setState({ gains }); } makeRow(row, rowIndex) { return ( { row.map((cellGain, columnIndex) => (

)) }
); } renderGraph() { return ( { this.state.gains.map(this.makeRow) } ); } } export default class GainMatrixExample extends React.Component { constructor() { super(); this.state = { buffer: null }; } componentDidMount() { // In Safari decodeAudioData doesn't return a promise // so we need to run this as both a callback and a promise handler const loadBuffer = buffer => buffer && this.setState({ buffer }); fetch('/assets/audio/clarinet.mp3') .then(res => res.arrayBuffer()) .then(ab => this.audioContext.decodeAudioData(ab, loadBuffer, null)) .then(loadBuffer); } render() { return ( this.audioContext = ctx}>

Gain Matrix

This example (courtesy of Tom Nixon from BBC R&D) shows how we can create complex multichannel graphs using RSplitChannels and explicit connectToChannel props.

Each channel of the stereo input signal is routed to both channels of the output signal and each branch is processed by a separate RGain. This kind of graph is particularly useful when binauralising audio.

Stereo audio recording by Freesound user debudding (Public Domain).

); } } ================================================ FILE: examples/index.html ================================================ r-audio
================================================ FILE: examples/index.js ================================================ import React from 'react'; import { render } from 'react-dom'; import examples from './examples.js'; const example = location.hash.slice(1); const onExampleChange = e => { location.hash = e.target.value; location.reload(); }; render( (

{ examples[example] || null }
), document.getElementById('app') ); ================================================ FILE: examples/media-element.js ================================================ import React from 'react'; import { render } from 'react-dom'; import { RAudioContext, RCycle, RDelay, RGain, RMediaElementSource, RPipeline } from '../index.js'; export default class MediaElementSourceExample extends React.Component { constructor(props) { super(props); this.audio = new Audio('/assets/audio/clarinet.mp3'); this.audio.autoplay = true; this.audio.loop = true; } render() { return (

Media Element

This example demonstrates plugging an HTML5 Audio element into the r-audio graph using RMediaElementSource. A reference to the audio element could also be obtained via React refs.

); } } ================================================ FILE: examples/media-stream.js ================================================ import React from 'react'; import { render } from 'react-dom'; import { RAudioContext, RCycle, RDelay, RGain, RMediaStreamSource, RPanner, RPipeline } from '../index.js'; export default class MediaStreamSourceExample extends React.Component { constructor(props) { super(props); this.state = { stream: null }; navigator.mediaDevices.getUserMedia({ audio: true, video: false }) .then(stream => this.setState({ stream })); } render() { return this.state.stream ? (

Media Stream

This example demonstrates plugging a MediaStream object (from either a WebRTC peer or the native audio input device) into the r-audio graph using RMediaStreamSource.

) : null; } } ================================================ FILE: examples/mutation.js ================================================ import React from 'react'; import { render } from 'react-dom'; import { RAudioContext, RBiquadFilter, RGain, ROscillator, RPipeline, RSplit, RStereoPanner } from '../index.js'; export default class Mutation extends React.Component { constructor() { super(); this.nodeCache = [ , , ]; this.state = { nodes: this.nodeCache, toggle: true, freq: 440 }; this.change = () => { const changed = this.nodeCache.slice(); changed.splice(1, 1, ); this.setState({ nodes: changed }); }; } render() { return (

Mutation

This example demonstrates how r-audio graphs can be mutated via React state. r-audio takes care of reconfiguring the connections and instantiating new nodes as necessary.
{this.state.nodes}
); } } ================================================ FILE: index.js ================================================ import RAudioContext from './src/base/audio-context.js'; import RPipeline from './src/graph/pipeline.js'; import RSplit from './src/graph/split.js'; import RCycle from './src/graph/cycle.js'; import RExtensible from './src/graph/extensible.js'; import RSplitChannels from './src/graph/split-channels.js'; import { RAnalyser, RAudioWorklet, RBiquadFilter, RBufferSource, RChannelMerger, RChannelSplitter, RConvolver, RConstantSource, RDelay, RDynamicsCompressor, RGain, RIIRFilter, RMediaElementSource, RMediaStreamSource, ROscillator, RPanner, RStereoPanner, RWaveShaper } from './src/audio-nodes/index.js'; export { RAnalyser, RAudioContext, RAudioWorklet, RBiquadFilter, RBufferSource, RChannelMerger, RChannelSplitter, RConvolver, RConstantSource, RCycle, RDelay, RDynamicsCompressor, RGain, RIIRFilter, RMediaElementSource, RMediaStreamSource, ROscillator, RPanner, RPipeline, RSplit, RSplitChannels, RStereoPanner, RWaveShaper, RExtensible }; ================================================ FILE: package.json ================================================ { "name": "r-audio", "version": "1.2.0", "description": "A library of React components for building Web Audio graphs. 
", "module": "dist/r-audio.min.js", "main": "dist/r-audio.min.js", "scripts": { "dev": "NODE_ENV=development webpack-dev-server", "build": "webpack", "test": "echo \"Error: no test specified\" && exit 1", "prepublishOnly": "npm run build" }, "repository": { "type": "git", "url": "git+https://github.com/bbc/r-audio.git" }, "keywords": [ "web-audio", "react" ], "author": "jakubfiala", "license": "Apache-2.0", "bugs": { "url": "https://github.com/bbc/r-audio/issues" }, "homepage": "https://github.com/bbc/r-audio#readme", "devDependencies": { "babel-cli": "^6.26.0", "babel-core": "^6.26.3", "babel-loader": "^7.1.5", "babel-preset-env": "^1.7.0", "babel-preset-react": "^6.24.1", "eslint": "^4.19.1", "eslint-config-standard": "^11.0.0", "eslint-loader": "^2.1.0", "eslint-plugin-import": "^2.14.0", "eslint-plugin-node": "^6.0.1", "eslint-plugin-promise": "^3.8.0", "eslint-plugin-react": "^7.11.1", "eslint-plugin-standard": "^3.1.0", "uglifyjs-webpack-plugin": "^1.3.0", "webpack": "^4.20.2", "webpack-cli": "^3.1.0", "webpack-dev-server": "^3.1.14" }, "peerDependencies": { "prop-types": "^15.6.2", "react": "^16.5.0", "react-dom": "^16.5.0" }, "dependencies": { "prop-types": "^15.6.2", "react": "^16.5.0", "react-dom": "^16.5.0" } } ================================================ FILE: src/audio-nodes/analyser.js ================================================ import React from 'react'; import RConnectableNode from './../base/connectable-node.js'; import PropTypes from 'prop-types'; export default class RAnalyser extends RConnectableNode { constructor(props) { super(props); this.params = { fftSize: this.props.fftSize, minDecibels: this.props.minDecibels, maxDecibels: this.props.maxDecibels, smoothingTimeConstant: this.props.smoothingTimeConstant }; } componentWillMount() { super.componentWillMount(); if (!this.node) { this.node = this.context.audio.createAnalyser(); this.context.nodes.set(this.props.identifier, this.node); } this.updateParams = this.updateParams.bind(this); 
this.updateParams(this.props); } render() { const analyserProxy = Object.freeze({ getFloatFrequencyData: array => { return this.node.getFloatFrequencyData(array); }, getByteFrequencyData: array => { return this.node.getByteFrequencyData(array); }, getFloatTimeDomainData: array => { return this.node.getFloatTimeDomainData(array); }, getByteTimeDomainData: array => { return this.node.getByteTimeDomainData(array); }, frequencyBinCount: this.node.frequencyBinCount }); this.props.children(analyserProxy); return super.render(); } } RAnalyser.propTypes = { children: PropTypes.func.isRequired }; ================================================ FILE: src/audio-nodes/audio-worklet.js ================================================ /* global AudioWorkletNode */ import React from 'react'; import RConnectableNode from './../base/connectable-node.js'; export default class RAudioWorklet extends RConnectableNode { constructor(props) { super(props); this.params = Object.assign({}, this.props); } componentWillMount() { super.componentWillMount(); if (!this.node) { this.node = new AudioWorkletNode(this.context.audio, this.props.worklet); this.context.nodes.set(this.props.identifier, this.node); } this.updateParams = this.updateParams.bind(this); this.updateParams(this.props); } } ================================================ FILE: src/audio-nodes/biquad-filter.js ================================================ import React from 'react'; import RConnectableNode from './../base/connectable-node.js'; import PropTypes from 'prop-types'; export default class RBiquadFilter extends RConnectableNode { constructor(props) { super(props); this.params = { frequency: props.frequency, detune: props.detune, Q: props.Q, gain: props.gain, type: props.type }; } componentWillMount() { super.componentWillMount(); if (!this.node) { this.node = this.context.audio.createBiquadFilter(); this.context.nodes.set(this.props.identifier, this.node); } this.updateParams = this.updateParams.bind(this); 
this.updateParams(this.props); } render() { if (typeof this.props.children === 'function') { const filterProxy = Object.freeze({ getFrequencyResponse: (frequencyHz, magResponse, phaseResponse) => { return this.node.getFrequencyResponse(frequencyHz, magResponse, phaseResponse); } }); this.props.children(filterProxy); } return super.render(); } } RBiquadFilter.propTypes = { children: PropTypes.func }; ================================================ FILE: src/audio-nodes/buffer-source.js ================================================ import React from 'react'; import RScheduledSource from './../base/scheduled-source.js'; export default class RBufferSource extends RScheduledSource { constructor(props) { super(props); this.params = { buffer: props.buffer || null, detune: props.detune || 0, loop: props.loop || false, loopStart: props.loopStart || 0, loopEnd: props.loopEnd || 0, playbackRate: props.playbackRate || 1 }; this.onEnded = this.onEnded.bind(this); this.instantiateNode = this.instantiateNode.bind(this); } instantiateNode() { if (!this.node) { this.node = this.context.audio.createBufferSource(); this.node.addEventListener('ended', this.onEnded); this.context.nodes.set(this.props.identifier, this.node); } this.updateParams = this.updateParams.bind(this); this.updateParams(this.props); } // we need to make a new AudioBufferSourceNode after playback ends onEnded(e) { super.onEnded(e); this.instantiateNode(); this.connectToAllDestinations(this.props.destination, this.node); if (this.props.onEnded) this.props.onEnded(e); } componentWillMount() { super.componentWillMount(); this.instantiateNode(); } componentDidMount() { this.readyToPlay = !!this.props.buffer; super.componentDidMount(); } shouldStartWithPropsChange(prevProps, currentProps) { return prevProps.buffer !== currentProps.buffer; } componentDidUpdate(prevProps, prevState) { this.readyToPlay = !!this.props.buffer; super.componentDidUpdate(prevProps, prevState); } } 
================================================ FILE: src/audio-nodes/channel-merger.js ================================================ import React from 'react'; import RConnectableNode from './../base/connectable-node.js'; export default class RChannelMerger extends RConnectableNode { constructor(props) { super(props); this.params = { channelCount: 1 }; } componentWillMount() { super.componentWillMount(); if (!this.node) { this.node = this.context.audio.createChannelMerger(this.props.channelCount); this.context.nodes.set(this.props.identifier, this.node); } this.updateParams = this.updateParams.bind(this); this.updateParams(this.props); } } ================================================ FILE: src/audio-nodes/channel-splitter.js ================================================ import React from 'react'; import RConnectableNode from './../base/connectable-node.js'; export default class RChannelSplitter extends RConnectableNode { constructor(props) { super(props); this.params = { channelCount: 1 }; } // override of RAudioNode.getConnectionArguments // because we need to have some default many-to-many behaviour getConnectionArguments(destination, destinationIndex, toParam) { const connectTarget = toParam ? destination[toParam] : destination; // we use modulo for channel distribution // in case we're connecting to more nodes than we have channels const fromChannel = destinationIndex % this.props.channelCount; // normally we expect to connect to the first channel of each destination // but this can be overriden const toChannel = !isNaN(this.props.connectToChannel) ? this.props.connectToChannel : 0; return [ connectTarget ].concat(toParam ? 
export default class RConvolver extends RConnectableNode {
  /**
   * RConvolver wraps a Web Audio ConvolverNode.
   *
   * Props:
   *  - buffer    {AudioBuffer} impulse response used for convolution (default: null)
   *  - normalize {boolean}     whether the impulse response is scaled by an
   *                            equal-power normalisation on assignment (default: true)
   */
  constructor(props) {
    super(props);
    this.params = {
      buffer: props.buffer || null,
      // BUG FIX: `props.normalize || true` always evaluated to `true`
      // (false || true === true), so normalisation could never be disabled.
      // Honour an explicitly supplied `false` while keeping `true` as default.
      normalize: props.normalize !== undefined ? props.normalize : true
    };
  }

  componentWillMount() {
    super.componentWillMount();

    // Create the underlying ConvolverNode once and register it in the
    // context's node map so the graph manager can wire connections.
    if (!this.node) {
      this.node = this.context.audio.createConvolver();
      this.context.nodes.set(this.props.identifier, this.node);
    }

    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
export default class RDynamicsCompressor extends RConnectableNode {
  /**
   * RDynamicsCompressor wraps a Web Audio DynamicsCompressorNode.
   *
   * Props (all optional, with the node's spec defaults):
   *  - threshold {number} dB level above which compression starts (default -24; valid up to 0)
   *  - knee      {number} dB range for the smooth transition (default 30; 0 is valid)
   *  - ratio     {number} input/output dB change ratio (default 12)
   *  - attack    {number} seconds to reduce gain by 10 dB (default 0.003; 0 is valid)
   *  - release   {number} seconds to increase gain by 10 dB (default 0.25)
   */
  constructor(props) {
    super(props);

    // BUG FIX: the previous `props.x || default` pattern silently replaced
    // legal falsy values (threshold: 0, knee: 0, attack: 0) with the
    // defaults. Use an explicit undefined check instead.
    const orDefault = (value, fallback) => (value !== undefined ? value : fallback);

    this.params = {
      threshold: orDefault(props.threshold, -24),
      knee: orDefault(props.knee, 30),
      ratio: orDefault(props.ratio, 12),
      attack: orDefault(props.attack, 0.003),
      release: orDefault(props.release, 0.25)
    };
  }

  componentWillMount() {
    super.componentWillMount();

    // Create the node once and register it for graph wiring.
    if (!this.node) {
      this.node = this.context.audio.createDynamicsCompressor();
      this.context.nodes.set(this.props.identifier, this.node);
    }

    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
export default class RIIRFilter extends RConnectableNode {
  /**
   * RIIRFilter wraps a Web Audio IIRFilterNode.
   *
   * Props:
   *  - feedforward {number[]} feedforward (numerator) coefficients, 1–20 values
   *  - feedback    {number[]} feedback (denominator) coefficients, 1–20 values
   *  - children    {function} optional render prop receiving a frozen proxy
   *                that exposes getFrequencyResponse()
   *
   * IIRFilterNode has no AudioParams, so `params` stays empty.
   */
  constructor(props) {
    super(props);
    this.params = {};
  }

  componentWillMount() {
    super.componentWillMount();

    if (!this.node) {
      // BUG FIX: createIIRFilter takes two positional array arguments
      // (feedforward, feedback) — not an options object. The previous call
      // `createIIRFilter({ feedback, feedforward })` passed the object as the
      // feedforward argument and `undefined` as feedback.
      this.node = this.context.audio.createIIRFilter(
        this.props.feedforward,
        this.props.feedback
      );
      this.context.nodes.set(this.props.identifier, this.node);
    }

    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  render() {
    // Expose a read-only proxy so consumers can inspect the frequency
    // response without receiving the raw AudioNode.
    if (typeof this.props.children === 'function') {
      const filterProxy = Object.freeze({
        getFrequencyResponse: (frequencyHz, magResponse, phaseResponse) => {
          return this.node.getFrequencyResponse(frequencyHz, magResponse, phaseResponse);
        }
      });

      this.props.children(filterProxy);
    }

    return super.render();
  }
}

RIIRFilter.propTypes = {
  children: PropTypes.func
};
RWaveShaper from './wave-shaper.js'; export { RAnalyser, RAudioWorklet, RBiquadFilter, RBufferSource, RChannelMerger, RChannelSplitter, RConstantSource, RConvolver, RDelay, RDynamicsCompressor, RGain, RIIRFilter, RMediaElementSource, RMediaStreamSource, ROscillator, RStereoPanner, RPanner, RWaveShaper }; ================================================ FILE: src/audio-nodes/media-element-source.js ================================================ import React from 'react'; import RAudioNode from './../base/audio-node.js'; export default class RMediaElementSource extends RAudioNode { constructor(props) { super(props); this.params = {}; this.createNode = this.createNode.bind(this); } createNode() { this.node = this.context.audio.createMediaElementSource(this.props.element); this.context.nodes.set(this.props.identifier, this.node); } componentWillMount() { super.componentWillMount(); if (!this.node) { this.createNode(); } this.updateParams = this.updateParams.bind(this); this.updateParams(this.props); } componentWillReceiveProps(nextProps) { if (nextProps.element !== this.props.element) this.createNode(); } } ================================================ FILE: src/audio-nodes/media-stream-source.js ================================================ import React from 'react'; import RAudioNode from './../base/audio-node.js'; export default class RMediaStreamSource extends RAudioNode { constructor(props) { super(props); this.params = { buffer: props.buffer || null }; this.createNode = this.createNode.bind(this); } createNode() { this.node = this.context.audio.createMediaStreamSource(this.props.stream); this.context.nodes.set(this.props.identifier, this.node); } componentWillMount() { super.componentWillMount(); if (!this.node) { this.createNode(); } this.updateParams = this.updateParams.bind(this); this.updateParams(this.props); } componentWillReceiveProps(nextProps) { if (nextProps.stream !== this.props.stream) this.createNode(); } } 
================================================ FILE: src/audio-nodes/oscillator.js ================================================
import React from 'react';
import RScheduledSource from './../base/scheduled-source.js';

// An OscillatorNode wrapper; start/stop scheduling is inherited from RScheduledSource.
export default class ROscillator extends RScheduledSource {
  constructor(props) {
    super(props);
    // Web Audio parameters mirrored from props by updateParams
    this.params = {
      frequency: props.frequency,
      detune: props.detune,
      type: props.type,
      periodicWave: props.periodicWave
    };
    this.instantiateNode = this.instantiateNode.bind(this);
    // oscillators need no async resource, so they are immediately playable
    this.readyToPlay = true;
    this.onEnded = this.onEnded.bind(this);
  }

  // forwards the 'ended' event to the owner after the superclass handles reinstantiation
  onEnded(e) {
    super.onEnded(e);
    if (this.props.onEnded) this.props.onEnded(e);
  }

  // (re)creates the underlying OscillatorNode; called on mount and after playback
  // ends, because Web Audio source nodes are single-use
  instantiateNode() {
    if (!this.node || this.playbackScheduled === false) {
      this.node = this.context.audio.createOscillator();
      this.node.addEventListener('ended', this.onEnded);
      if (this.props.periodicWave) {
        this.node.setPeriodicWave(this.props.periodicWave);
      }
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  componentWillMount() {
    super.componentWillMount();
    this.instantiateNode();
  }

  componentWillReceiveProps(nextProps) {
    super.componentWillReceiveProps(nextProps);
    // periodicWave is not an AudioParam, so it needs a dedicated setter call
    if (this.props.periodicWave !== nextProps.periodicWave) {
      this.node.setPeriodicWave(nextProps.periodicWave);
    }
  }
}
================================================ FILE: src/audio-nodes/panner.js ================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';

// A PannerNode wrapper exposing the full set of spatialisation parameters as props.
export default class RPanner extends RConnectableNode {
  constructor(props) {
    super(props);
    this.params = {
      panningModel: this.props.panningModel,
      distanceModel: this.props.distanceModel,
      refDistance: this.props.refDistance,
      maxDistance: this.props.maxDistance,
      rolloffFactor: this.props.rolloffFactor,
      coneInnerAngle: this.props.coneInnerAngle,
      coneOuterAngle: this.props.coneOuterAngle,
      coneOuterGain: this.props.coneOuterGain,
      positionX: this.props.positionX,
      positionY: this.props.positionY,
      positionZ: this.props.positionZ,
      orientationX: this.props.orientationX,
      orientationY: this.props.orientationY,
      orientationZ: this.props.orientationZ
    };
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      this.node = this.context.audio.createPanner();
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================ FILE: src/audio-nodes/stereo-panner.js ================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';

// A StereoPannerNode wrapper; `pan` is its only parameter.
export default class RStereoPanner extends RConnectableNode {
  constructor(props) {
    super(props);
    this.params = {
      pan: props.pan
    };
  }

  componentWillMount() {
    super.componentWillMount();
    const props = this.props; // NOTE(review): unused local — candidate for removal
    if (!this.node) {
      this.node = this.context.audio.createStereoPanner();
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================ FILE: src/audio-nodes/wave-shaper.js ================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';

// A WaveShaperNode wrapper; `curve` and `oversample` map straight onto node properties.
export default class RWaveShaper extends RConnectableNode {
  constructor(props) {
    super(props);
    this.params = {
      curve: props.curve || null,
      oversample: props.oversample || 'none'
    };
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      this.node = this.context.audio.createWaveShaper();
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================ FILE: src/base/audio-context.js ================================================
import React from 'react';
import PropTypes from 'prop-types';
import RComponent from './component.js';

window.AudioContext = window.AudioContext || window.webkitAudioContext || null;

if (!window.AudioContext) {
  throw new Error(
    'Could not find AudioContext. This may be because your browser does not support Web Audio.');
}

/**
 * Contains and manages the Web Audio graph.
 * All immediate children connect directly to its Destination.
 *
 * @class RAudioContext (name)
 */
export default class RAudioContext extends React.Component {
  constructor(props) {
    super(props);
    // repository of all nodes in the graph
    // keyed by Symbols
    this.nodes = new Map();
    this._context = new AudioContext(props.options);
    if (this.props.onInit) this.props.onInit(this._context);
    if (this.props.debug) {
      // expose the registry for console inspection in debug mode
      window.RAudioNodeMap = this.nodes;
    }
  }

  componentWillMount() {
    this._context.resume();
  }

  // provides the context, registry and debug flag to all descendant RComponents
  getChildContext() {
    return {
      audio: this._context,
      debug: this.props.debug,
      nodes: this.nodes
    };
  }

  componentWillUnmount() {
    this._context.suspend();
  }

  render() {
    const children = React.Children
      .toArray(this.props.children)
      .map(child => {
        // non-r-audio children pass through untouched
        if (!RComponent.isPrototypeOf(child.type)) return child;
        const audioContextProps = {
          destination: () => this._context.destination,
          identifier: Symbol(child.type.name)
        };
        return React.cloneElement(child, audioContextProps);
      });
    if (this.props.debug) {
      // NOTE(review): the wrapping debug JSX markup appears to have been lost
      // in text extraction; only its text content survives below.
      return (
RAudioContext
    {children}
); }
    return children || [];
  }
}

RAudioContext.childContextTypes = {
  audio: PropTypes.instanceOf(AudioContext),
  nodes: PropTypes.instanceOf(Map),
  debug: PropTypes.bool
};
================================================ FILE: src/base/audio-node.js ================================================
import React from 'react';
import RComponent from './component.js';

/**
 * Any RComponent that corresponds to an AudioNode is a RAudioNode
 *
 * @class RAudioNode (name)
 */
export default class RAudioNode extends RComponent {
  constructor(props) {
    super(props);
    // internal AudioNode instance
    this.node = null;
    // dictionary of AudioNode parameters (either AudioParams or object properties)
    this.params = {};
    this.connectToAllDestinations = this.connectToAllDestinations.bind(this);
    this.setParam = this.setParam.bind(this);
  }

  // recursively builds up a list of nodes pointed to by IDs or lists of IDs
  flattenPointers(destinations, flattened = []) {
    for (let element of destinations) {
      if (Array.isArray(element)) {
        this.flattenPointers(element, flattened);
      } else if (typeof element === 'symbol') {
        // Symbols are identifiers into the shared node registry
        flattened.push(this.context.nodes.get(element));
      } else {
        flattened.push(element);
      }
    }
    return flattened;
  }

  /**
   * Generates arguments for AudioNode.connect
   * Useful because we can, for instance, override the channel assignment logic for ChannelSplitter etc.
   *
   * @param {function} destination The AudioNode to connect to
   * @param {number} destinationIndex The index of the AudioNode among other destinations
   * @param {string|null} toParam The name of the AudioParam to connect to (or undefined)
   * @param {number} fromChannel The index of the chosen output channel of this node (default is 0)
   * @param {number} toChannel The index of the chosen input channel of the destination node (default is 0)
   */
  getConnectionArguments(destination, destinationIndex, toParam, fromChannel = 0, toChannel = 0) {
    const connectTarget = toParam ? destination[toParam] : destination;
    // AudioParam connections take no channel indices
    return [ connectTarget ].concat(toParam ?
[] : [ fromChannel, toChannel ]); }

  /**
   * Connects the given AudioNode to this RAudioNode's destinations.
   * Abstracts away this operation as it's used in multiple lifecycle stages.
   *
   * @param {function} destinationFunction The function that will return the destinations
   * @param {AudioNode} webAudioNode The web audio node
   */
  connectToAllDestinations(destinationFunction, webAudioNode) {
    // drop all previous connections before rewiring
    webAudioNode.disconnect();
    if (destinationFunction && !this.props.disconnected) {
      let destinations = destinationFunction();
      if (!(destinations instanceof Array)) destinations = [ destinations ];
      this.flattenPointers(destinations).forEach((destination, di) => {
        if (destination) {
          const connectArgs = this.getConnectionArguments(
            destination, di, this.props.connectToParam,
            this.props.connectFromChannel, this.props.connectToChannel);
          webAudioNode.connect(...connectArgs);
        }
      });
    }
  }

  componentWillMount() {
    super.componentWillMount();
  }

  componentWillReceiveProps(nextProps) {
    this.updateParams(nextProps);
  }

  componentWillUpdate(nextProps, nextState) {
    // update the node's record in the node registry
    if (this.props.identifier !== nextProps.identifier) {
      this.context.nodes.delete(this.props.identifier);
      this.context.nodes.set(nextProps.identifier, this.node);
    }
  }

  // we use DidUpdate to connect to new destinations,
  // because WillUpdate might get called before the new destinations are ready
  componentDidUpdate(prevProps, prevState) {
    if (prevProps.destination !== this.props.destination) {
      this.connectToAllDestinations(this.props.destination, this.node);
    }
  }

  componentWillUnmount() {
    this.node.disconnect();
    this.context.nodes.delete(this.props.identifier);
  }

  // resolves per-param transition settings: transitionTime/transitionCurve may each
  // be a single value applied to every param, or an object keyed by param name
  resolveTransitionProps(props, propName) {
    const transitionTime = typeof props.transitionTime === 'number'
      ? props.transitionTime
      : props.transitionTime ? props.transitionTime[propName] : null;
    const transitionCurve = typeof props.transitionCurve === 'string'
      ? props.transitionCurve
      : props.transitionCurve ? props.transitionCurve[propName] : null;
    return [ transitionTime, transitionCurve ];
  }

  // updates only Web Audio-related parameters
  // (both AudioParams and regular properties)
  updateParams(props) {
    if (!this.params) return;
    for (let p in this.params) {
      if (!(p in props)) continue;
      const [ transitionTime, transitionCurve ] = this.resolveTransitionProps(props, p);
      if (this.node[p] instanceof AudioParam) {
        this.setParam(this.node[p], props[p], transitionTime, transitionCurve);
      } else if (this.node.parameters && this.node.parameters.has(p)) {
        // AudioWorklet-style parameter maps
        let param = this.node.parameters.get(p);
        this.setParam(param, props[p], transitionTime, transitionCurve);
      } else if (p in this.node) {
        // some browsers (e.g. Chrome) will try to set channelCount and throw an error
        // since we can't use Object.getOwnPropertyDescriptor on the AudioNodes
        // we simply wrap the action in a try-catch
        try {
          if (this.node[p] !== props[p]) this.node[p] = props[p];
        } catch(e) {
          console.warn(`Tried setting ${p} on node`, this.node); // eslint-disable-line no-console
        }
      }
    }
  }

  // schedules a value change on an AudioParam, optionally ramped
  setParam(param, value, transitionTime, transitionCurve) {
    if (transitionCurve) {
      const fn = `${transitionCurve}RampToValueAtTime`;
      // `exponentialRamp` doesn't seem to work on Firefox, so fall back to linear
      try {
        param[fn](value, transitionTime);
      } catch (e) {
        param['linearRampToValueAtTime'](value, transitionTime);
      }
    } else {
      param.setValueAtTime(value, transitionTime || this.context.audio.currentTime);
    }
  }

  componentDidMount() {
    this.connectToAllDestinations(this.props.destination, this.node);
  }

  render() {
    if (this.context.debug) {
      // NOTE(review): the debug JSX markup was lost in text extraction;
      // only its text content and interpolations survive below.
      return (
  • {this.constructor.name} {this.props.name || ''} {this.props.disconnected && 'disconnected' || ''}
    { this.props.connectToParam ? connects to {this.props.connectToParam} : null }
      { Object.keys(this.params).map((p, pi) => { if (!this.props[p] && this.props[p] !== 0) return null; let param = this.props[p]; if (typeof this.props[p] === 'boolean') param = this.props[p].toString(); if (!(['number', 'string', 'boolean'].includes(typeof this.props[p]))) { param = param.constructor.name; } return
    • {p}: {param}
    • ; }) }
  • ); } return null; } } ================================================ FILE: src/base/component.js ================================================ import React from 'react'; import PropTypes from 'prop-types'; /** * Anything that requires an AudioContext is a RComponent * * @class RComponent (name) */ export default class RComponent extends React.Component { componentWillMount() { if (!this.context.audio) throw new ReferenceError('RComponent needs to be a child of a RAudioContext'); } render() { return null; } } RComponent.contextTypes = { audio: PropTypes.instanceOf(window.AudioContext || window.webkitAudioContext), nodes: PropTypes.instanceOf(Map), debug: PropTypes.bool }; ================================================ FILE: src/base/connectable-node.js ================================================ import React from 'react'; import RAudioNode from './audio-node.js'; /** * Any RAudioNode that can be connected to is a RConnectableNode * * @class RConnectableNode (name) */ export default class RConnectableNode extends RAudioNode { componentWillUnmount() { super.componentWillUnmount(); if (this.props.parent) { const parents = this.props.parent(); this.flattenPointers(parents).forEach((parentIdentifier, parentIndex) => { const parent = this.context.nodes.get(parentIdentifier); if (!parent) return; try { parent.disconnect(this.node); } catch (e) { console.warn(e); // eslint-disable-line no-console } }); } } } ================================================ FILE: src/base/scheduled-source.js ================================================ import React from 'react'; import RAudioNode from './audio-node.js'; /** * Any RAudioNode that can be scheduled to start/end is a RScheduledSource * * @class RScheduledSource (name) */ export default class RScheduledSource extends RAudioNode { constructor(props) { super(props); this.readyToPlay = false; this.playbackScheduled = false; this.onEnded = this.onEnded.bind(this); this.schedule = this.schedule.bind(this); } 
onEnded() { this.playbackScheduled = false; // Web Audio will remove the node from the graph after stopping, so reinstantiate it this.instantiateNode(); this.connectToAllDestinations(this.props.destination, this.node); } schedule() { const shouldScheduleStart = typeof this.props.start === 'number' && this.readyToPlay && !this.playbackScheduled && (typeof this.props.stop !== 'number' || this.props.start < this.props.stop); const shouldScheduleStop = typeof this.props.stop === 'number'; if (shouldScheduleStart) { this.node.start(this.props.start || 0, this.props.offset || 0, this.props.duration); this.playbackScheduled = true; } if (shouldScheduleStop) { this.node.stop(this.props.stop); } } /** Overriding this method enables sources to specify special conditions when playback should be rescheduled. e.g. BufferSource should be rescheduled if a new buffer is provided **/ shouldStartWithPropsChange() { return false; } componentDidMount() { super.componentDidMount(); this.schedule(); } componentDidUpdate(prevProps, prevState) { super.componentDidUpdate(prevProps, prevState); if (prevProps.start !== this.props.start || prevProps.stop !== this.props.stop || this.shouldStartWithPropsChange(prevProps, this.props)) { this.schedule(); } } } ================================================ FILE: src/graph/cycle.js ================================================ import React from 'react'; import RAudioNode from './../base/audio-node.js'; import RComponent from './../base/component.js'; import { isConnectable } from './utils.js'; /** * A RComponent which connects each child to itself as well as the destination * * @class RCycle (name) */ export default class RCycle extends RComponent { constructor(props) { super(props); this.inputs = []; } componentWillMount() { super.componentWillMount(); this.context.nodes.set(this.props.identifier, this.inputs); } componentWillUpdate(nextProps, nextState) { // update the node's record in the node registry if (this.props.identifier !== 
nextProps.identifier) { this.context.nodes.delete(this.props.identifier); this.context.nodes.set(nextProps.identifier, this.inputs); } } render() { while (this.inputs.length > 0) this.inputs.pop(); const children = React.Children .toArray(this.props.children) .filter(c => c !== null && c !== []) .map(c => ({ component: c, identifier: Symbol(c.type.name + Date.now()) })) .map((childTuple, childIndex, childrenArray) => { const type = childTuple.component.type; if (RComponent.isPrototypeOf(childTuple.component.type) && isConnectable(childTuple.component)) { this.inputs.push(childTuple.identifier); } const pipelineProps = { destination: () => { let destination = this.props.destination(); const ownNode = this.context.nodes.get(childTuple.identifier); if (!(destination instanceof Array)) destination = [ destination ]; return destination.concat([ ownNode ]); }, identifier: childTuple.identifier }; return React.cloneElement(childTuple.component, pipelineProps); }); if (this.inputs.length === 0) { const destination = this.props.destination(); if (destination instanceof Array) this.inputs.push(...destination); else this.inputs.push(destination); } if (this.context.debug) { return (
  • RCycle
      {children}
  • ); } return children; } }
================================================ FILE: src/graph/extensible.js ================================================
import React from 'react';
import RPipeline from './pipeline.js';

/**
 * A subclass of RPipeline which can be extended to create custom r-audio nodes
 * To create a custom node, subclass RExtensible and override the `renderGraph` method,
 * returning the r-audio graph of your custom node
 *
 * @class RExtensible (name)
 **/
export default class RExtensible extends RPipeline {
  // subclasses override this to supply the custom node's internal graph
  renderGraph() {
    return null;
  }

  // React requires keys on dynamically generated children
  addKeys(child, childIndex) {
    return React.cloneElement(child, { key: childIndex });
  }

  render() {
    // RPipeline.render prefers customChildren over this.props.children
    this.customChildren = [ this.renderGraph() ].map(this.addKeys);
    return super.render();
  }
}
================================================ FILE: src/graph/pipeline.js ================================================
import React from 'react';
import RComponent from './../base/component.js';
import { isConnectable } from './utils.js';

/**
 * A RComponent which connects its children in a series, creating inbound branches if necessary.
 *
 * @class RPipeline (name)
 */
export default class RPipeline extends RComponent {
  constructor(props) {
    super(props);
    this.resolveDestination = this.resolveDestination.bind(this);
    this.resolvePointer = this.resolvePointer.bind(this);
    this.resolveParent = this.resolveParent.bind(this);
  }

  /**
   * Ensures whatever value we get from the `nodes` Map, we resolve it to where the actual AudioNodes are.
   *
   * @param {AudioNode|Array} pointer The value found in the `nodes` Map
   * @return {AudioNode|Array} the actual destination(s)
   */
  resolvePointer(pointer) {
    // we might find that the pointer actually leads us to a list of other pointers (Symbols)
    // this happens, for instance, if the next child is a RSplit
    let resolved = pointer;
    if (pointer instanceof Array) {
      // it could also happen that the pointer leads to an AudioNode reference
      // (esp. if it's an AudioContextDestination)
      resolved = pointer.map(identifier => this.context.nodes.get(identifier) || identifier);
    }
    return resolved;
  }

  /**
   * Tries to provide a destination getter function for the given index in the children array.
   * It optimizes for finding the nearest node in the graph which the given child can connect to.
   *
   * @param {number} currentIndex The current child's index
   * @param {Array} childrenArray The array of all children in the pipeline
   * @return {Function} a function which returns the closest possible destination node
   */
  resolveDestination(currentIndex, childrenArray) {
    let destinationFunction = null;
    if (currentIndex === childrenArray.length - 1) {
      // the last child connects to the pipeline's own destination
      destinationFunction = () => this.props.destination();
    } else if (!isConnectable(childrenArray[currentIndex + 1].component)) {
      // skip over non-connectable successors to the next connectable child
      let childIndex = currentIndex + 1;
      while (childrenArray[++childIndex]) {
        if (isConnectable(childrenArray[childIndex].component)) break;
      }
      // NOTE(review): `childIndex === currentIndex + 1` looks unreachable — the
      // pre-increment above always advances past it; confirm before removing.
      if (childIndex === currentIndex + 1 || !childrenArray[childIndex]) {
        destinationFunction = () => this.props.destination();
      } else {
        destinationFunction = () => this.resolvePointer(this.context.nodes.get(childrenArray[childIndex].identifier));
      }
    } else {
      // common case: connect to the immediate successor
      destinationFunction = () => this.resolvePointer(this.context.nodes.get(childrenArray[currentIndex + 1].identifier));
    }
    return destinationFunction;
  }

  // returns a getter for the identifiers of the child's inbound (parent) nodes,
  // walking backwards from the given index
  resolveParent(currentIndex, childrenArray) {
    if (currentIndex === 0) {
      // NOTE(review): `this.getParent` is not defined on RPipeline in this file,
      // so this resolves to null — confirm intent.
      return this.getParent || null;
    } else {
      const children = childrenArray.slice(0, currentIndex);
      const parents = [];
      let child = children.pop();

      if (RComponent.isPrototypeOf(child.component.type)) {
        // the first preceding RComponent is always a valid parent
        parents.push(child.identifier);
        // if it's a connectable type it's also the only parent
        if (isConnectable(child.component)) return () => parents;
        // if not, continue
        child = children.pop();
      }

      // look for all preceding RComponents until we hit one which is connectable
      while (child) {
        if (isConnectable(child.component) || !RComponent.isPrototypeOf(child.component.type)) break;
        parents.push(child.identifier);
        child = children.pop();
      }

      return () => parents;
    }
  }

  /**
   * Returns an Object containing the given Component and its unique identifier
   *
   * @param {Component} component The React component
   * @return {Object} the identified child object
   */
  createIdentifiedChild(component) {
    const identifiedChild = {
      component,
      identifier: Symbol(component.type.name + Date.now())
    };
    // the first connectable child inherits the pipeline's own identifier,
    // so anything pointing at the pipeline connects to its head
    if (!this.foundFirstConnectableType && isConnectable(component)) {
      identifiedChild.identifier = this.props.identifier;
      this.foundFirstConnectableType = true;
    }
    return identifiedChild;
  }

  /**
   * Returns a clone of the child Component with parent, destination and identifier props added to it
   *
   * @param {Object} identifiedChild The identified child object
   * @param {Number} childIndex The child index
   * @param {Array} childrenArray The children array
   * @return {Component} A clone of the child Component
   */
  createEmbeddableChild(identifiedChild, childIndex, childrenArray) {
    if (!RComponent.isPrototypeOf(identifiedChild.component.type)) return identifiedChild.component;

    const getDestination = this.resolveDestination(childIndex, childrenArray);
    const getParent = this.resolveParent(childIndex, childrenArray);

    const pipelineProps = {
      destination: getDestination,
      parent: getParent,
      identifier: identifiedChild.identifier
    };

    // the last child also carries the pipeline's own channel assignment
    if (childIndex === childrenArray.length - 1) {
      Object.assign(pipelineProps, {
        connectFromChannel: this.props.connectFromChannel || 0,
        connectToChannel: this.props.connectToChannel || 0
      });
    }

    return React.cloneElement(identifiedChild.component, pipelineProps);
  }

  render() {
    this.foundFirstConnectableType = false;
    const originalChildren = React.Children.toArray(this.props.children);
    const children = (this.customChildren || originalChildren)
      .filter(c => c !== null && c !== [])
      // double mapping because the second functor needs to peek ahead on the children array
      .map(this.createIdentifiedChild, this)
      .map(this.createEmbeddableChild, this);

    if (this.context.debug) {
      return (
  • {this.constructor.name}
      {children}
  • ); } return children; } }
================================================ FILE: src/graph/split-channels.js ================================================
import React from 'react';
import RComponent from './../base/component.js';
import RSplit from './split.js';
import RPipeline from './pipeline.js';
import RChannelSplitter from '../audio-nodes/channel-splitter.js';
import RChannelMerger from '../audio-nodes/channel-merger.js';

// This is a helper RComponent which splits the input between its children
// connected in parallel, one channel per branch.
// It's better to use this instead of RChannelSplitter and RChannelMerger
// as it takes care of the channel connections automatically
export default class RSplitChannels extends RComponent {
  render() {
    const children = React.Children
      .toArray(this.props.children)
      .slice(0, this.props.channelCount)
      .map((element, ci) => {
        // each branch defaults to its own channel index unless the child overrides it
        const channelProps = {
          connectFromChannel: 0,
          connectToChannel: element.props.connectToChannel || ci
        };
        return React.cloneElement(element, channelProps);
      });
    // NOTE(review): the wrapping JSX element appears to have been lost in text
    // extraction; only the interpolated children survive below.
    return ( {children} );
  }
}
================================================ FILE: src/graph/split.js ================================================
import React from 'react';
import RAudioNode from './../base/audio-node.js';
import RComponent from './../base/component.js';
import { isConnectable, propertyFromChildOrParent } from './utils.js';

/**
 * A RComponent which connects its children in parallel, creating inbound branches if necessary.
 *
 * @class RSplit (name)
 */
export default class RSplit extends RComponent {
  constructor(props) {
    super(props);
    // identifiers of connectable children; registered under this RSplit's identifier
    this.inputs = [];
  }

  componentWillMount() {
    super.componentWillMount();
    this.context.nodes.set(this.props.identifier, this.inputs);
  }

  componentWillUpdate(nextProps, nextState) {
    // update the node's record in the node registry
    if (this.props.identifier !== nextProps.identifier) {
      this.context.nodes.delete(this.props.identifier);
      this.context.nodes.set(nextProps.identifier, this.inputs);
    }
  }

  render() {
    // empty the array in place so the registry keeps pointing at the same instance
    while (this.inputs.length) this.inputs.pop();

    const children = React.Children
      .toArray(this.props.children)
      .filter(c => c !== null && c !== [])
      .map(c => ({ component: c, identifier: Symbol(c.type.name + Date.now()) }))
      .map((childTuple, childIndex, childrenArray) => {
        if (!RComponent.isPrototypeOf(childTuple.component.type)) return childTuple.component;

        const type = childTuple.component.type;
        if (RComponent.isPrototypeOf(type) && isConnectable(childTuple.component)) {
          this.inputs.push(childTuple.identifier);
        }

        // this rather strange and terse piece of code
        // figures out the channel connections of the RSplit child
        // where children can override the parent (RSplit) settings
        // this is useful for RSplitChannels
        const [ connectFromChannel, connectToChannel ] = [ 'connectFromChannel', 'connectToChannel' ]
          .map(propertyFromChildOrParent(childTuple.component, this));

        const splitProps = {
          destination: this.props.destination,
          identifier: childTuple.identifier,
          connectFromChannel,
          connectToChannel
        };

        return React.cloneElement(childTuple.component, splitProps);
      });

    // with no connectable children, anything pointing here goes straight to the destination
    if (!this.inputs.length) {
      const destination = this.props.destination();
      if (destination instanceof Array) this.inputs.push(...destination);
      else this.inputs.push(destination);
    }

    if (this.context.debug) {
      return (
  • RSplit
      {children}
  • ); } return children; } } ================================================ FILE: src/graph/utils.js ================================================ import RConnectableNode from './../base/connectable-node.js'; const connectableComponents = [ 'RSplit', 'RCycle', 'RSplitChannels', 'RPipeline' ]; const isConnectable = component => { return RConnectableNode.isPrototypeOf(component.type) || connectableComponents.includes(component.type.name) || Object.getPrototypeOf(component.type).name === 'RExtensible'; }; // this rather strange function is used when we want to get a numeric value // from either a child or its parent's props // but the child takes precedence (for overriding) const propertyFromChildOrParent = (child, parent) => property => { return !isNaN(child.props[property]) ? child.props[property] : parent.props[property]; }; export { isConnectable, propertyFromChildOrParent }; ================================================ FILE: webpack.config.js ================================================ const webpack = require('webpack'); const path = require('path'); const UglifyJsPlugin = require('uglifyjs-webpack-plugin'); const Config = { output: { path: path.resolve(__dirname, 'dist'), filename: 'r-audio.min.js', }, entry: './examples/index.js', mode: process.env['NODE_ENV'] || 'production', devtool: process.env['NODE_ENV'] === 'development' ? 'source-map' : false, resolve: { modules: [ 'node_modules' ] }, module: { rules: [ { test: /\.js$|\.jsx$/, exclude: /node_modules/, use: ['babel-loader?presets[]=react,env', 'eslint-loader?fix=true&emitWarning=true'] } ] }, devServer: { contentBase: path.join(__dirname, 'examples'), compress: true, port: 8080 } }; if (!(process.env['NODE_ENV'] === 'development')) { Config.output.library = 'r-audio'; Config.output.libraryTarget = 'umd'; Config.entry = './index.js'; Config.optimization = { minimizer: [ new UglifyJsPlugin() ] }; Config.externals = ['react', 'react-dom', 'prop-types']; } module.exports = Config;