Repository: bbc/r-audio
Branch: master
Commit: 742fc268242b
Files: 52
Total size: 70.2 KB
Directory structure:
gitextract_r_8nlj7g/
├── .eslintrc.json
├── .gitignore
├── .npmignore
├── LICENSE
├── README.md
├── examples/
│ ├── README.md
│ ├── assets/
│ │ └── js/
│ │ └── bit-crusher.js
│ ├── audio-worklet.js
│ ├── buffers-channels.js
│ ├── complex-effects-graph.js
│ ├── custom-nodes.js
│ ├── delay-lines.js
│ ├── examples.js
│ ├── gain-matrix.js
│ ├── index.html
│ ├── index.js
│ ├── media-element.js
│ ├── media-stream.js
│ └── mutation.js
├── index.js
├── package.json
├── src/
│ ├── audio-nodes/
│ │ ├── analyser.js
│ │ ├── audio-worklet.js
│ │ ├── biquad-filter.js
│ │ ├── buffer-source.js
│ │ ├── channel-merger.js
│ │ ├── channel-splitter.js
│ │ ├── constant-source.js
│ │ ├── convolver.js
│ │ ├── delay.js
│ │ ├── dynamics-compressor.js
│ │ ├── gain.js
│ │ ├── iir-filter.js
│ │ ├── index.js
│ │ ├── media-element-source.js
│ │ ├── media-stream-source.js
│ │ ├── oscillator.js
│ │ ├── panner.js
│ │ ├── stereo-panner.js
│ │ └── wave-shaper.js
│ ├── base/
│ │ ├── audio-context.js
│ │ ├── audio-node.js
│ │ ├── component.js
│ │ ├── connectable-node.js
│ │ └── scheduled-source.js
│ └── graph/
│ ├── cycle.js
│ ├── extensible.js
│ ├── pipeline.js
│ ├── split-channels.js
│ ├── split.js
│ └── utils.js
└── webpack.config.js
================================================
FILE CONTENTS
================================================
================================================
FILE: .eslintrc.json
================================================
{
"extends": [
"standard",
"eslint:recommended",
"plugin:react/recommended"
],
"env": {
"browser": true
},
"rules": {
"react/prop-types": "off",
"react/no-deprecated": "off",
"no-unused-vars": "off",
"space-before-function-paren": "off",
"no-return-assign": "off",
"semi": [
"error",
"always"
]
}
}
================================================
FILE: .gitignore
================================================
.DS_Store
*.tex
evaluation
native_test*
node_modules
dist
================================================
FILE: .npmignore
================================================
examples/
evaluation/
.eslintrc.json
.DS_Store
================================================
FILE: LICENSE
================================================
Copyright (c) 2018-present British Broadcasting Corporation
All rights reserved
(http://www.bbc.co.uk) and r-audio Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: README.md
================================================
# r-audio
A library of React components for building [Web Audio](https://www.w3.org/TR/webaudio/) graphs.
## Objectives
👉 make Web Audio graph code more readable and representative of the graph shape
👉 make it easier to create reusable graphs
👉 make state management easier with React's one-way data bindings and single source of state
👉 represent any arbitrary directed graphs in JSX
👉 support all non-deprecated audio nodes including `AudioWorklet`
👉 allow interspersed HTML components in audio components
## Installation
```bash
npm install r-audio
```
## Usage example
Stereo waveshaper + amplitude modulation on a WAV loop
```jsx
<RAudioContext debug={true} onInit={ctx => this.audioContext = ctx}>
<RPipeline>
<RBufferSource buffer={this.state.buffer} loop/>
<RSplitChannels channelCount={2}>
<RPipeline>
<RWaveShaper curve={this.makeDistortionCurve(200)} />
<RConvolver buffer={this.state.buffer} />
<RDynamicsCompressor threshold={-50} knee={40}/>
<RGain gain={.5} />
</RPipeline>
<RPipeline>
<ROscillator frequency={1} type="sine" detune={0} connectToParam="gain" />
<RGain gain={1} />
</RPipeline>
</RSplitChannels>
</RPipeline>
</RAudioContext>
```
## Useful links
- [Full usage examples](https://github.com/bbc/r-audio/tree/master/examples)
- [API Reference](https://github.com/bbc/r-audio/wiki/API-Reference)
## Development setup
```bash
npm install
npm run dev
```
The demo page will be served at `localhost:8080`. Use a recent version of Chrome or Firefox for the best experience.
Firefox Web Audio developer tool is especially handy (bear in mind Firefox does not support AudioWorklet as of 17 April 2018).
================================================
FILE: examples/README.md
================================================
# r-audio examples
The files in this directory constitute a demo web app where you can test the examples and experiment. To launch the demo app, run `npm run dev`.
Every example has notes/explanations embedded as HTML along the `r-audio` components.
================================================
FILE: examples/assets/js/bit-crusher.js
================================================
// Copyright (c) 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* A AudioWorklet-based BitCrusher demo from the spec example.
*
* @class BitCrusher
* @extends AudioWorkletProcessor
* @see https://webaudio.github.io/web-audio-api/#the-bitcrusher-node
*/
class BitCrusher extends AudioWorkletProcessor {
  static get parameterDescriptors() {
    return [
      {name: 'bitDepth', defaultValue: 12, minValue: 1, maxValue: 16}, {
        name: 'frequencyReduction',
        defaultValue: 0.5,
        minValue: 0,
        maxValue: 1,
      },
    ];
  }

  constructor(options) {
    super(options);
    // Phase accumulator driving the sample-and-hold rate reduction.
    this.phase_ = 0;
    // Last held (quantised) sample, reused until the phase wraps again.
    this.lastSampleValue_ = 0;
  }

  /**
   * Quantises the input to `bitDepth` bits and lowers the effective sample
   * rate by holding each quantised value for roughly 1/frequencyReduction
   * frames.
   *
   * @param {Float32Array[][]} inputs - per-input, per-channel sample blocks.
   * @param {Float32Array[][]} outputs - per-output, per-channel sample blocks.
   * @param {Object<string, Float32Array>} parameters - automation arrays.
   * @returns {boolean} true, to keep the processor alive.
   */
  process(inputs, outputs, parameters) {
    const input = inputs[0];
    const output = outputs[0];
    const bitDepth = parameters.bitDepth;
    const frequencyReduction = parameters.frequencyReduction;

    for (let channel = 0; channel < input.length; ++channel) {
      const inputChannel = input[channel];
      const outputChannel = output[channel];

      for (let i = 0; i < inputChannel.length; ++i) {
        // Fix: per the Web Audio spec a parameter array may hold a single
        // value when the parameter is constant over the render quantum, so
        // indexing with `i` unconditionally could read `undefined`.
        const depth = bitDepth.length > 1 ? bitDepth[i] : bitDepth[0];
        const reduction = frequencyReduction.length > 1 ?
            frequencyReduction[i] : frequencyReduction[0];

        const step = Math.pow(0.5, depth);
        this.phase_ += reduction;
        if (this.phase_ >= 1.0) {
          this.phase_ -= 1.0;
          this.lastSampleValue_ =
              step * Math.floor(inputChannel[i] / step + 0.5);
        }
        outputChannel[i] = this.lastSampleValue_;
      }
    }
    return true;
  }
}

registerProcessor('bit-crusher', BitCrusher);
================================================
FILE: examples/audio-worklet.js
================================================
import React from 'react';
import { render } from 'react-dom';
import {
RAnalyser,
RAudioContext,
RAudioWorklet,
RDelay,
RGain,
RMediaStreamSource,
RPipeline,
RSplitChannels
} from '../index.js';
export default class AudioWorkletExample extends React.Component {
constructor() {
super();
this.state = { stream: null, ready: false };
}
loadWorkletAndStream(ctx) {
const streamPromise = navigator.mediaDevices.getUserMedia({ audio: true, video: false })
.then(stream => this.setState({ stream }));
const workletPromise = ctx.audioWorklet
.addModule('/assets/js/bit-crusher.js');
Promise.all([ streamPromise, workletPromise ])
.then(() => this.setState({ ready: true }));
}
render() {
return (
<RAudioContext debug={true} onInit={ctx => this.loadWorkletAndStream.bind(this)(ctx)}>
<article>
<h1>Buffers and Channels</h1>
<p>This example demonstrates how to use an <code>AudioWorklet</code> in <em>r-audio</em>. It also shows a RAnalyser in action.</p>
<p>Notice that the graph only renders after both the media stream and the worklet have been initialised.</p>
</article>
{
this.state.ready ? (
<RPipeline>
<RMediaStreamSource stream={this.state.stream} />
<RAnalyser fftSize={2048}>
{
proxy => {
const data = new Float32Array(proxy.frequencyBinCount);
// when this function first runs, there will be no data yet
// so wait a bit
// in reality one might want to save the `proxy` object and call it independently
// for instance, inside a `requestAnimationFrame` call
setTimeout(() => {
proxy.getFloatFrequencyData(data);
console.log(data); // eslint-disable-line no-console
}, 3000);
}
}
</RAnalyser>
<RDelay delayTime={0.3} bitDepth={4} />
<RSplitChannels channelCount={2}>
<RAudioWorklet worklet="bit-crusher" bitDepth={4} frequencyReduction={0.5}/>
<RAudioWorklet worklet="bit-crusher" bitDepth={4} frequencyReduction={0.5}/>
</RSplitChannels>
<RGain gain={0.4} />
</RPipeline>
) : null
}
</RAudioContext>
);
}
}
================================================
FILE: examples/buffers-channels.js
================================================
import React from 'react';
import { render } from 'react-dom';
import {
RAudioContext,
RBufferSource,
RConstantSource,
RConvolver,
RDynamicsCompressor,
RGain,
ROscillator,
RPipeline,
RSplitChannels,
RWaveShaper
} from '../index.js';
export default class BuffersAndChannels extends React.Component {
constructor() {
super();
this.state = { buffer: null };
}
componentDidMount() {
fetch('/assets/audio/b.wav')
.then(res => res.arrayBuffer())
.then(ab => this.audioContext.decodeAudioData(ab))
.then(buffer => this.setState({ buffer }));
}
makeDistortionCurve(amount) {
var k = typeof amount === 'number' ? amount : 50,
n_samples = 44100,
curve = new Float32Array(n_samples),
deg = Math.PI / 180,
i = 0,
x;
for (; i < n_samples; ++i) {
x = i * 2 / n_samples - 1;
curve[i] = (3 + k) * x * 20 * deg / (Math.PI + k * Math.abs(x));
}
return curve;
}
render() {
return (
<RAudioContext debug={true} onInit={ctx => this.audioContext = ctx}>
<article>
<h1>Buffers and Channels</h1>
<p>This example demonstrates initialising a <code>RBufferSource</code> with a decoded <code>AudioBuffer</code>.</p>
<p>It also shows how to process channels separately.</p>
</article>
<RPipeline>
<RBufferSource buffer={this.state.buffer} loop start={0}/>
<RSplitChannels channelCount={2}>
<RPipeline>
<RWaveShaper curve={this.makeDistortionCurve(200)} />
<RConvolver buffer={this.state.buffer} />
<RDynamicsCompressor threshold={-50} knee={40}/>
<RConstantSource offset={0} connectToParam="gain" start={0}/>
<RGain gain={0.5} />
</RPipeline>
<RPipeline>
<ROscillator frequency={1} type="sine" detune={0} connectToParam="gain" start={0}/>
<RGain gain={1} />
</RPipeline>
</RSplitChannels>
</RPipeline>
</RAudioContext>
);
}
}
================================================
FILE: examples/complex-effects-graph.js
================================================
import React from 'react';
import { render } from 'react-dom';
import {
RAudioContext,
RBiquadFilter,
RGain,
ROscillator,
RPipeline,
RSplit,
RStereoPanner
} from '../index.js';
// Stateless factory building the full demo graph from four numeric
// parameters; ComplexGraph.render() calls it with fresh values on every
// state change, so all the transitions below are driven from one place.
const pipeline = (detune, gain, filterFreq, pan) => (
  <RAudioContext debug={true}>
    <article>
      <h1>Complex effects graph</h1>
      <p>This example demonstrates how <em>r-audio</em> handles various graph configurations,
      including non-connectable nodes in pipelines and deeply nested parallel/serial connections.</p>
      <p>It also shows how to create ‘dead-end’ branches using the <code>disconnected</code> attribute.</p>
    </article>
    <RPipeline>
      <ROscillator start={0} frequency={440} type="triangle" detune={0}/>
      <ROscillator start={0} frequency={220} type="triangle" detune={detune} transitionTime={0.5}/>
      <RGain gain={gain} transitionTime={1} name='gainToSplit'/>
      <RSplit>
        <ROscillator start={0} frequency={330} type="triangle" detune={detune + 3} transitionTime={0.5} />
        <RBiquadFilter frequency={1000} gain={gain} Q={1} type="lowpass" detune={detune}
          transitionTime={{ gain: 5, detune: 10 }}
          transitionCurve={{ gain: 'exponential', detune: 'linear' }} />
        <RPipeline>
          <RBiquadFilter frequency={1000} gain={1} Q={1} type="lowpass" detune={5} transitionTime={0.8}/>
          <RBiquadFilter frequency={1000} gain={1} Q={1} type="lowpass" detune={5} transitionTime={0.8}/>
          <RBiquadFilter frequency={1000} gain={1} Q={1} type="lowpass" detune={5} transitionTime={0.8}/>
          <ROscillator start={0} frequency={1} type="sine" detune={0} connectToParam='pan' />
          <RStereoPanner />
          <RBiquadFilter frequency={1000} gain={1} Q={1} type="lowpass" detune={3} transitionTime={0.8} />
        </RPipeline>
        <RPipeline>
          <RBiquadFilter frequency={1000} gain={1} Q={1} type="lowpass" detune={3} transitionTime={0.8} disconnected />
        </RPipeline>
      </RSplit>
      <RPipeline>
        <ROscillator start={0} frequency={110} type="sawtooth" detune={0}/>
        <ROscillator start={0} frequency={1} type="sine" detune={0} connectToParam='pan' />
        <RStereoPanner />
      </RPipeline>
      <RGain gain={0.8} transitionTime={1}/>
      <RBiquadFilter frequency={filterFreq} gain={1.5} Q={10.1} type="lowpass" detune={0} transitionTime={0.8}/>
    </RPipeline>
  </RAudioContext>
);
export default class ComplexGraph extends React.Component {
  constructor(props) {
    super(props);

    this.state = {
      detune: 50,
      gain: 0.4,
      filterFreq: 600,
      pan: 0
    };

    // Randomise the animated parameters every 2s to demonstrate transitions.
    // Keep the handle so the timer can be cleared on unmount.
    this.interval = setInterval(() => {
      this.setState({
        detune: Math.random() * 100,
        gain: Math.random() / 2 + 0.5,
        filterFreq: Math.random() * 3000 + 200,
        pan: Math.random() * 2 - 1
      });
    }, 2000);
  }

  // Fix: the original never cleared the interval, leaking the timer and
  // calling setState on an unmounted component when switching examples.
  componentWillUnmount() {
    clearInterval(this.interval);
  }

  render() {
    return pipeline(this.state.detune, this.state.gain, this.state.filterFreq, this.state.pan);
  }
}
================================================
FILE: examples/custom-nodes.js
================================================
import React from 'react';
import { render } from 'react-dom';
import {
RAudioContext,
RCycle,
RDelay,
RExtensible,
RGain,
RMediaElementSource,
RPipeline,
RSplit
} from '../index.js';
class DelayLine extends RExtensible {
renderGraph() {
return (
<RCycle>
<RPipeline>
<RGain gain={this.props.gain}/>
<RDelay delayTime={this.props.delayTime}/>
</RPipeline>
</RCycle>
);
}
}
export default class CustomNodeExample extends React.Component {
  constructor(props) {
    super(props);

    // Detached HTML5 audio element; it plays and loops on its own and is
    // fed into the audio graph below via RMediaElementSource.
    this.audio = new Audio('/assets/audio/clarinet.mp3');
    this.audio.autoplay = true;
    this.audio.loop = true;
  }

  render() {
    return (
      <RAudioContext debug={true}>
        <article>
          <h1>Creating custom nodes</h1>
          <p>This example demonstrates how to create custom <em>r-audio</em> nodes.
          This can be done by extending <code>RExtensible</code>,
          which is itself an extension of <em>RPipeline</em>.
          We define the contents of our custom node by overriding the <code>renderGraph</code> method,
          which simply returns a bit of JSX, just like React components'
          <code>render</code> method.</p>
        </article>
        <RPipeline>
          <RMediaElementSource element={this.audio} />
          <DelayLine gain={0.7} delayTime={0.3} />
          <RGain gain={2} />
        </RPipeline>
      </RAudioContext>
    );
  }
}
================================================
FILE: examples/delay-lines.js
================================================
/**
**/
import React from 'react';
import { render } from 'react-dom';
import {
RAudioContext,
RBiquadFilter,
RCycle,
RDelay,
RGain,
ROscillator,
RPipeline,
RSplit,
RStereoPanner
} from '../index.js';
export default class DelayLineExample extends React.Component {
constructor(props) {
super(props);
this.state = { periodicWave: null, start: 0, stop: 3 };
// a simple waveform can be created with a series of periodically repeating numbers
const realComponents = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1];
// imaginary components can all be 0 for demo purposes
const imagComponents = realComponents.map(() => 0);
this.onContextInit = ctx => {
this.setState({
periodicWave: ctx.createPeriodicWave(
Float32Array.from(realComponents),
Float32Array.from(imagComponents),
{ disableNormalization: true }
)
});
// schedule restart of the oscillator after 6 seconds
setInterval(() => this.setState({ start: ctx.currentTime, stop: ctx.currentTime + 3 }), 6000);
};
}
render() {
return (
<RAudioContext debug={true} onInit={this.onContextInit}>
<article>
<h1>Delay lines & scheduling</h1>
<p>
This example demonstrates how one can create feedback delay lines using the <code>RCycle</code> component.
It also shows how scheduling works.
</p>
<p>Make sure to always include a <code>RGain</code> with <code>gain</code> < 1 to avoid infinite feedback.</p>
</article>
<RPipeline>
<ROscillator
frequency={220} type="triangle" detune={0}
periodicWave={this.state.periodicWave}
start={this.state.start}
stop={this.state.stop}/>
<ROscillator frequency={1} type="square" detune={0} connectToParam="gain" start={0}/>
<RGain gain={1} />
<RSplit>
<RGain gain={0.5} />
<RCycle>
<RPipeline>
<RDelay delayTime={0.1} />
<RGain gain={0.4} />
<RStereoPanner pan={-1}/>
</RPipeline>
</RCycle>
<RCycle>
<RPipeline>
<RDelay delayTime={0.3} />
<RGain gain={0.4} />
<RStereoPanner pan={1}/>
</RPipeline>
</RCycle>
</RSplit>
</RPipeline>
</RAudioContext>
);
}
}
================================================
FILE: examples/examples.js
================================================
import React from 'react';
import AudioWorkletExample from './audio-worklet.js';
import DelayLineExample from './delay-lines.js';
import ComplexGraph from './complex-effects-graph.js';
import BuffersAndChannels from './buffers-channels.js';
import MediaElementSourceExample from './media-element.js';
import MediaStreamSourceExample from './media-stream.js';
import Mutation from './mutation.js';
import GainMatrixExample from './gain-matrix.js';
import CustomNodeExample from './custom-nodes.js';
// Registry of demo pages. Each key doubles as the URL hash that
// examples/index.js uses to pick which example to mount, and the key
// order determines the order of options in the selector.
const examples = {
  'audio-worklet': <AudioWorkletExample/>,
  'delay-lines-scheduling': <DelayLineExample />,
  'complex-effects-graph': <ComplexGraph/>,
  'buffers-channels': <BuffersAndChannels/>,
  'media-element': <MediaElementSourceExample/>,
  'media-stream': <MediaStreamSourceExample/>,
  'mutation': <Mutation/>,
  'gain-matrix': <GainMatrixExample/>,
  'custom-node': <CustomNodeExample />,
};

export default examples;
================================================
FILE: examples/gain-matrix.js
================================================
import React from 'react';
import { render } from 'react-dom';
import {
RAudioContext,
RBufferSource,
RExtensible,
RGain,
RPipeline,
RSplit,
RSplitChannels
} from '../index.js';
class GainMatrix extends RExtensible {
constructor(props) {
super(props);
const gains = (new Array(props.channelCount || 2))
.fill((new Array(props.channelCount || 2)).fill(1));
this.state = { gains };
this.makeRow = this.makeRow.bind(this);
}
onGainInput(e) {
const [x, y] = e.target.name.split('').map(v => parseInt(v));
const gains = this.state.gains.slice().map(arr => arr.slice());
gains[x][y] = e.target.value;
this.setState({ gains });
}
makeRow(row, rowIndex) {
return (
<RSplit key={rowIndex}>
{
row.map((cellGain, columnIndex) => (
<RPipeline key={columnIndex}>
<RGain name={`gain${rowIndex}${columnIndex}`} gain={cellGain}
connectToChannel={columnIndex}/>
<form>
<label htmlFor={`label-${rowIndex}${columnIndex}`}>
{`Row: ${rowIndex} - Column: ${columnIndex}`}
</label>
<input type="range" min="0" max="1" step="any" defaultValue="1"
id={`label-${rowIndex}${columnIndex}`}
name={`${rowIndex}${columnIndex}`}
onChange={this.onGainInput.bind(this)}/>
<hr/>
</form>
</RPipeline>
))
}
</RSplit>
);
}
renderGraph() {
return (
<RSplitChannels channelCount={this.props.channelCount}>
{ this.state.gains.map(this.makeRow) }
</RSplitChannels>
);
}
}
export default class GainMatrixExample extends React.Component {
  constructor() {
    super();
    // The decoded AudioBuffer arrives asynchronously in componentDidMount.
    this.state = {
      buffer: null
    };
  }

  componentDidMount() {
    // In Safari decodeAudioData doesn't return a promise
    // so we need to run this as both a callback and a promise handler
    // (the falsy result from whichever path didn't run is ignored).
    const loadBuffer = buffer => buffer && this.setState({ buffer });

    fetch('/assets/audio/clarinet.mp3')
      .then(res => res.arrayBuffer())
      .then(ab => this.audioContext.decodeAudioData(ab, loadBuffer, null))
      .then(loadBuffer);
  }

  render() {
    return (
      <RAudioContext debug={false} onInit={ctx => this.audioContext = ctx}>
        <article>
          <h1>Gain Matrix</h1>
          <p>
            This example (courtesy of <a href="http://github.com/tomjnixon">Tom Nixon</a> from
            BBC R&D) shows how we can create complex multichannel graphs
            using <code>RSplitChannels</code> and explicit <code>connectToChannel</code> props.
          </p>
          <p>
            Each channel of the stereo input signal is routed to both channels of the output signal
            and each branch is processed by a separate <code>RGain</code>.
            This kind of graph is particularly useful when binauralising audio.
          </p>
          <p>
            Stereo audio recording by Freesound user <a href="https://freesound.org/people/debudding/">debudding</a> (Public Domain).
          </p>
        </article>
        <RPipeline>
          <RBufferSource buffer={this.state.buffer} loop start={0}/>
          <GainMatrix channelCount={2}/>
        </RPipeline>
      </RAudioContext>
    );
  }
}
================================================
FILE: examples/index.html
================================================
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>r-audio</title>
<script defer type="text/javascript" src="/r-audio.min.js"></script>
<style type="text/css">
body {
font-family: sans-serif;
font-size: 17px;
line-height: 1.3;
}
</style>
</head>
<body>
<div id="app">
</div>
</body>
</html>
================================================
FILE: examples/index.js
================================================
import React from 'react';
import { render } from 'react-dom';
import examples from './examples.js';
// The example to display is selected via the URL hash, e.g. "#mutation".
const example = location.hash.slice(1);

// Reload on selection so the previous example's AudioContext, timers and
// media resources are torn down completely before the next one mounts.
const onExampleChange = e => {
  location.hash = e.target.value;
  location.reload();
};

render(
  (
    <main>
      <header>
        <label htmlFor="example-select">Select an example: </label>
        <select id="example-select" onChange={onExampleChange} value={example}>
          <option value="" disabled>Choose an example</option>
          {
            Object.keys(examples).map((ex, ei) => <option key={ei} value={ex}>{ex}</option>)
          }
        </select>
      </header>
      <hr/>
      { examples[example] || null }
    </main>
  ),
  document.getElementById('app')
);
================================================
FILE: examples/media-element.js
================================================
import React from 'react';
import { render } from 'react-dom';
import {
RAudioContext,
RCycle,
RDelay,
RGain,
RMediaElementSource,
RPipeline
} from '../index.js';
export default class MediaElementSourceExample extends React.Component {
  constructor(props) {
    super(props);

    // Detached HTML5 audio element; it plays and loops on its own and is
    // plugged into the audio graph below via RMediaElementSource.
    this.audio = new Audio('/assets/audio/clarinet.mp3');
    this.audio.autoplay = true;
    this.audio.loop = true;
  }

  render() {
    return (
      <RAudioContext debug={true}>
        <article>
          <h1>Media Element</h1>
          <p>This example demonstrates plugging a HTML5 Audio element to the <em>r-audio</em> graph using <code>RMediaElementSource</code>. A reference to the audio element could also be obtained via React refs.</p>
        </article>
        <RPipeline>
          <RMediaElementSource element={this.audio} />
          <RCycle>
            <RPipeline>
              <RDelay delayTime={0.3} />
              <RGain gain={0.8} />
            </RPipeline>
          </RCycle>
          <RGain gain={2} />
        </RPipeline>
      </RAudioContext>
    );
  }
}
================================================
FILE: examples/media-stream.js
================================================
import React from 'react';
import { render } from 'react-dom';
import {
RAudioContext,
RCycle,
RDelay,
RGain,
RMediaStreamSource,
RPanner,
RPipeline
} from '../index.js';
export default class MediaStreamSourceExample extends React.Component {
constructor(props) {
super(props);
this.state = { stream: null };
navigator.mediaDevices.getUserMedia({ audio: true, video: false })
.then(stream => this.setState({ stream }));
}
render() {
return this.state.stream ? (
<RAudioContext debug={true}>
<article>
<h1>Media Stream</h1>
<p>This example demonstrates plugging a<code>MediaStream</code> object (from either a WebRTC peer or the native audio input device)
into the <em>r-audio</em> graph using <code>RMediaStreamSource</code>.</p>
</article>
<RPipeline>
<RMediaStreamSource stream={this.state.stream} />
<RCycle>
<RPipeline>
<RDelay delayTime={0.3} />
<RGain gain={0.2} />
</RPipeline>
</RCycle>
<RPanner positionY={0} positionX={0} panningModel="HRTF"/>
</RPipeline>
</RAudioContext>
) : null;
}
}
================================================
FILE: examples/mutation.js
================================================
import React from 'react';
import { render } from 'react-dom';
import {
RAudioContext,
RBiquadFilter,
RGain,
ROscillator,
RPipeline,
RSplit,
RStereoPanner
} from '../index.js';
export default class Mutation extends React.Component {
constructor() {
super();
this.nodeCache = [
<ROscillator start={1} key={1} frequency={440} type="triangle" detune={0} />,
<RBiquadFilter key={2} frequency={600} type="lowpass" detune={0} transitionDuration={0.8} />,
<RStereoPanner key={3} />
];
this.state = {
nodes: this.nodeCache,
toggle: true,
freq: 440
};
this.change = () => {
const changed = this.nodeCache.slice();
changed.splice(1, 1, <RGain key={2} gain={0.5} />);
this.setState({ nodes: changed });
};
}
render() {
return (
<RAudioContext debug={true}>
<article>
<h1>Mutation</h1>
This example demonstrates how <em>r-audio</em> graphs can be mutated via React state.
<em>r-audio</em> takes care of reconfiguring the connections and instantiating new nodes as necessary.
</article>
<RPipeline>
<button onClick={this.change}>Mutate audio graph</button>
<ROscillator start={0} frequency={440} type="triangle" detune={0} />
{this.state.nodes}
<RGain gain={0.5} transitionDuration={1} />
</RPipeline>
</RAudioContext>
);
}
}
================================================
FILE: index.js
================================================
import RAudioContext from './src/base/audio-context.js';
import RPipeline from './src/graph/pipeline.js';
import RSplit from './src/graph/split.js';
import RCycle from './src/graph/cycle.js';
import RExtensible from './src/graph/extensible.js';
import RSplitChannels from './src/graph/split-channels.js';
import {
RAnalyser,
RAudioWorklet,
RBiquadFilter,
RBufferSource,
RChannelMerger,
RChannelSplitter,
RConvolver,
RConstantSource,
RDelay,
RDynamicsCompressor,
RGain,
RIIRFilter,
RMediaElementSource,
RMediaStreamSource,
ROscillator,
RPanner,
RStereoPanner,
RWaveShaper
} from './src/audio-nodes/index.js';
export {
RAnalyser,
RAudioContext,
RAudioWorklet,
RBiquadFilter,
RBufferSource,
RChannelMerger,
RChannelSplitter,
RConvolver,
RConstantSource,
RCycle,
RDelay,
RDynamicsCompressor,
RGain,
RIIRFilter,
RMediaElementSource,
RMediaStreamSource,
ROscillator,
RPanner,
RPipeline,
RSplit,
RSplitChannels,
RStereoPanner,
RWaveShaper,
RExtensible
};
================================================
FILE: package.json
================================================
{
"name": "r-audio",
"version": "1.2.0",
"description": "A library of React components for building Web Audio graphs.",
"module": "dist/r-audio.min.js",
"main": "dist/r-audio.min.js",
"scripts": {
"dev": "NODE_ENV=development webpack-dev-server",
"build": "webpack",
"test": "echo \"Error: no test specified\" && exit 1",
"prepublishOnly": "npm run build"
},
"repository": {
"type": "git",
"url": "git+https://github.com/bbc/r-audio.git"
},
"keywords": [
"web-audio",
"react"
],
"author": "jakubfiala",
"license": "Apache-2.0",
"bugs": {
"url": "https://github.com/bbc/r-audio/issues"
},
"homepage": "https://github.com/bbc/r-audio#readme",
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-core": "^6.26.3",
"babel-loader": "^7.1.5",
"babel-preset-env": "^1.7.0",
"babel-preset-react": "^6.24.1",
"eslint": "^4.19.1",
"eslint-config-standard": "^11.0.0",
"eslint-loader": "^2.1.0",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-node": "^6.0.1",
"eslint-plugin-promise": "^3.8.0",
"eslint-plugin-react": "^7.11.1",
"eslint-plugin-standard": "^3.1.0",
"uglifyjs-webpack-plugin": "^1.3.0",
"webpack": "^4.20.2",
"webpack-cli": "^3.1.0",
"webpack-dev-server": "^3.1.14"
},
"peerDependencies": {
"prop-types": "^15.6.2",
"react": "^16.5.0",
"react-dom": "^16.5.0"
},
"dependencies": {
"prop-types": "^15.6.2",
"react": "^16.5.0",
"react-dom": "^16.5.0"
}
}
================================================
FILE: src/audio-nodes/analyser.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
import PropTypes from 'prop-types';
// Wraps an AnalyserNode; children must be a function that receives a
// read-only facade over the node's analysis methods on every render.
export default class RAnalyser extends RConnectableNode {
  constructor(props) {
    super(props);

    const { fftSize, minDecibels, maxDecibels, smoothingTimeConstant } = this.props;
    this.params = { fftSize, minDecibels, maxDecibels, smoothingTimeConstant };
  }

  componentWillMount() {
    super.componentWillMount();

    if (!this.node) {
      this.node = this.context.audio.createAnalyser();
      this.context.nodes.set(this.props.identifier, this.node);
    }

    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  render() {
    // Expose only the data-extraction methods (plus the bin count) so
    // consumers cannot reconfigure the underlying node directly.
    const analyserProxy = Object.freeze({
      getFloatFrequencyData: array => this.node.getFloatFrequencyData(array),
      getByteFrequencyData: array => this.node.getByteFrequencyData(array),
      getFloatTimeDomainData: array => this.node.getFloatTimeDomainData(array),
      getByteTimeDomainData: array => this.node.getByteTimeDomainData(array),
      frequencyBinCount: this.node.frequencyBinCount
    });

    this.props.children(analyserProxy);

    return super.render();
  }
}

RAnalyser.propTypes = {
  children: PropTypes.func.isRequired
};
================================================
FILE: src/audio-nodes/audio-worklet.js
================================================
/* global AudioWorkletNode */
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
export default class RAudioWorklet extends RConnectableNode {
  constructor(props) {
    super(props);

    // Forward every prop as a potential worklet AudioParam value.
    // NOTE(review): this also copies infrastructure props such as
    // `identifier`/`destination`; presumably updateParams skips names the
    // node doesn't expose — confirm against the base RAudioNode.
    this.params = Object.assign({}, this.props);
  }

  componentWillMount() {
    super.componentWillMount();

    // Instantiate the processor registered under props.worklet once, and
    // record it in the context's registry so connections can find it.
    if (!this.node) {
      this.node = new AudioWorkletNode(this.context.audio, this.props.worklet);
      this.context.nodes.set(this.props.identifier, this.node);
    }

    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/biquad-filter.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
import PropTypes from 'prop-types';
// Wraps a BiquadFilterNode; optionally hands a function-as-child a frozen
// facade exposing the node's frequency-response query.
export default class RBiquadFilter extends RConnectableNode {
  constructor(props) {
    super(props);

    // Mirror the filter-related props into the params map consumed by the
    // base class's updateParams.
    const { frequency, detune, Q, gain, type } = props;
    this.params = { frequency, detune, Q, gain, type };
  }

  componentWillMount() {
    super.componentWillMount();

    if (!this.node) {
      this.node = this.context.audio.createBiquadFilter();
      this.context.nodes.set(this.props.identifier, this.node);
    }

    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  render() {
    const { children } = this.props;

    if (typeof children === 'function') {
      children(Object.freeze({
        getFrequencyResponse: (frequencyHz, magResponse, phaseResponse) =>
          this.node.getFrequencyResponse(frequencyHz, magResponse, phaseResponse)
      }));
    }

    return super.render();
  }
}

RBiquadFilter.propTypes = {
  children: PropTypes.func
};
================================================
FILE: src/audio-nodes/buffer-source.js
================================================
import React from 'react';
import RScheduledSource from './../base/scheduled-source.js';
export default class RBufferSource extends RScheduledSource {
  constructor(props) {
    super(props);

    // Defaults mirror those of a freshly created AudioBufferSourceNode.
    this.params = {
      buffer: props.buffer || null,
      detune: props.detune || 0,
      loop: props.loop || false,
      loopStart: props.loopStart || 0,
      loopEnd: props.loopEnd || 0,
      playbackRate: props.playbackRate || 1
    };

    this.onEnded = this.onEnded.bind(this);
    this.instantiateNode = this.instantiateNode.bind(this);
  }

  // Creates the underlying AudioBufferSourceNode if none exists, registers
  // it in the context's node map and pushes the current params onto it.
  // Called on mount and again after each playback ends, because buffer
  // source nodes are single-use.
  instantiateNode() {
    if (!this.node) {
      this.node = this.context.audio.createBufferSource();
      this.node.addEventListener('ended', this.onEnded);
      this.context.nodes.set(this.props.identifier, this.node);
    }

    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  // we need to make a new AudioBufferSourceNode after playback ends
  // NOTE(review): this relies on super.onEnded clearing this.node so the
  // `if (!this.node)` guard above re-creates it — confirm in RScheduledSource.
  onEnded(e) {
    super.onEnded(e);
    this.instantiateNode();
    // Re-wire the fresh node to the same destinations before notifying
    // the user's own onEnded callback.
    this.connectToAllDestinations(this.props.destination, this.node);

    if (this.props.onEnded) this.props.onEnded(e);
  }

  componentWillMount() {
    super.componentWillMount();
    this.instantiateNode();
  }

  componentDidMount() {
    // Playback can only be scheduled once a decoded buffer is present.
    this.readyToPlay = !!this.props.buffer;
    super.componentDidMount();
  }

  // A changed buffer prop means different audio content: restart playback.
  shouldStartWithPropsChange(prevProps, currentProps) {
    return prevProps.buffer !== currentProps.buffer;
  }

  componentDidUpdate(prevProps, prevState) {
    this.readyToPlay = !!this.props.buffer;
    super.componentDidUpdate(prevProps, prevState);
  }
}
================================================
FILE: src/audio-nodes/channel-merger.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio ChannelMergerNode.
 * The number of inputs is taken from the `channelCount` prop at creation.
 */
export default class RChannelMerger extends RConnectableNode {
  constructor(props) {
    super(props);
    // key marker only: updateParams() reads the actual value from props
    this.params = { channelCount: 1 };
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      const merger = this.context.audio.createChannelMerger(this.props.channelCount);
      this.context.nodes.set(this.props.identifier, merger);
      this.node = merger;
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/channel-splitter.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio ChannelSplitterNode.
 * Distributes its output channels across multiple destination nodes.
 */
export default class RChannelSplitter extends RConnectableNode {
  constructor(props) {
    super(props);
    // key marker only: updateParams() reads the actual value from props
    this.params = { channelCount: 1 };
  }

  /**
   * Override of RAudioNode.getConnectionArguments
   * providing default many-to-many channel routing.
   *
   * @param {AudioNode} destination The node to connect to
   * @param {number} destinationIndex Index of this destination among all destinations
   * @param {string|null} toParam Name of the AudioParam to connect to (if any)
   * @return {Array} the argument list for AudioNode.connect
   */
  getConnectionArguments(destination, destinationIndex, toParam) {
    const connectTarget = toParam ? destination[toParam] : destination;
    // modulo distribution in case we connect to more nodes than we have channels
    const fromChannel = destinationIndex % this.props.channelCount;
    // default to each destination's first input channel unless overridden
    const toChannel = isNaN(this.props.connectToChannel) ? 0 : this.props.connectToChannel;
    if (toParam) return [ connectTarget ];
    return [ connectTarget, fromChannel, toChannel ];
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      this.node = this.context.audio.createChannelSplitter(this.props.channelCount);
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/constant-source.js
================================================
import React from 'react';
import RScheduledSource from './../base/scheduled-source.js';
/**
 * React wrapper around a Web Audio ConstantSourceNode.
 * Like all scheduled sources it is single-use, so a replacement node is
 * created whenever playback is no longer scheduled.
 */
export default class RConstantSource extends RScheduledSource {
  constructor(props) {
    super(props);
    // `offset` is the node's only AudioParam
    this.params = { offset: props.offset };
    this.instantiateNode = this.instantiateNode.bind(this);
    // no external data required, so the source can start immediately
    this.readyToPlay = true;
    this.onEnded = this.onEnded.bind(this);
  }

  onEnded(e) {
    super.onEnded(e);
    if (this.props.onEnded) this.props.onEnded(e);
  }

  instantiateNode() {
    const needsNode = !this.node || this.playbackScheduled === false;
    if (needsNode) {
      this.node = this.context.audio.createConstantSource();
      this.node.addEventListener('ended', this.onEnded);
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  componentWillMount() {
    super.componentWillMount();
    this.instantiateNode();
  }
}
================================================
FILE: src/audio-nodes/convolver.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio ConvolverNode.
 *
 * Props:
 * - buffer: the impulse response AudioBuffer (default null)
 * - normalize: whether the impulse response is normalized (default true)
 */
export default class RConvolver extends RConnectableNode {
  constructor(props) {
    super(props);
    this.params = {
      buffer: props.buffer || null,
      // `props.normalize || true` would coerce an explicit `false` back to
      // true, making it impossible to record a disabled normalization;
      // only fall back to the default when the prop is absent
      normalize: props.normalize !== undefined ? props.normalize : true
    };
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      this.node = this.context.audio.createConvolver();
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/delay.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio DelayNode.
 *
 * Props:
 * - delayTime: the delay, in seconds
 * - maxDelayTime: optional upper bound for delayTime, fixed at creation.
 *   DelayNode clamps delayTime to its creation-time maximum (1 second
 *   when unspecified), so longer delays require passing this prop.
 */
export default class RDelay extends RConnectableNode {
  constructor(props) {
    super(props);
    this.params = {
      delayTime: props.delayTime
    };
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      // honour maxDelayTime if provided; previously it was ignored and
      // delays longer than the default 1 s maximum were silently clamped
      this.node = typeof this.props.maxDelayTime === 'number'
        ? this.context.audio.createDelay(this.props.maxDelayTime)
        : this.context.audio.createDelay();
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/dynamics-compressor.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio DynamicsCompressorNode.
 *
 * Defaults mirror the Web Audio spec: threshold -24 dB, knee 30 dB,
 * ratio 12, attack 0.003 s, release 0.25 s.
 */
export default class RDynamicsCompressor extends RConnectableNode {
  constructor(props) {
    super(props);
    // `prop || default` would discard legitimate zero values
    // (e.g. threshold: 0, knee: 0 or attack: 0), so only substitute
    // the default when the prop is not a number
    const numberOr = (value, fallback) => (typeof value === 'number' ? value : fallback);
    this.params = {
      threshold: numberOr(props.threshold, -24),
      knee: numberOr(props.knee, 30),
      ratio: numberOr(props.ratio, 12),
      attack: numberOr(props.attack, 0.003),
      release: numberOr(props.release, 0.25)
    };
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      this.node = this.context.audio.createDynamicsCompressor();
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/gain.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio GainNode.
 */
export default class RGain extends RConnectableNode {
  constructor(props) {
    super(props);
    // `gain` is the node's only AudioParam
    this.params = { gain: this.props.gain };
  }

  componentWillMount() {
    super.componentWillMount();
    if (this.node == null) {
      const gainNode = this.context.audio.createGain();
      this.context.nodes.set(this.props.identifier, gainNode);
      this.node = gainNode;
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/iir-filter.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
import PropTypes from 'prop-types';
/**
 * React wrapper around a Web Audio IIRFilterNode.
 *
 * Props:
 * - feedforward: sequence of feedforward (numerator) coefficients
 * - feedback: sequence of feedback (denominator) coefficients
 * - children: optional render prop receiving a frozen proxy that exposes
 *   `getFrequencyResponse`
 */
export default class RIIRFilter extends RConnectableNode {
  constructor(props) {
    super(props);
    // IIRFilterNode has no AudioParams; coefficients are fixed at creation
    this.params = {};
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      // BaseAudioContext.createIIRFilter takes the coefficient arrays as
      // positional arguments (feedforward, feedback) — passing a single
      // options object as before produced an invalid-argument error
      this.node = this.context.audio.createIIRFilter(
        this.props.feedforward,
        this.props.feedback
      );
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  render() {
    if (typeof this.props.children === 'function') {
      // render prop: expose a read-only surface of the native filter
      const filterProxy = Object.freeze({
        getFrequencyResponse: (frequencyHz, magResponse, phaseResponse) => {
          return this.node.getFrequencyResponse(frequencyHz, magResponse, phaseResponse);
        }
      });
      this.props.children(filterProxy);
    }
    return super.render();
  }
}

RIIRFilter.propTypes = {
  children: PropTypes.func
};
================================================
FILE: src/audio-nodes/index.js
================================================
import RAnalyser from './analyser.js';
import RAudioWorklet from './audio-worklet.js';
import RBiquadFilter from './biquad-filter.js';
import RBufferSource from './buffer-source.js';
import RChannelMerger from './channel-merger.js';
import RChannelSplitter from './channel-splitter.js';
import RConstantSource from './constant-source.js';
import RConvolver from './convolver.js';
import RDelay from './delay.js';
import RDynamicsCompressor from './dynamics-compressor.js';
import RGain from './gain.js';
import RIIRFilter from './iir-filter.js';
import RMediaElementSource from './media-element-source.js';
import RMediaStreamSource from './media-stream-source.js';
import ROscillator from './oscillator.js';
import RPanner from './panner.js';
import RStereoPanner from './stereo-panner.js';
import RWaveShaper from './wave-shaper.js';
export {
RAnalyser,
RAudioWorklet,
RBiquadFilter,
RBufferSource,
RChannelMerger,
RChannelSplitter,
RConstantSource,
RConvolver,
RDelay,
RDynamicsCompressor,
RGain,
RIIRFilter,
RMediaElementSource,
RMediaStreamSource,
ROscillator,
RStereoPanner,
RPanner,
RWaveShaper
};
================================================
FILE: src/audio-nodes/media-element-source.js
================================================
import React from 'react';
import RAudioNode from './../base/audio-node.js';
/**
 * React wrapper around a Web Audio MediaElementAudioSourceNode,
 * wrapping the HTMLMediaElement supplied via the `element` prop.
 */
export default class RMediaElementSource extends RAudioNode {
  constructor(props) {
    super(props);
    this.params = {};
    this.createNode = this.createNode.bind(this);
  }

  // creates the source node for the current `element` prop and registers it
  createNode() {
    this.node = this.context.audio.createMediaElementSource(this.props.element);
    this.context.nodes.set(this.props.identifier, this.node);
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      this.createNode();
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  // Recreate the node AFTER the props have been committed: the previous
  // implementation recreated it inside componentWillReceiveProps, where
  // `this.props.element` still held the OLD element, and it never
  // connected the replacement node (the base class only reconnects when
  // the `destination` prop itself changes) nor detached the old one.
  componentDidUpdate(prevProps, prevState) {
    super.componentDidUpdate(prevProps, prevState);
    if (prevProps.element !== this.props.element) {
      if (this.node) this.node.disconnect();
      this.createNode();
      this.connectToAllDestinations(this.props.destination, this.node);
    }
  }
}
================================================
FILE: src/audio-nodes/media-stream-source.js
================================================
import React from 'react';
import RAudioNode from './../base/audio-node.js';
/**
 * React wrapper around a Web Audio MediaStreamAudioSourceNode,
 * wrapping the MediaStream supplied via the `stream` prop.
 */
export default class RMediaStreamSource extends RAudioNode {
  constructor(props) {
    super(props);
    // MediaStreamAudioSourceNode exposes no settable parameters
    // (the previous `buffer` entry here documented a property the node
    // does not have and was never applied)
    this.params = {};
    this.createNode = this.createNode.bind(this);
  }

  // creates the source node for the current `stream` prop and registers it
  createNode() {
    this.node = this.context.audio.createMediaStreamSource(this.props.stream);
    this.context.nodes.set(this.props.identifier, this.node);
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      this.createNode();
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  // Recreate the node AFTER the props have been committed: the previous
  // implementation recreated it inside componentWillReceiveProps, where
  // `this.props.stream` still held the OLD stream, and it never
  // connected the replacement node (the base class only reconnects when
  // the `destination` prop itself changes) nor detached the old one.
  componentDidUpdate(prevProps, prevState) {
    super.componentDidUpdate(prevProps, prevState);
    if (prevProps.stream !== this.props.stream) {
      if (this.node) this.node.disconnect();
      this.createNode();
      this.connectToAllDestinations(this.props.destination, this.node);
    }
  }
}
================================================
FILE: src/audio-nodes/oscillator.js
================================================
import React from 'react';
import RScheduledSource from './../base/scheduled-source.js';
/**
 * React wrapper around a Web Audio OscillatorNode.
 * Oscillators are single-use, so a fresh node is created whenever
 * playback is no longer scheduled.
 */
export default class ROscillator extends RScheduledSource {
  constructor(props) {
    super(props);
    // parameters forwarded to the underlying node by updateParams()
    this.params = {
      frequency: props.frequency,
      detune: props.detune,
      type: props.type,
      periodicWave: props.periodicWave
    };
    this.instantiateNode = this.instantiateNode.bind(this);
    // no external data required, so the oscillator can start immediately
    this.readyToPlay = true;
    this.onEnded = this.onEnded.bind(this);
  }

  onEnded(e) {
    super.onEnded(e);
    if (this.props.onEnded) this.props.onEnded(e);
  }

  instantiateNode() {
    const needsNode = !this.node || this.playbackScheduled === false;
    if (needsNode) {
      const oscillator = this.context.audio.createOscillator();
      oscillator.addEventListener('ended', this.onEnded);
      // apply a custom waveform if one was supplied
      if (this.props.periodicWave) oscillator.setPeriodicWave(this.props.periodicWave);
      this.context.nodes.set(this.props.identifier, oscillator);
      this.node = oscillator;
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }

  componentWillMount() {
    super.componentWillMount();
    this.instantiateNode();
  }

  componentWillReceiveProps(nextProps) {
    super.componentWillReceiveProps(nextProps);
    // periodicWave is not an AudioParam, so it must be applied explicitly
    if (nextProps.periodicWave !== this.props.periodicWave) {
      this.node.setPeriodicWave(nextProps.periodicWave);
    }
  }
}
================================================
FILE: src/audio-nodes/panner.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio PannerNode.
 * Mirrors all panning-, distance-, cone-, position- and orientation-related
 * props onto the node.
 */
export default class RPanner extends RConnectableNode {
  constructor(props) {
    super(props);
    // every PannerNode setting that updateParams() mirrors from props
    const paramNames = [
      'panningModel', 'distanceModel', 'refDistance', 'maxDistance',
      'rolloffFactor', 'coneInnerAngle', 'coneOuterAngle', 'coneOuterGain',
      'positionX', 'positionY', 'positionZ',
      'orientationX', 'orientationY', 'orientationZ'
    ];
    this.params = {};
    for (const name of paramNames) {
      this.params[name] = this.props[name];
    }
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      this.node = this.context.audio.createPanner();
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/stereo-panner.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio StereoPannerNode.
 */
export default class RStereoPanner extends RConnectableNode {
  constructor(props) {
    super(props);
    // `pan` is the node's only AudioParam
    this.params = {
      pan: props.pan
    };
  }

  componentWillMount() {
    super.componentWillMount();
    // (an unused local alias of this.props was removed here)
    if (!this.node) {
      this.node = this.context.audio.createStereoPanner();
      this.context.nodes.set(this.props.identifier, this.node);
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/audio-nodes/wave-shaper.js
================================================
import React from 'react';
import RConnectableNode from './../base/connectable-node.js';
/**
 * React wrapper around a Web Audio WaveShaperNode.
 */
export default class RWaveShaper extends RConnectableNode {
  constructor(props) {
    super(props);
    const { curve, oversample } = props;
    // defaults match a pristine WaveShaperNode: no curve, no oversampling
    this.params = {
      curve: curve || null,
      oversample: oversample || 'none'
    };
  }

  componentWillMount() {
    super.componentWillMount();
    if (!this.node) {
      const shaper = this.context.audio.createWaveShaper();
      this.context.nodes.set(this.props.identifier, shaper);
      this.node = shaper;
    }
    this.updateParams = this.updateParams.bind(this);
    this.updateParams(this.props);
  }
}
================================================
FILE: src/base/audio-context.js
================================================
import React from 'react';
import PropTypes from 'prop-types';
import RComponent from './component.js';
// Normalize the AudioContext constructor across browsers:
// prefer the standard name, fall back to the WebKit-prefixed one.
window.AudioContext = window.AudioContext || window.webkitAudioContext || null;
if (!window.AudioContext) {
  // fail fast: nothing in this library can work without Web Audio
  throw new Error(
    'Could not find AudioContext. This may be because your browser does not support Web Audio.');
}
/**
* Contains and manages the Web Audio graph.
* All immediate children connect directly to its Destination.
*
* @class RAudioContext (name)
*/
export default class RAudioContext extends React.Component {
  constructor(props) {
    super(props);
    // repository of all nodes in the graph
    // keyed by Symbols
    this.nodes = new Map();
    // the actual Web Audio context; `options` is forwarded verbatim
    this._context = new AudioContext(props.options);
    // let callers capture the raw context as soon as it exists
    if (this.props.onInit) this.props.onInit(this._context);
    if (this.props.debug) {
      // expose the node registry for console inspection in debug mode
      window.RAudioNodeMap = this.nodes;
    }
  }
  componentWillMount() {
    // NOTE(review): presumably counters contexts created in a suspended
    // state (e.g. browser autoplay policies) — confirm
    this._context.resume();
  }
  getChildContext() {
    // every RComponent below receives the context, the node registry
    // and the debug flag via legacy React context
    return {
      audio: this._context,
      debug: this.props.debug,
      nodes: this.nodes
    };
  }
  componentWillUnmount() {
    // pause audio processing when the context component goes away
    this._context.suspend();
  }
  render() {
    const children = React.Children
      .toArray(this.props.children)
      .map(child => {
        // pass through non-r-audio children untouched
        if (!RComponent.isPrototypeOf(child.type)) return child;
        // immediate children connect straight to the context destination;
        // each gets a unique Symbol identifier for the node registry
        const audioContextProps = {
          destination: () => this._context.destination,
          identifier: Symbol(child.type.name)
        };
        return React.cloneElement(child, audioContextProps);
      });
    if (this.props.debug) {
      // debug mode renders the graph as a nested HTML list
      return (
        <div>
          <strong>RAudioContext</strong>
          <ul>
            {children}
          </ul>
        </div>
      );
    }
    // NOTE(review): `children` is always an array here, so `|| []` never triggers
    return children || [];
  }
}
RAudioContext.childContextTypes = {
  audio: PropTypes.instanceOf(AudioContext),
  nodes: PropTypes.instanceOf(Map),
  debug: PropTypes.bool
};
================================================
FILE: src/base/audio-node.js
================================================
import React from 'react';
import RComponent from './component.js';
/**
* Any RComponent that corresponds to an AudioNode is a RAudioNode
*
* @class RAudioNode (name)
*/
export default class RAudioNode extends RComponent {
  constructor(props) {
    super(props);
    // internal AudioNode instance
    this.node = null;
    // dictionary of AudioNode parameters (either AudioParams or object properties)
    this.params = {};
    this.connectToAllDestinations = this.connectToAllDestinations.bind(this);
    this.setParam = this.setParam.bind(this);
  }
  // recursively builds up a list of nodes pointed to by IDs or lists of IDs;
  // Symbols are resolved through the node registry, anything else
  // (e.g. an actual AudioNode reference) is passed through unchanged
  flattenPointers(destinations, flattened = []) {
    for (let element of destinations) {
      if (Array.isArray(element)) {
        this.flattenPointers(element, flattened);
      } else if (typeof element === 'symbol') {
        flattened.push(this.context.nodes.get(element));
      } else {
        flattened.push(element);
      }
    }
    return flattened;
  }
  /**
   * Generates arguments for AudioNode.connect
   * Useful because we can, for instance, override the channel assignment logic for ChannelSplitter etc.
   *
   * @param {function} destination The AudioNode to connect to
   * @param {number} destinationIndex The index of the AudioNode among other destinations
   * @param {string|null} toParam The name of the AudioParam to connect to (or undefined)
   * @param {number} fromChannel The index of the chosen output channel of this node (default is 0)
   * @param {number} toChannel The index of the chosen input channel of the destination node (default is 0)
   * @return {Array} the argument list to spread into AudioNode.connect
   */
  getConnectionArguments(destination, destinationIndex, toParam, fromChannel = 0, toChannel = 0) {
    const connectTarget = toParam ? destination[toParam] : destination;
    // channel indices are only passed for node-to-node connections,
    // not when targeting an AudioParam
    return [ connectTarget ].concat(toParam ? [] : [ fromChannel, toChannel ]);
  }
  /**
   * Connects the given AudioNode to this RAudioNode's destinations.
   * Abstracts away this operation as it's used in multiple lifecycle stages.
   *
   * @param {function} destinationFunction The function that will return the destinations
   * @param {AudioNode} webAudioNode The web audio node
   */
  connectToAllDestinations(destinationFunction, webAudioNode) {
    // drop all existing connections first so reconnection is idempotent
    webAudioNode.disconnect();
    if (destinationFunction && !this.props.disconnected) {
      let destinations = destinationFunction();
      if (!(destinations instanceof Array)) destinations = [ destinations ];
      this.flattenPointers(destinations).forEach((destination, di) => {
        // destinations may resolve to undefined (e.g. a not-yet-registered Symbol)
        if (destination) {
          const connectArgs = this.getConnectionArguments(
            destination,
            di,
            this.props.connectToParam,
            this.props.connectFromChannel,
            this.props.connectToChannel);
          webAudioNode.connect(...connectArgs);
        }
      });
    }
  }
  componentWillMount() {
    // only performs the RComponent context check; subclasses create nodes here
    super.componentWillMount();
  }
  componentWillReceiveProps(nextProps) {
    // push any changed props straight onto the underlying AudioNode
    this.updateParams(nextProps);
  }
  componentWillUpdate(nextProps, nextState) {
    // update the node's record in the node registry
    if (this.props.identifier !== nextProps.identifier) {
      this.context.nodes.delete(this.props.identifier);
      this.context.nodes.set(nextProps.identifier, this.node);
    }
  }
  // we use DidUpdate to connect to new destinations,
  // because WillUpdate might get called before the new destinations are ready
  componentDidUpdate(prevProps, prevState) {
    if (prevProps.destination !== this.props.destination) {
      this.connectToAllDestinations(this.props.destination, this.node);
    }
  }
  componentWillUnmount() {
    // detach from the audio graph and deregister this node
    this.node.disconnect();
    this.context.nodes.delete(this.props.identifier);
  }
  // resolves the `transitionTime`/`transitionCurve` props, each of which can
  // be a single value applied to all params or a per-param dictionary
  resolveTransitionProps(props, propName) {
    const transitionTime = typeof props.transitionTime === 'number'
      ? props.transitionTime
      : props.transitionTime ? props.transitionTime[propName] : null;
    const transitionCurve = typeof props.transitionCurve === 'string'
      ? props.transitionCurve
      : props.transitionCurve ? props.transitionCurve[propName] : null;
    return [ transitionTime, transitionCurve ];
  }
  // updates only Web Audio-related parameters
  // (both AudioParams and regular properties)
  updateParams(props) {
    if (!this.params) return;
    for (let p in this.params) {
      // only touch parameters actually supplied as props
      if (!(p in props)) continue;
      const [ transitionTime, transitionCurve ] = this.resolveTransitionProps(props, p);
      if (this.node[p] instanceof AudioParam) {
        this.setParam(this.node[p], props[p], transitionTime, transitionCurve);
      } else if (this.node.parameters && this.node.parameters.has(p)) {
        // AudioWorklet-style nodes expose their params via a parameter map
        let param = this.node.parameters.get(p);
        this.setParam(param, props[p], transitionTime, transitionCurve);
      } else if (p in this.node) {
        // some browsers (e.g. Chrome) will try to set channelCount and throw an error
        // since we can't use Object.getOwnPropertyDescriptor on the AudioNodes
        // we simply wrap the action in a try-catch
        try {
          if (this.node[p] !== props[p]) this.node[p] = props[p];
        } catch(e) {
          console.warn(`Tried setting ${p} on node`, this.node); // eslint-disable-line no-console
        }
      }
    }
  }
  // sets a single AudioParam, optionally ramping towards the value
  setParam(param, value, transitionTime, transitionCurve) {
    if (transitionCurve) {
      const fn = `${transitionCurve}RampToValueAtTime`;
      // `exponentialRamp` doesn't seem to work on Firefox, so fall back to linear
      try {
        param[fn](value, transitionTime);
      } catch (e) {
        param['linearRampToValueAtTime'](value, transitionTime);
      }
    } else {
      param.setValueAtTime(value, transitionTime || this.context.audio.currentTime);
    }
  }
  componentDidMount() {
    // destinations exist by now, so wiring up the node is safe
    this.connectToAllDestinations(this.props.destination, this.node);
  }
  render() {
    // in debug mode render a nested description of the node and its params
    if (this.context.debug) {
      return (
        <li>
          <div>
            <strong>
              {this.constructor.name} <em>{this.props.name || ''}</em>
              <sup><mark>{this.props.disconnected && 'disconnected' || ''}</mark></sup>
            </strong>
            <div>{ this.props.connectToParam ? <span> connects to <em>{this.props.connectToParam}</em></span> : null }</div>
          </div>
          <ul>
            {
              Object.keys(this.params).map((p, pi) => {
                if (!this.props[p] && this.props[p] !== 0) return null;
                let param = this.props[p];
                if (typeof this.props[p] === 'boolean') param = this.props[p].toString();
                // non-primitive props are shown by their constructor name
                if (!(['number', 'string', 'boolean'].includes(typeof this.props[p]))) {
                  param = param.constructor.name;
                }
                return <li key={pi}>{p}: <code>{param}</code></li>;
              })
            }
          </ul>
        </li>
      );
    }
    return null;
  }
}
================================================
FILE: src/base/component.js
================================================
import React from 'react';
import PropTypes from 'prop-types';
/**
* Anything that requires an AudioContext is a RComponent
*
* @class RComponent (name)
*/
/**
 * Anything that requires an AudioContext is a RComponent
 *
 * @class RComponent (name)
 */
export default class RComponent extends React.Component {
  componentWillMount() {
    // r-audio components cannot work outside a RAudioContext subtree
    if (this.context.audio) return;
    throw new ReferenceError('RComponent needs to be a child of a RAudioContext');
  }

  render() {
    return null;
  }
}

RComponent.contextTypes = {
  audio: PropTypes.instanceOf(window.AudioContext || window.webkitAudioContext),
  nodes: PropTypes.instanceOf(Map),
  debug: PropTypes.bool
};
================================================
FILE: src/base/connectable-node.js
================================================
import React from 'react';
import RAudioNode from './audio-node.js';
/**
* Any RAudioNode that can be connected to is a RConnectableNode
*
* @class RConnectableNode (name)
*/
/**
 * Any RAudioNode that can be connected to is a RConnectableNode
 *
 * @class RConnectableNode (name)
 */
export default class RConnectableNode extends RAudioNode {
  componentWillUnmount() {
    super.componentWillUnmount();
    if (!this.props.parent) return;
    // sever every inbound connection from our parents before going away
    const parentPointers = this.flattenPointers(this.props.parent());
    for (const parentIdentifier of parentPointers) {
      const parent = this.context.nodes.get(parentIdentifier);
      if (!parent) continue;
      try {
        parent.disconnect(this.node);
      } catch (e) {
        console.warn(e); // eslint-disable-line no-console
      }
    }
  }
}
================================================
FILE: src/base/scheduled-source.js
================================================
import React from 'react';
import RAudioNode from './audio-node.js';
/**
* Any RAudioNode that can be scheduled to start/end is a RScheduledSource
*
* @class RScheduledSource (name)
*/
export default class RScheduledSource extends RAudioNode {
  constructor(props) {
    super(props);
    // set to true by subclasses once any required data (e.g. a buffer) is available
    this.readyToPlay = false;
    // guards against calling start() twice on the same single-use node
    this.playbackScheduled = false;
    this.onEnded = this.onEnded.bind(this);
    this.schedule = this.schedule.bind(this);
  }
  onEnded() {
    this.playbackScheduled = false;
    // Web Audio will remove the node from the graph after stopping, so reinstantiate it
    // (instantiateNode is provided by the concrete subclass)
    this.instantiateNode();
    this.connectToAllDestinations(this.props.destination, this.node);
  }
  // schedules start/stop on the node according to the `start` and `stop` props
  schedule() {
    const shouldScheduleStart =
      typeof this.props.start === 'number' &&
      this.readyToPlay &&
      !this.playbackScheduled &&
      // a stop at or before the start would make playback pointless
      (typeof this.props.stop !== 'number' || this.props.start < this.props.stop);
    const shouldScheduleStop =
      typeof this.props.stop === 'number';
    if (shouldScheduleStart) {
      // NOTE(review): offset/duration only apply to buffer-backed sources;
      // assumes other sources tolerate the extra arguments — confirm
      this.node.start(this.props.start || 0, this.props.offset || 0, this.props.duration);
      this.playbackScheduled = true;
    }
    if (shouldScheduleStop) {
      this.node.stop(this.props.stop);
    }
  }
  /**
  Overriding this method enables sources to specify special conditions when playback should be rescheduled.
  e.g. BufferSource should be rescheduled if a new buffer is provided
  **/
  shouldStartWithPropsChange() {
    return false;
  }
  componentDidMount() {
    super.componentDidMount();
    this.schedule();
  }
  componentDidUpdate(prevProps, prevState) {
    super.componentDidUpdate(prevProps, prevState);
    // reschedule when the timing props change or a subclass requests it
    if (prevProps.start !== this.props.start ||
      prevProps.stop !== this.props.stop ||
      this.shouldStartWithPropsChange(prevProps, this.props)) {
      this.schedule();
    }
  }
}
================================================
FILE: src/graph/cycle.js
================================================
import React from 'react';
import RAudioNode from './../base/audio-node.js';
import RComponent from './../base/component.js';
import { isConnectable } from './utils.js';
/**
* A RComponent which connects each child to itself as well as the destination
*
* @class RCycle (name)
*/
export default class RCycle extends RComponent {
  constructor(props) {
    super(props);
    // identifiers of children that form the feedback inputs of the cycle;
    // this array is shared by reference through the node registry
    this.inputs = [];
  }
  componentWillMount() {
    super.componentWillMount();
    // register the shared inputs array so destination lookups resolve to it
    this.context.nodes.set(this.props.identifier, this.inputs);
  }
  componentWillUpdate(nextProps, nextState) {
    // update the node's record in the node registry
    if (this.props.identifier !== nextProps.identifier) {
      this.context.nodes.delete(this.props.identifier);
      this.context.nodes.set(nextProps.identifier, this.inputs);
    }
  }
  render() {
    // empty the array in place — it is shared via the node registry,
    // so it must not be replaced with a fresh array
    while (this.inputs.length > 0) this.inputs.pop();
    const children = React.Children
      .toArray(this.props.children)
      // NOTE(review): `c !== []` compares references and is always true,
      // so this filter only removes nulls
      .filter(c => c !== null && c !== [])
      .map(c => ({ component: c, identifier: Symbol(c.type.name + Date.now()) }))
      .map((childTuple, childIndex, childrenArray) => {
        const type = childTuple.component.type;
        // connectable r-audio children become feedback inputs of the cycle
        if (RComponent.isPrototypeOf(childTuple.component.type) && isConnectable(childTuple.component)) {
          this.inputs.push(childTuple.identifier);
        }
        const pipelineProps = {
          // NOTE(review): each child connects to the outer destination(s)
          // plus its own registered node — this appears to implement the
          // feedback connection; confirm against RPipeline's resolution
          destination: () => {
            let destination = this.props.destination();
            const ownNode = this.context.nodes.get(childTuple.identifier);
            if (!(destination instanceof Array)) destination = [ destination ];
            return destination.concat([ ownNode ]);
          },
          identifier: childTuple.identifier
        };
        return React.cloneElement(childTuple.component, pipelineProps);
      });
    // with no connectable children, fall back to the outer destination(s)
    if (this.inputs.length === 0) {
      const destination = this.props.destination();
      if (destination instanceof Array) this.inputs.push(...destination);
      else this.inputs.push(destination);
    }
    if (this.context.debug) {
      return (
        <li>
          <strong>RCycle</strong>
          <ul>
            {children}
          </ul>
        </li>
      );
    }
    return children;
  }
}
================================================
FILE: src/graph/extensible.js
================================================
import React from 'react';
import RPipeline from './pipeline.js';
/**
* A subclass of RPipeline which can be extended to create custom r-audio nodes
* To create a custom node, subclass RExtensible and override the `renderGraph` method,
* returning the r-audio graph of your custom node
*
* @class RExtensible (name)
**/
/**
 * A subclass of RPipeline which can be extended to create custom r-audio nodes.
 * To create a custom node, subclass RExtensible and override the `renderGraph`
 * method, returning the r-audio graph of your custom node.
 *
 * @class RExtensible (name)
 **/
export default class RExtensible extends RPipeline {
  renderGraph() {
    return null;
  }

  // give each rendered child a stable React key
  addKeys(child, childIndex) {
    return React.cloneElement(child, { key: childIndex });
  }

  render() {
    // hand the custom graph to RPipeline, which connects it in series
    const graph = [ this.renderGraph() ];
    this.customChildren = graph.map(this.addKeys);
    return super.render();
  }
}
================================================
FILE: src/graph/pipeline.js
================================================
import React from 'react';
import RComponent from './../base/component.js';
import { isConnectable } from './utils.js';
/**
* A RComponent which connects its children in a series, creating inbound branches if necessary.
*
* @class RPipeline (name)
*/
export default class RPipeline extends RComponent {
  constructor(props) {
    super(props);
    this.resolveDestination = this.resolveDestination.bind(this);
    this.resolvePointer = this.resolvePointer.bind(this);
    this.resolveParent = this.resolveParent.bind(this);
  }
  /**
   * Ensures whatever value we get from the `nodes` Map, we resolve it to where the actual AudioNodes are.
   *
   * @param {AudioNode|Array} pointer The value found in the `nodes` Map
   * @return {AudioNode|Array<AudioNode>} the actual destination(s)
   */
  resolvePointer(pointer) {
    // we might find that the pointer actually leads us to a list of other pointers (Symbols)
    // this happens, for instance, if the next child is a RSplit
    let resolved = pointer;
    if (pointer instanceof Array) {
      // it could also happen that the pointer leads to an AudioNode reference (esp. if it's an AudioContextDestination)
      resolved = pointer.map(identifier => this.context.nodes.get(identifier) || identifier);
    }
    return resolved;
  }
  /**
   * Tries to provide a destination getter function for the given index in the children array.
   * It optimizes for finding the nearest node in the graph which the given child can connect to.
   *
   * @param {number} currentIndex The current child's index
   * @param {Array} childrenArray The array of all children in the pipeline
   * @return {Function} a function which returns the closest possible destination node
   */
  resolveDestination(currentIndex, childrenArray) {
    let destinationFunction = null;
    if (currentIndex === childrenArray.length - 1) {
      // the last child connects to the pipeline's own destination
      destinationFunction = () => this.props.destination();
    } else if (!isConnectable(childrenArray[currentIndex + 1].component)) {
      // the immediate successor cannot be connected to:
      // scan ahead for the next connectable child
      let childIndex = currentIndex + 1;
      while (childrenArray[++childIndex]) {
        if (isConnectable(childrenArray[childIndex].component)) break;
      }
      if (childIndex === currentIndex + 1 || !childrenArray[childIndex]) {
        // no connectable successor found: fall back to the pipeline destination
        destinationFunction = () => this.props.destination();
      } else {
        destinationFunction = () => this.resolvePointer(this.context.nodes.get(childrenArray[childIndex].identifier));
      }
    } else {
      // the immediate successor is connectable: connect straight to it
      destinationFunction = () => this.resolvePointer(this.context.nodes.get(childrenArray[currentIndex + 1].identifier));
    }
    return destinationFunction;
  }
  // provides a getter for the identifiers of the child's inbound (parent) nodes,
  // used by RConnectableNode to disconnect parents on unmount
  resolveParent(currentIndex, childrenArray) {
    if (currentIndex === 0) {
      // the first child inherits the pipeline's own parent getter, if any
      return this.getParent || null;
    } else {
      const children = childrenArray.slice(0, currentIndex);
      const parents = [];
      let child = children.pop();
      if (RComponent.isPrototypeOf(child.component.type)) {
        // the first preceding RComponent is always a valid parent
        parents.push(child.identifier);
        // if it's a connectable type it's also the only parent
        if (isConnectable(child.component)) return () => parents;
        // if not, continue
        child = children.pop();
      }
      // look for all preceding RComponents until we hit one which is connectable
      while (child) {
        if (isConnectable(child.component) ||
          !RComponent.isPrototypeOf(child.component.type)) break;
        parents.push(child.identifier);
        child = children.pop();
      }
      return () => parents;
    }
  }
  /**
   * Returns an Object containing the given Component and its unique identifier
   *
   * @param {Component} component The React component
   * @return {Object} the identified child object
   */
  createIdentifiedChild(component) {
    const identifiedChild = {
      component,
      identifier: Symbol(component.type.name + Date.now())
    };
    // the first connectable child represents the whole pipeline in the
    // node registry, so it reuses the pipeline's own identifier
    if (!this.foundFirstConnectableType && isConnectable(component)) {
      identifiedChild.identifier = this.props.identifier;
      this.foundFirstConnectableType = true;
    }
    return identifiedChild;
  }
  /**
   * Returns a clone of the child Component with parent, destination and identifier props added to it
   *
   * @param {Object} identifiedChild The identified child object
   * @param {Number} childIndex The child index
   * @param {Array} childrenArray The children array
   * @return {Component} A clone of the child Component
   */
  createEmbeddableChild(identifiedChild, childIndex, childrenArray) {
    // non-r-audio children are passed through untouched
    if (!RComponent.isPrototypeOf(identifiedChild.component.type)) return identifiedChild.component;
    const getDestination = this.resolveDestination(childIndex, childrenArray);
    const getParent = this.resolveParent(childIndex, childrenArray);
    const pipelineProps = {
      destination: getDestination,
      parent: getParent,
      identifier: identifiedChild.identifier
    };
    if (childIndex === childrenArray.length - 1) {
      // the last child inherits the pipeline's outward channel routing
      Object.assign(pipelineProps, {
        connectFromChannel: this.props.connectFromChannel || 0,
        connectToChannel: this.props.connectToChannel || 0
      });
    }
    return React.cloneElement(identifiedChild.component, pipelineProps);
  }
  render() {
    this.foundFirstConnectableType = false;
    const originalChildren = React.Children.toArray(this.props.children);
    // RExtensible supplies customChildren; otherwise use the JSX children
    const children = (this.customChildren || originalChildren)
      // NOTE(review): `c !== []` compares references and is always true,
      // so this filter only removes nulls
      .filter(c => c !== null && c !== [])
      // double mapping because the second functor needs to peek ahead on the children array
      .map(this.createIdentifiedChild, this)
      .map(this.createEmbeddableChild, this);
    if (this.context.debug) {
      return (
        <li>
          <strong>{this.constructor.name}</strong>
          <ul>
            {children}
          </ul>
        </li>
      );
    }
    return children;
  }
}
================================================
FILE: src/graph/split-channels.js
================================================
import React from 'react';
import RComponent from './../base/component.js';
import RSplit from './split.js';
import RPipeline from './pipeline.js';
import RChannelSplitter from '../audio-nodes/channel-splitter.js';
import RChannelMerger from '../audio-nodes/channel-merger.js';
// This is a helper RComponent which splits the input between its children
// connected in parallel, one channel per branch.
// It's better to use this instead of RChannelSplitter and RChannelMerger
// as it takes care of the channel connections automatically
export default class RSplitChannels extends RComponent {
render() {
const children = React.Children
.toArray(this.props.children)
.slice(0, this.props.channelCount)
.map((element, ci) => {
const channelProps = {
connectFromChannel: 0,
connectToChannel: element.props.connectToChannel || ci
};
return React.cloneElement(element, channelProps);
});
return (
<RPipeline identifier={this.props.identifier} destination={this.props.destination}>
<RChannelSplitter channelCount={this.props.channelCount} />
<RSplit>
{children}
</RSplit>
<RChannelMerger channelCount={this.props.channelCount} />
</RPipeline>
);
}
}
================================================
FILE: src/graph/split.js
================================================
import React from 'react';
import RAudioNode from './../base/audio-node.js';
import RComponent from './../base/component.js';
import {
isConnectable,
propertyFromChildOrParent
} from './utils.js';
/**
* A RComponent which connects its children in parallel, creating inbound branches if necessary.
*
 * @class RSplit
*/
export default class RSplit extends RComponent {
constructor(props) {
super(props);
this.inputs = [];
}
componentWillMount() {
super.componentWillMount();
this.context.nodes.set(this.props.identifier, this.inputs);
}
componentWillUpdate(nextProps, nextState) {
// update the node's record in the node registry
if (this.props.identifier !== nextProps.identifier) {
this.context.nodes.delete(this.props.identifier);
this.context.nodes.set(nextProps.identifier, this.inputs);
}
}
render() {
while (this.inputs.length) this.inputs.pop();
const children = React.Children
.toArray(this.props.children)
.filter(c => c !== null && c !== [])
.map(c => ({ component: c, identifier: Symbol(c.type.name + Date.now()) }))
.map((childTuple, childIndex, childrenArray) => {
if (!RComponent.isPrototypeOf(childTuple.component.type)) return childTuple.component;
const type = childTuple.component.type;
if (RComponent.isPrototypeOf(type) && isConnectable(childTuple.component)) {
this.inputs.push(childTuple.identifier);
}
// this rather strange and terse piece of code
// figures out the channel connections of the RSplit child
// where children can override the parent (RSplit) settings
// this is useful for RSplitChannels
const [ connectFromChannel, connectToChannel ] =
[ 'connectFromChannel', 'connectToChannel' ]
.map(propertyFromChildOrParent(childTuple.component, this));
const splitProps = {
destination: this.props.destination,
identifier: childTuple.identifier,
connectFromChannel,
connectToChannel
};
return React.cloneElement(childTuple.component, splitProps);
});
if (!this.inputs.length) {
const destination = this.props.destination();
if (destination instanceof Array) this.inputs.push(...destination);
else this.inputs.push(destination);
}
if (this.context.debug) {
return (
<li>
<strong>RSplit</strong>
<ul>
{children}
</ul>
</li>
);
}
return children;
}
}
================================================
FILE: src/graph/utils.js
================================================
import RConnectableNode from './../base/connectable-node.js';
// names of RComponent graph containers which can accept inbound connections
const connectableComponents = [
  'RSplit',
  'RCycle',
  'RSplitChannels',
  'RPipeline'
];

/**
 * Determines whether a component can be the target of an audio connection:
 * either a connectable audio node, one of the known container components,
 * or a direct subclass of RExtensible.
 */
const isConnectable = component => {
  const type = component.type;
  if (RConnectableNode.isPrototypeOf(type)) return true;
  if (connectableComponents.includes(type.name)) return true;
  return Object.getPrototypeOf(type).name === 'RExtensible';
};

// this rather strange function is used when we want to get a numeric value
// from either a child or its parent's props
// but the child takes precedence (for overriding)
// (global isNaN intentionally coerces: a missing prop is NaN, so it
// falls through to the parent's value)
const propertyFromChildOrParent = (child, parent) => property => {
  const childValue = child.props[property];
  return isNaN(childValue) ? parent.props[property] : childValue;
};

export {
  isConnectable,
  propertyFromChildOrParent
};
================================================
FILE: webpack.config.js
================================================
const webpack = require('webpack');
const path = require('path');
const UglifyJsPlugin = require('uglifyjs-webpack-plugin');
const Config = {
output: {
path: path.resolve(__dirname, 'dist'),
filename: 'r-audio.min.js',
},
entry: './examples/index.js',
mode: process.env['NODE_ENV'] || 'production',
devtool: process.env['NODE_ENV'] === 'development' ? 'source-map' : false,
resolve: {
modules: [
'node_modules'
]
},
module: {
rules: [
{
test: /\.js$|\.jsx$/,
exclude: /node_modules/,
use: ['babel-loader?presets[]=react,env', 'eslint-loader?fix=true&emitWarning=true']
}
]
},
devServer: {
contentBase: path.join(__dirname, 'examples'),
compress: true,
port: 8080
}
};
if (!(process.env['NODE_ENV'] === 'development')) {
Config.output.library = 'r-audio';
Config.output.libraryTarget = 'umd';
Config.entry = './index.js';
Config.optimization = { minimizer: [ new UglifyJsPlugin() ] };
Config.externals = ['react', 'react-dom', 'prop-types'];
}
module.exports = Config;
gitextract_r_8nlj7g/ ├── .eslintrc.json ├── .gitignore ├── .npmignore ├── LICENSE ├── README.md ├── examples/ │ ├── README.md │ ├── assets/ │ │ └── js/ │ │ └── bit-crusher.js │ ├── audio-worklet.js │ ├── buffers-channels.js │ ├── complex-effects-graph.js │ ├── custom-nodes.js │ ├── delay-lines.js │ ├── examples.js │ ├── gain-matrix.js │ ├── index.html │ ├── index.js │ ├── media-element.js │ ├── media-stream.js │ └── mutation.js ├── index.js ├── package.json ├── src/ │ ├── audio-nodes/ │ │ ├── analyser.js │ │ ├── audio-worklet.js │ │ ├── biquad-filter.js │ │ ├── buffer-source.js │ │ ├── channel-merger.js │ │ ├── channel-splitter.js │ │ ├── constant-source.js │ │ ├── convolver.js │ │ ├── delay.js │ │ ├── dynamics-compressor.js │ │ ├── gain.js │ │ ├── iir-filter.js │ │ ├── index.js │ │ ├── media-element-source.js │ │ ├── media-stream-source.js │ │ ├── oscillator.js │ │ ├── panner.js │ │ ├── stereo-panner.js │ │ └── wave-shaper.js │ ├── base/ │ │ ├── audio-context.js │ │ ├── audio-node.js │ │ ├── component.js │ │ ├── connectable-node.js │ │ └── scheduled-source.js │ └── graph/ │ ├── cycle.js │ ├── extensible.js │ ├── pipeline.js │ ├── split-channels.js │ ├── split.js │ └── utils.js └── webpack.config.js
SYMBOL INDEX (171 symbols across 38 files)
FILE: examples/assets/js/bit-crusher.js
class BitCrusher (line 12) | class BitCrusher extends AudioWorkletProcessor {
method parameterDescriptors (line 13) | static get parameterDescriptors() {
method constructor (line 24) | constructor(options) {
method process (line 30) | process(inputs, outputs, parameters) {
FILE: examples/audio-worklet.js
class AudioWorkletExample (line 15) | class AudioWorkletExample extends React.Component {
method constructor (line 16) | constructor() {
method loadWorkletAndStream (line 21) | loadWorkletAndStream(ctx) {
method render (line 32) | render() {
FILE: examples/buffers-channels.js
class BuffersAndChannels (line 17) | class BuffersAndChannels extends React.Component {
method constructor (line 18) | constructor() {
method componentDidMount (line 24) | componentDidMount() {
method makeDistortionCurve (line 31) | makeDistortionCurve(amount) {
method render (line 45) | render() {
FILE: examples/complex-effects-graph.js
class ComplexGraph (line 55) | class ComplexGraph extends React.Component {
method constructor (line 56) | constructor(props) {
method render (line 75) | render() {
FILE: examples/custom-nodes.js
class DelayLine (line 15) | class DelayLine extends RExtensible {
method renderGraph (line 16) | renderGraph() {
class CustomNodeExample (line 28) | class CustomNodeExample extends React.Component {
method constructor (line 29) | constructor(props) {
method render (line 36) | render() {
FILE: examples/delay-lines.js
class DelayLineExample (line 19) | class DelayLineExample extends React.Component {
method constructor (line 20) | constructor(props) {
method render (line 43) | render() {
FILE: examples/gain-matrix.js
class GainMatrix (line 14) | class GainMatrix extends RExtensible {
method constructor (line 15) | constructor(props) {
method onGainInput (line 25) | onGainInput(e) {
method makeRow (line 34) | makeRow(row, rowIndex) {
method renderGraph (line 59) | renderGraph() {
class GainMatrixExample (line 68) | class GainMatrixExample extends React.Component {
method constructor (line 69) | constructor() {
method componentDidMount (line 76) | componentDidMount() {
method render (line 87) | render() {
FILE: examples/media-element.js
class MediaElementSourceExample (line 13) | class MediaElementSourceExample extends React.Component {
method constructor (line 14) | constructor(props) {
method render (line 21) | render() {
FILE: examples/media-stream.js
class MediaStreamSourceExample (line 14) | class MediaStreamSourceExample extends React.Component {
method constructor (line 15) | constructor(props) {
method render (line 24) | render() {
FILE: examples/mutation.js
class Mutation (line 14) | class Mutation extends React.Component {
method constructor (line 15) | constructor() {
method render (line 36) | render() {
FILE: src/audio-nodes/analyser.js
class RAnalyser (line 5) | class RAnalyser extends RConnectableNode {
method constructor (line 6) | constructor(props) {
method componentWillMount (line 17) | componentWillMount() {
method render (line 29) | render() {
FILE: src/audio-nodes/audio-worklet.js
class RAudioWorklet (line 5) | class RAudioWorklet extends RConnectableNode {
method constructor (line 6) | constructor(props) {
method componentWillMount (line 12) | componentWillMount() {
FILE: src/audio-nodes/biquad-filter.js
class RBiquadFilter (line 5) | class RBiquadFilter extends RConnectableNode {
method constructor (line 6) | constructor(props) {
method componentWillMount (line 18) | componentWillMount() {
method render (line 30) | render() {
FILE: src/audio-nodes/buffer-source.js
class RBufferSource (line 4) | class RBufferSource extends RScheduledSource {
method constructor (line 5) | constructor(props) {
method instantiateNode (line 21) | instantiateNode() {
method onEnded (line 34) | onEnded(e) {
method componentWillMount (line 41) | componentWillMount() {
method componentDidMount (line 46) | componentDidMount() {
method shouldStartWithPropsChange (line 51) | shouldStartWithPropsChange(prevProps, currentProps) {
method componentDidUpdate (line 55) | componentDidUpdate(prevProps, prevState) {
FILE: src/audio-nodes/channel-merger.js
class RChannelMerger (line 4) | class RChannelMerger extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method componentWillMount (line 11) | componentWillMount() {
FILE: src/audio-nodes/channel-splitter.js
class RChannelSplitter (line 4) | class RChannelSplitter extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method getConnectionArguments (line 13) | getConnectionArguments(destination, destinationIndex, toParam) {
method componentWillMount (line 25) | componentWillMount() {
FILE: src/audio-nodes/constant-source.js
class RConstantSource (line 4) | class RConstantSource extends RScheduledSource {
method constructor (line 5) | constructor(props) {
method onEnded (line 17) | onEnded(e) {
method instantiateNode (line 22) | instantiateNode() {
method componentWillMount (line 34) | componentWillMount() {
FILE: src/audio-nodes/convolver.js
class RConvolver (line 4) | class RConvolver extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method componentWillMount (line 14) | componentWillMount() {
FILE: src/audio-nodes/delay.js
class RDelay (line 4) | class RDelay extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method componentWillMount (line 13) | componentWillMount() {
FILE: src/audio-nodes/dynamics-compressor.js
class RDynamicsCompressor (line 4) | class RDynamicsCompressor extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method componentWillMount (line 17) | componentWillMount() {
FILE: src/audio-nodes/gain.js
class RGain (line 4) | class RGain extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method componentWillMount (line 13) | componentWillMount() {
FILE: src/audio-nodes/iir-filter.js
class RIIRFilter (line 5) | class RIIRFilter extends RConnectableNode {
method constructor (line 6) | constructor(props) {
method componentWillMount (line 12) | componentWillMount() {
method render (line 28) | render() {
FILE: src/audio-nodes/media-element-source.js
class RMediaElementSource (line 4) | class RMediaElementSource extends RAudioNode {
method constructor (line 5) | constructor(props) {
method createNode (line 13) | createNode() {
method componentWillMount (line 18) | componentWillMount() {
method componentWillReceiveProps (line 29) | componentWillReceiveProps(nextProps) {
FILE: src/audio-nodes/media-stream-source.js
class RMediaStreamSource (line 4) | class RMediaStreamSource extends RAudioNode {
method constructor (line 5) | constructor(props) {
method createNode (line 15) | createNode() {
method componentWillMount (line 20) | componentWillMount() {
method componentWillReceiveProps (line 31) | componentWillReceiveProps(nextProps) {
FILE: src/audio-nodes/oscillator.js
class ROscillator (line 4) | class ROscillator extends RScheduledSource {
method constructor (line 5) | constructor(props) {
method onEnded (line 20) | onEnded(e) {
method instantiateNode (line 25) | instantiateNode() {
method componentWillMount (line 41) | componentWillMount() {
method componentWillReceiveProps (line 46) | componentWillReceiveProps(nextProps) {
FILE: src/audio-nodes/panner.js
class RPanner (line 4) | class RPanner extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method componentWillMount (line 26) | componentWillMount() {
FILE: src/audio-nodes/stereo-panner.js
class RStereoPanner (line 4) | class RStereoPanner extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method componentWillMount (line 13) | componentWillMount() {
FILE: src/audio-nodes/wave-shaper.js
class RWaveShaper (line 4) | class RWaveShaper extends RConnectableNode {
method constructor (line 5) | constructor(props) {
method componentWillMount (line 14) | componentWillMount() {
FILE: src/base/audio-context.js
class RAudioContext (line 17) | class RAudioContext extends React.Component {
method constructor (line 18) | constructor(props) {
method componentWillMount (line 33) | componentWillMount() {
method getChildContext (line 37) | getChildContext() {
method componentWillUnmount (line 45) | componentWillUnmount() {
method render (line 49) | render() {
FILE: src/base/audio-node.js
class RAudioNode (line 9) | class RAudioNode extends RComponent {
method constructor (line 10) | constructor(props) {
method flattenPointers (line 21) | flattenPointers(destinations, flattened = []) {
method getConnectionArguments (line 45) | getConnectionArguments(destination, destinationIndex, toParam, fromCha...
method connectToAllDestinations (line 58) | connectToAllDestinations(destinationFunction, webAudioNode) {
method componentWillMount (line 81) | componentWillMount() {
method componentWillReceiveProps (line 85) | componentWillReceiveProps(nextProps) {
method componentWillUpdate (line 89) | componentWillUpdate(nextProps, nextState) {
method componentDidUpdate (line 99) | componentDidUpdate(prevProps, prevState) {
method componentWillUnmount (line 105) | componentWillUnmount() {
method resolveTransitionProps (line 110) | resolveTransitionProps(props, propName) {
method updateParams (line 124) | updateParams(props) {
method setParam (line 150) | setParam(param, value, transitionTime, transitionCurve) {
method componentDidMount (line 164) | componentDidMount() {
method render (line 168) | render() {
FILE: src/base/component.js
class RComponent (line 9) | class RComponent extends React.Component {
method componentWillMount (line 10) | componentWillMount() {
method render (line 14) | render() { return null; }
FILE: src/base/connectable-node.js
class RConnectableNode (line 9) | class RConnectableNode extends RAudioNode {
method componentWillUnmount (line 10) | componentWillUnmount() {
FILE: src/base/scheduled-source.js
class RScheduledSource (line 9) | class RScheduledSource extends RAudioNode {
method constructor (line 10) | constructor(props) {
method onEnded (line 19) | onEnded() {
method schedule (line 26) | schedule() {
method shouldStartWithPropsChange (line 49) | shouldStartWithPropsChange() {
method componentDidMount (line 53) | componentDidMount() {
method componentDidUpdate (line 58) | componentDidUpdate(prevProps, prevState) {
FILE: src/graph/cycle.js
class RCycle (line 12) | class RCycle extends RComponent {
method constructor (line 13) | constructor(props) {
method componentWillMount (line 18) | componentWillMount() {
method componentWillUpdate (line 23) | componentWillUpdate(nextProps, nextState) {
method render (line 31) | render() {
FILE: src/graph/extensible.js
class RExtensible (line 11) | class RExtensible extends RPipeline {
method renderGraph (line 12) | renderGraph() {
method addKeys (line 16) | addKeys(child, childIndex) {
method render (line 20) | render() {
FILE: src/graph/pipeline.js
class RPipeline (line 11) | class RPipeline extends RComponent {
method constructor (line 12) | constructor(props) {
method resolvePointer (line 25) | resolvePointer(pointer) {
method resolveDestination (line 46) | resolveDestination(currentIndex, childrenArray) {
method resolveParent (line 70) | resolveParent(currentIndex, childrenArray) {
method createIdentifiedChild (line 107) | createIdentifiedChild(component) {
method createEmbeddableChild (line 129) | createEmbeddableChild(identifiedChild, childIndex, childrenArray) {
method render (line 151) | render() {
FILE: src/graph/split-channels.js
class RSplitChannels (line 13) | class RSplitChannels extends RComponent {
method render (line 14) | render() {
FILE: src/graph/split.js
class RSplit (line 15) | class RSplit extends RComponent {
method constructor (line 16) | constructor(props) {
method componentWillMount (line 21) | componentWillMount() {
method componentWillUpdate (line 26) | componentWillUpdate(nextProps, nextState) {
method render (line 34) | render() {
Condensed preview — 52 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (77K chars).
[
{
"path": ".eslintrc.json",
"chars": 367,
"preview": "{\n \"extends\": [\n \"standard\",\n \"eslint:recommended\",\n \"plugin:react/recommended\"\n ],\n \"env\": {\n \"browser\":"
},
{
"path": ".gitignore",
"chars": 58,
"preview": ".DS_Store\n*.tex\nevaluation\nnative_test*\nnode_modules\ndist\n"
},
{
"path": ".npmignore",
"chars": 47,
"preview": "examples/\nevaluation/\n.eslintrc.json\n.DS_Store\n"
},
{
"path": "LICENSE",
"chars": 655,
"preview": "Copyright (c) 2018-present British Broadcasting Corporation\n\nAll rights reserved\n\n(http://www.bbc.co.uk) and r-audio Con"
},
{
"path": "README.md",
"chars": 1727,
"preview": "# r-audio\nA library of React components for building [Web Audio](https://www.w3.org/TR/webaudio/) graphs.\n\n## Objectives"
},
{
"path": "examples/README.md",
"chars": 252,
"preview": "# r-audio examples\n\nThe files in this directory constitute a demo web app where you can test the examples and experiment"
},
{
"path": "examples/assets/js/bit-crusher.js",
"chars": 1567,
"preview": "// Copyright (c) 2017 The Chromium Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style l"
},
{
"path": "examples/audio-worklet.js",
"chars": 2493,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAnalyser,\n RAudioContext,\n RAudioWorklet,\n"
},
{
"path": "examples/buffers-channels.js",
"chars": 2085,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAudioContext,\n RBufferSource,\n RConstantSo"
},
{
"path": "examples/complex-effects-graph.js",
"chars": 2993,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAudioContext,\n RBiquadFilter,\n RGain,\n RO"
},
{
"path": "examples/custom-nodes.js",
"chars": 1467,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAudioContext,\n RCycle,\n RDelay,\n RExtensi"
},
{
"path": "examples/delay-lines.js",
"chars": 2502,
"preview": "/**\n\n**/\nimport React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAudioContext,\n RBiquadFilter,\n RC"
},
{
"path": "examples/examples.js",
"chars": 932,
"preview": "import React from 'react';\n\nimport AudioWorkletExample from './audio-worklet.js';\nimport DelayLineExample from './delay-"
},
{
"path": "examples/gain-matrix.js",
"chars": 3364,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAudioContext,\n RBufferSource,\n RExtensible"
},
{
"path": "examples/index.html",
"chars": 340,
"preview": "<!DOCTYPE html>\n<html>\n<head>\n <meta charset=\"utf-8\">\n <title>r-audio</title>\n <script defer type=\"text/javascript\" s"
},
{
"path": "examples/index.js",
"chars": 734,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\nimport examples from './examples.js';\n\nconst example = lo"
},
{
"path": "examples/media-element.js",
"chars": 1086,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAudioContext,\n RCycle,\n RDelay,\n RGain,\n "
},
{
"path": "examples/media-stream.js",
"chars": 1217,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAudioContext,\n RCycle,\n RDelay,\n RGain,\n "
},
{
"path": "examples/mutation.js",
"chars": 1444,
"preview": "import React from 'react';\nimport { render } from 'react-dom';\n\nimport {\n RAudioContext,\n RBiquadFilter,\n RGain,\n RO"
},
{
"path": "index.js",
"chars": 1034,
"preview": "import RAudioContext from './src/base/audio-context.js';\nimport RPipeline from './src/graph/pipeline.js';\nimport RSplit "
},
{
"path": "package.json",
"chars": 1532,
"preview": "{\n \"name\": \"r-audio\",\n \"version\": \"1.2.0\",\n \"description\": \"A library of React components for building Web Audio grap"
},
{
"path": "src/audio-nodes/analyser.js",
"chars": 1414,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\nimport PropTypes from 'prop-typ"
},
{
"path": "src/audio-nodes/audio-worklet.js",
"chars": 604,
"preview": "/* global AudioWorkletNode */\nimport React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\n"
},
{
"path": "src/audio-nodes/biquad-filter.js",
"chars": 1128,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\nimport PropTypes from 'prop-typ"
},
{
"path": "src/audio-nodes/buffer-source.js",
"chars": 1592,
"preview": "import React from 'react';\nimport RScheduledSource from './../base/scheduled-source.js';\n\nexport default class RBufferSo"
},
{
"path": "src/audio-nodes/channel-merger.js",
"chars": 568,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RChannelM"
},
{
"path": "src/audio-nodes/channel-splitter.js",
"chars": 1299,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RChannelS"
},
{
"path": "src/audio-nodes/constant-source.js",
"chars": 937,
"preview": "import React from 'react';\nimport RScheduledSource from './../base/scheduled-source.js';\n\nexport default class RConstant"
},
{
"path": "src/audio-nodes/convolver.js",
"chars": 602,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RConvolve"
},
{
"path": "src/audio-nodes/delay.js",
"chars": 550,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RDelay ex"
},
{
"path": "src/audio-nodes/dynamics-compressor.js",
"chars": 720,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RDynamics"
},
{
"path": "src/audio-nodes/gain.js",
"chars": 543,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RGain ext"
},
{
"path": "src/audio-nodes/iir-filter.js",
"chars": 1080,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\nimport PropTypes from 'prop-typ"
},
{
"path": "src/audio-nodes/index.js",
"chars": 1142,
"preview": "import RAnalyser from './analyser.js';\nimport RAudioWorklet from './audio-worklet.js';\nimport RBiquadFilter from './biqu"
},
{
"path": "src/audio-nodes/media-element-source.js",
"chars": 747,
"preview": "import React from 'react';\nimport RAudioNode from './../base/audio-node.js';\n\nexport default class RMediaElementSource e"
},
{
"path": "src/audio-nodes/media-stream-source.js",
"chars": 782,
"preview": "import React from 'react';\nimport RAudioNode from './../base/audio-node.js';\n\nexport default class RMediaStreamSource ex"
},
{
"path": "src/audio-nodes/oscillator.js",
"chars": 1353,
"preview": "import React from 'react';\nimport RScheduledSource from './../base/scheduled-source.js';\n\nexport default class ROscillat"
},
{
"path": "src/audio-nodes/panner.js",
"chars": 1140,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RPanner e"
},
{
"path": "src/audio-nodes/stereo-panner.js",
"chars": 583,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RStereoPa"
},
{
"path": "src/audio-nodes/wave-shaper.js",
"chars": 606,
"preview": "import React from 'react';\nimport RConnectableNode from './../base/connectable-node.js';\n\nexport default class RWaveShap"
},
{
"path": "src/base/audio-context.js",
"chars": 1888,
"preview": "import React from 'react';\nimport PropTypes from 'prop-types';\nimport RComponent from './component.js';\n\nwindow.AudioCon"
},
{
"path": "src/base/audio-node.js",
"chars": 6968,
"preview": "import React from 'react';\nimport RComponent from './component.js';\n\n/**\n * Any RComponent that corresponds to an AudioN"
},
{
"path": "src/base/component.js",
"chars": 565,
"preview": "import React from 'react';\nimport PropTypes from 'prop-types';\n\n/**\n * Anything that requires an AudioContext is a RComp"
},
{
"path": "src/base/connectable-node.js",
"chars": 724,
"preview": "import React from 'react';\nimport RAudioNode from './audio-node.js';\n\n/**\n * Any RAudioNode that can be connected to is "
},
{
"path": "src/base/scheduled-source.js",
"chars": 1875,
"preview": "import React from 'react';\nimport RAudioNode from './audio-node.js';\n\n/**\n * Any RAudioNode that can be scheduled to sta"
},
{
"path": "src/graph/cycle.js",
"chars": 2254,
"preview": "import React from 'react';\nimport RAudioNode from './../base/audio-node.js';\nimport RComponent from './../base/component"
},
{
"path": "src/graph/extensible.js",
"chars": 633,
"preview": "import React from 'react';\nimport RPipeline from './pipeline.js';\n\n/**\n * A subclass of RPipeline which can be extended "
},
{
"path": "src/graph/pipeline.js",
"chars": 5989,
"preview": "import React from 'react';\nimport RComponent from './../base/component.js';\n\nimport { isConnectable } from './utils.js';"
},
{
"path": "src/graph/split-channels.js",
"chars": 1287,
"preview": "import React from 'react';\n\nimport RComponent from './../base/component.js';\nimport RSplit from './split.js';\nimport RPi"
},
{
"path": "src/graph/split.js",
"chars": 2580,
"preview": "import React from 'react';\nimport RAudioNode from './../base/audio-node.js';\nimport RComponent from './../base/component"
},
{
"path": "src/graph/utils.js",
"chars": 783,
"preview": "import RConnectableNode from './../base/connectable-node.js';\n\nconst connectableComponents = [\n 'RSplit',\n 'RCycle',\n "
},
{
"path": "webpack.config.js",
"chars": 1081,
"preview": "const webpack = require('webpack');\nconst path = require('path');\nconst UglifyJsPlugin = require('uglifyjs-webpack-plugi"
}
]
About this extraction
This page contains the full source code of the bbc/r-audio GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 52 files (70.2 KB), approximately 18.4k tokens, and a symbol index with 171 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — a free GitHub-repository-to-text converter for AI tools. Built by Nikandr Surkov.