Initial web release

Geraint 2024-10-22 12:24:55 +01:00
parent a9d78f0331
commit c867188d8e
14 changed files with 1815 additions and 1 deletion

signalsmith-stretch.h (modified)

@@ -93,14 +93,21 @@ struct SignalsmithStretch {
 	template&lt;class Inputs&gt;
 	void seek(Inputs &&inputs, int inputSamples, double playbackRate) {
 		inputBuffer.reset();
+		Sample totalEnergy = 0;
 		for (int c = 0; c < channels; ++c) {
 			auto &&inputChannel = inputs[c];
 			auto &&bufferChannel = inputBuffer[c];
 			int startIndex = std::max<int>(0, inputSamples - stft.windowSize() - stft.interval());
 			for (int i = startIndex; i < inputSamples; ++i) {
-				bufferChannel[i] = inputChannel[i];
+				Sample s = inputChannel[i];
+				totalEnergy += s*s;
+				bufferChannel[i] = s;
 			}
 		}
+		if (totalEnergy >= noiseFloor) {
+			silenceCounter = 0;
+			silenceFirst = true;
+		}
 		inputBuffer += inputSamples;
 		didSeek = true;
 		seekTimeFactor = (playbackRate*stft.interval() > 1) ? 1/playbackRate : stft.interval();

web/Makefile (new file, 23 lines)

@@ -0,0 +1,23 @@
release/SignalsmithStretch.mjs: release/SignalsmithStretch.js
	@echo "let module = {}, exports = {};" > release/SignalsmithStretch.mjs
	@cat release/SignalsmithStretch.js >> release/SignalsmithStretch.mjs
	@echo "let _export=SignalsmithStretch;export default _export;" >> release/SignalsmithStretch.mjs

release/SignalsmithStretch.js: emscripten/main.js web-wrapper.js ../signalsmith-stretch.h
	@cp emscripten/main.js release/SignalsmithStretch.js
	@cat web-wrapper.js >> release/SignalsmithStretch.js

emscripten/main.js: emscripten/main.cpp
	@emscripten/compile.sh emscripten/main.cpp emscripten/main.js SignalsmithStretch

## Development helpers
jsdoc: release/SignalsmithStretch.js release/SignalsmithStretch.mjs
	npx jsdoc release --verbose

server:
	python3 -m http.server

watch:
	# pip3 install watchdog
	watchmedo shell-command --patterns='*.js;Makefile' --command='make jsdoc;echo "rebuilt"' --drop
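For context, the demo pages below consume the built ES module straight from this directory. A minimal sketch of that usage, based on index.html's "live" mode (`someAudioBuffer` is a placeholder for any decoded AudioBuffer):

import SignalsmithStretch from "./release/SignalsmithStretch.mjs";

const audioContext = new AudioContext();
const stretchNode = await SignalsmithStretch(audioContext); // async: registers the AudioWorklet on first use
stretchNode.connect(audioContext.destination);

const source = audioContext.createBufferSource();
source.buffer = someAudioBuffer; // placeholder: any decoded AudioBuffer
source.connect(stretchNode);
stretchNode.start(); // pass live input through; rate/pitch changes go via schedule()
source.start();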

web/Scope.mjs (new file, 1 line)

File diff suppressed because one or more lines are too long

web/demo.html (new file, 221 lines)

@@ -0,0 +1,221 @@
<!DOCTYPE html>
<html>
<head>
<title>Signalsmith Stretch Web Audio demo</title>
<style>
#start-overlay {
position: fixed;
top: 0;
left: 0;
height: 100vh;
width: 100vw;
display: flex;
flex-direction: column;
z-index: 100;
}
button {
justify-content: center;
align-items: center;
flex-grow: 1;
font: inherit;
}
body {
margin: 0;
height: 100vh;
width: 100vw;
max-height: 100vh;
max-width: 100vw;
display: grid;
grid-template-areas: "playback upload" "controls controls" "scope scope";
grid-template-columns: 1fr 4rem;
grid-template-rows: 2.5rem 1fr calc(min(30vh, 8rem));
font-size: 12pt;
font-family: Arial, sans-serif;
}
#controls {
grid-area: controls;
display: grid;
grid-template-columns: 4rem 1fr 5rem;
grid-auto-rows: max-content;
padding: 1rem;
align-content: space-evenly;
}
#controls label {
grid-column: 1;
text-align: right;
padding-right: 1ex;
}
#controls input[type=range], #controls input[type=checkbox] {
grid-column: 2;
}
#controls input[type=number] {
grid-column: 3;
}
#scope {
grid-area: scope;
width: 100%;
height: 100%;
border: none;
}
#playback {
grid-area: playback;
background: #DDD;
}
#upload {
grid-area: upload;
background: #EEE;
border: 2px dashed #CCC;
border-radius: 4px;
cursor: pointer;
}
</style>
</head>
<body>
<input id="playback" type="range" value="0" min="0" max="1" step="0.001">
<input id="upload-file" type="file" style="visibility: hidden" accept="audio/*">
<button id="upload">upload</button>
<div id="controls">
<label>active</label>
<input type="checkbox" data-key="active">
<label>rate</label>
<input type="range" min="0" max="4" step="0.01" data-key="rate">
<input type="number" min="0" max="4" step="0.01" data-key="rate">
<label>semitones</label>
<input type="range" min="-12" max="12" step="1" data-key="semitones">
<input type="number" min="-12" max="12" step="1" data-key="semitones">
<label>shelf freq</label>
<input type="range" min="4000" max="12000" step="100" data-key="shelfFreq">
<input type="number" min="4000" max="12000" step="100" data-key="shelfFreq">
<label>shelf dB</label>
<input type="range" min="-24" max="12" step="0.1" data-key="shelfDb">
<input type="number" min="-24" max="12" step="0.1" data-key="shelfDb">
</div>
<script type="module">
import SignalsmithStretch from "./release/SignalsmithStretch.mjs";
import Scope from './Scope.mjs';
let $ = document.querySelector.bind(document);
let $$ = document.querySelectorAll.bind(document);
(async () => {
let audioContext = new AudioContext();
let stretch;
let audioDuration = 1;
// add scope, for fun
let scope = await Scope(audioContext);
scope.connect(audioContext.destination);
let scopeFrame = scope.openInterface();
scopeFrame.id = 'scope';
document.body.appendChild(scopeFrame);
let filter = audioContext.createBiquadFilter();
filter.connect(scope);
// Drop zone
document.body.ondragover = event => {
event.preventDefault();
}
document.body.ondrop = handleDrop;
function handleDrop(event) {
event.preventDefault();
var dt = event.dataTransfer;
handleFile(dt.items ? dt.items[0].getAsFile() : dt.files[0]);
}
function handleFile(file) {
return new Promise((pass, fail) => {
var reader = new FileReader();
reader.onload = e => pass(handleArrayBuffer(reader.result));
reader.onerror = fail;
reader.readAsArrayBuffer(file);
});
}
async function handleArrayBuffer(arrayBuffer) {
let audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
audioDuration = audioBuffer.duration;
let channelBuffers = []
for (let c = 0; c < audioBuffer.numberOfChannels; ++c) {
channelBuffers.push(audioBuffer.getChannelData(c));
}
// fresh node
if (stretch) {
stretch.stop();
stretch.disconnect();
}
stretch = await SignalsmithStretch(audioContext);
stretch.connect(filter);
await stretch.addBuffers(channelBuffers);
controlValues.loopEnd = audioDuration;
controlsChanged();
}
// fetch audio and add buffer
let response = await fetch('loop.mp3');
handleArrayBuffer(await response.arrayBuffer());
let controlValuesInitial = {
active: false,
rate: 1,
semitones: 0,
shelfFreq: 8000,
shelfDb: 0
};
let controlValues = Object.assign({}, controlValuesInitial);
$$('#controls input').forEach(input => {
let isCheckbox = input.type == 'checkbox';
let key = input.dataset.key;
input.oninput = input.onchange = e => {
controlValues[key] = isCheckbox ? input.checked : parseFloat(input.value);
controlsChanged();
};
if (!isCheckbox) input.ondblclick = e => {
controlValues[key] = controlValuesInitial[key];
controlsChanged();
};
});
function controlsChanged() {
$$('#controls input').forEach(input => {
let key = input.dataset.key;
let value = controlValues[key];
// Update value if it doesn't match
if (value !== parseFloat(input.value)) input.value = value;
});
if (stretch) {
let obj = Object.assign({output: audioContext.currentTime + 0.15}, controlValues);
stretch.schedule(obj);
}
filter.type = 'highshelf'; // https://developer.mozilla.org/en-US/docs/Web/API/BiquadFilterNode/type
filter.Q.value = 0.71;
filter.frequency.value = controlValues.shelfFreq;
filter.gain.value = controlValues.shelfDb;
audioContext.resume();
}
controlsChanged();
$('#upload').onclick = e => $('#upload-file').click();
$('#upload-file').onchange = async e => {
stretch.stop();
await handleFile($('#upload-file').files[0]).catch(e => alert(e.message));
if (stretch) {
controlValues.active = true;
controlsChanged();
}
}
let playbackPosition = $('#playback');
setInterval(_ => {
playbackPosition.max = audioDuration;
playbackPosition.value = stretch?.inputTime;
}, 100);
playbackPosition.oninput = playbackPosition.onchange = e => {
let inputTime = parseFloat(playbackPosition.value);
stretch.schedule(Object.assign({input: inputTime}, controlValues));
};
})();
</script>
</body>
</html>

web/emscripten/compile.sh (new executable file, 53 lines)

@@ -0,0 +1,53 @@
# compile.sh main.cpp out.js ModuleName
export SCRIPT_DIR=`dirname "$0"`
if [ -z "$EMSDK_DIR" ]
then
export EMSDK_DIR="${SCRIPT_DIR}/emsdk"
fi
if ! test -d "${EMSDK_DIR}"
then
echo "SDK not found - cloning from Github"
git clone https://github.com/emscripten-core/emsdk.git "${EMSDK_DIR}"
cd "${EMSDK_DIR}" && git pull && ./emsdk install latest && ./emsdk activate latest
fi
EMSDK_QUIET=1 . "${EMSDK_DIR}/emsdk_env.sh" \
&& emcc --check \
&& python3 --version \
&& cmake --version
if [ "$#" -le 1 ]; then
echo "Missing .cpp / .js arguments"
exit 1
fi
INPUT_CPP="$1"
OUTPUT_JS="$2"
mkdir -p $(dirname $OUTPUT_JS)
MODULE_NAME="$3"
if [ -z "$MODULE_NAME" ]
then
MODULE_NAME=$(basename "$OUTPUT_JS" ".${OUTPUT_JS##*.}")
fi
echo "$MODULE_NAME: $INPUT_CPP -> $OUTPUT_JS"
# -sSTRICT -sASSERTIONS --closure=0 \
em++ \
$INPUT_CPP -o "${OUTPUT_JS}" \
-sEXPORT_NAME=$MODULE_NAME -DEXPORT_NAME=$MODULE_NAME \
-I "${SCRIPT_DIR}" \
-std=c++11 -O3 -ffast-math -fno-exceptions -fno-rtti \
--pre-js "${SCRIPT_DIR}/pre.js" --closure 0 \
-Wall -Wextra -Wfatal-errors -Wpedantic -pedantic-errors \
-sSINGLE_FILE=1 -sMODULARIZE -sENVIRONMENT=web,worker,shell -sNO_EXIT_RUNTIME=1 \
-sFILESYSTEM=0 -sEXPORTED_RUNTIME_METHODS=HEAP8,UTF8ToString \
-sINITIAL_MEMORY=512kb -sALLOW_MEMORY_GROWTH=1 -sMEMORY_GROWTH_GEOMETRIC_STEP=0.5 -sABORTING_MALLOC=1 \
-sSTRICT=1 -sDYNAMIC_EXECUTION=0
# Remove last 4 lines (UMD definition)
node -e "let f=process.argv[1],fs=require('fs');fs.writeFileSync(f,fs.readFileSync(f,'utf8').split('\n').slice(0,-5).join('\n')+'\n')" "${OUTPUT_JS}"

web/emscripten/main.cpp (new file, 66 lines)

@@ -0,0 +1,66 @@
#include "../../signalsmith-stretch.h"
#include <vector>
#include <emscripten.h>
int main() {}
using Sample = float;
using Stretch = signalsmith::stretch::SignalsmithStretch<Sample>;
Stretch stretch;
// Shared sample storage: setBuffers() below allocates this and returns the base pointer to JS
std::vector<Sample> buffers;
std::vector<Sample *> buffersIn, buffersOut;
extern "C" {
Sample * EMSCRIPTEN_KEEPALIVE setBuffers(int channels, int length) {
buffers.resize(length*channels*2);
buffersIn.clear(); // clear any previous pointers, so reconfiguring doesn't leave stale entries
buffersOut.clear();
Sample *data = buffers.data();
for (int c = 0; c < channels; ++c) {
buffersIn.push_back(data + length*c);
buffersOut.push_back(data + length*(c + channels));
}
return data;
}
int EMSCRIPTEN_KEEPALIVE blockSamples() {
return stretch.blockSamples();
}
int EMSCRIPTEN_KEEPALIVE intervalSamples() {
return stretch.intervalSamples();
}
int EMSCRIPTEN_KEEPALIVE inputLatency() {
return stretch.inputLatency();
}
int EMSCRIPTEN_KEEPALIVE outputLatency() {
return stretch.outputLatency();
}
void EMSCRIPTEN_KEEPALIVE reset() {
stretch.reset();
}
void EMSCRIPTEN_KEEPALIVE presetDefault(int nChannels, Sample sampleRate) {
stretch.presetDefault(nChannels, sampleRate);
}
void EMSCRIPTEN_KEEPALIVE presetCheaper(int nChannels, Sample sampleRate) {
stretch.presetCheaper(nChannels, sampleRate);
}
void EMSCRIPTEN_KEEPALIVE configure(int nChannels, int blockSamples, int intervalSamples) {
stretch.configure(nChannels, blockSamples, intervalSamples);
}
void EMSCRIPTEN_KEEPALIVE setTransposeFactor(Sample multiplier, Sample tonalityLimit) {
stretch.setTransposeFactor(multiplier, tonalityLimit);
}
void EMSCRIPTEN_KEEPALIVE setTransposeSemitones(Sample semitones, Sample tonalityLimit) {
stretch.setTransposeSemitones(semitones, tonalityLimit);
}
// We can't do setFreqMap()
void EMSCRIPTEN_KEEPALIVE seek(int inputSamples, double playbackRate) {
stretch.seek(buffersIn, inputSamples, playbackRate);
}
void EMSCRIPTEN_KEEPALIVE process(int inputSamples, int outputSamples) {
stretch.process(buffersIn, inputSamples, buffersOut, outputSamples);
}
void EMSCRIPTEN_KEEPALIVE flush(int outputSamples) {
stretch.flush(buffersOut, outputSamples);
}
}
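These exports are what the worklet wrapper (web-wrapper.js, below) drives through the Emscripten module object, where each C function appears with a leading underscore. A rough sketch of the calling pattern, assuming a 2-channel, 128-frame block (sizes are illustrative; the real code sizes the buffers from the stretch latencies):

const wasm = await SignalsmithStretch(); // the MODULARIZE'd factory named by compile.sh's EXPORT_NAME
wasm._main();
wasm._presetDefault(2, 48000);

const frames = 128;
const ptr = wasm._setBuffers(2, frames); // one allocation: [in L, in R, out L, out R]
let heap = wasm.HEAP8.buffer;
const inputL = new Float32Array(heap, ptr, frames);
const inputR = new Float32Array(heap, ptr + frames*4, frames);
// ... write input samples into inputL/inputR ...
wasm._process(frames, frames);

heap = wasm.HEAP8.buffer; // re-fetch in case WASM memory grew and the views detached
const outputL = new Float32Array(heap, ptr + frames*4*2, frames);
const outputR = new Float32Array(heap, ptr + frames*4*3, frames);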

web/emscripten/main.js (new file, 15 lines)

File diff suppressed because one or more lines are too long

web/emscripten/pre.js (new file, 10 lines)

@@ -0,0 +1,10 @@
// Adapted from the Emscripten error message when initialising std::random_device
var crypto = globalThis?.crypto || {
getRandomValues: array => {
// Cryptographically insecure, but fine for audio
for (var i = 0; i < array.length; i++) array[i] = (Math.random()*256)|0;
}
};
var performance = globalThis?.performance || {
now: _ => Date.now()
};

web/index.html (new file, 118 lines)

@@ -0,0 +1,118 @@
<!DOCTYPE html>
<html>
<head>
<title>Signalsmith Stretch Web Audio demo</title>
<style>
body {
font-size: 12pt;
font-family: Arial, sans-serif;
}
#start-overlay {
position: fixed;
top: 0;
left: 0;
height: 100vh;
width: 100vw;
display: flex;
flex-direction: column;
z-index: 100;
}
button {
justify-content: center;
align-items: center;
flex-grow: 1;
font: inherit;
}
.suggestion-code {
border: 1px solid #888;
border-radius: 3px;
padding: 0.3em 0.5em;
margin: 1em 0;
cursor: pointer;
}
.suggestion-code:hover {
background: #8884;
}
</style>
</head>
<body>
<div id="start-overlay">
<button id="start-live">live</button>
<button id="start-scheduled">scheduled</button>
</div>
<div id="suggestions">
<pre><code>stretch.start()</code></pre>
<pre><code>stretch.start(audioContext.currentTime + 1)</code></pre>
<pre><code>stretch.stop()</code></pre>
<pre><code>stretch.stop(audioContext.currentTime + 3)</code></pre>
<pre><code>stretch.schedule({
semitones: 5
})</code></pre>
<pre><code>stretch.schedule({
semitones: 0,
output: audioContext.currentTime + 3
})</code></pre>
<pre><code>stretch.schedule({
rate: 0.8
})</code></pre>
<pre><code>stretch.schedule({
input: 0, // start from beginning
rate: 1.2,
semitones: -2
})</code></pre>
<pre><code>stretch.schedule({
input: 0,
output: audioContext.currentTime + 1
}, true)</code></pre>
</div>
<script type="module">
import SignalsmithStretch from "./release/SignalsmithStretch.mjs";
let $ = document.querySelector.bind(document);
let $$ = document.querySelectorAll.bind(document);
async function start(live) {
$('#start-overlay').remove();
let audioContext = new AudioContext();
let stretchNode = await SignalsmithStretch(audioContext);
stretchNode.connect(audioContext.destination);
let response = await fetch('loop.mp3');
let arrayBuffer = await response.arrayBuffer();
let audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
if (live) {
stretchNode.start();
let sourceNode = audioContext.createBufferSource();
sourceNode.buffer = audioBuffer;
sourceNode.connect(stretchNode);
sourceNode.loop = true;
sourceNode.start();
} else {
let channelArrays = []
for (let c = 0; c < audioBuffer.numberOfChannels; ++c) {
channelArrays.push(audioBuffer.getChannelData(c));
}
let channelBuffers = channelArrays.map(c => c.buffer);
stretchNode.addBuffers(channelArrays, channelBuffers);
}
// put things in the global context, for debugging
Object.assign(globalThis, {
stretch: stretchNode,
audioContext: audioContext
});
}
$('#start-live').onclick = e => start(true);
$('#start-scheduled').onclick = e => start(false);
$$('#suggestions pre').forEach(node => {
let fn = new Function('return ' + node.textContent);
node.classList.add('suggestion-code');
node.onclick = e => fn().then(console.log);
});
</script>
</body>
</html>

web/loop.mp3 (new binary file, not shown)

web/release/SignalsmithStretch.js (new file; diff suppressed because one or more lines are too long)

web/release/SignalsmithStretch.mjs (new file; diff suppressed because one or more lines are too long)

web/release/package.json (new file, 26 lines)

@@ -0,0 +1,26 @@
{
"name": "signalsmith-stretch",
"version": "1.0.0",
"description": "JS/WASM release of the Signalsmith Stretch library",
"main": "SignalsmithStretch.mjs",
"exports": {
"import": "./SignalsmithStretch.mjs",
"require": "./SignalsmithStretch.js"
},
"type": "module",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "https://signalsmith-audio.co.uk/code/stretch.git"
},
"keywords": [
"audio",
"pitch",
"time",
"web-audio"
],
"author": "Geraint Luff",
"license": "MIT"
}
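The exports map above resolves `import` to the ES module and `require` to the plain script. Assuming this release directory is published or installed under the package name (hypothetical install; the factory is the same one the local demos use), a consumer would pull it in like this:

import SignalsmithStretch from "signalsmith-stretch"; // resolves to SignalsmithStretch.mjs

const stretchNode = await SignalsmithStretch(new AudioContext());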

web/web-wrapper.js (new file, 414 lines)

@@ -0,0 +1,414 @@
function registerWorkletProcessor(Module, audioNodeKey) {
class WasmProcessor extends AudioWorkletProcessor {
constructor(options) {
super(options);
this.wasmReady = false;
this.wasmModule = null;
this.channels = 0;
this.buffersIn = [];
this.buffersOut = [];
this.audioBuffers = []; // list of (multi-channel) audio buffers
this.audioBuffersStart = 0; // input position (in samples) of the first stored buffer
this.audioBuffersEnd = 0; // input position (in samples) just past the last stored buffer
this.timeIntervalSamples = sampleRate*0.1;
this.timeIntervalCounter = 0;
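// timeMap: a piecewise-linear schedule mapping output (context) time to input (buffered audio) time.
// Each segment takes effect at `output` seconds; from there the input position starts at `input` and
// advances at `rate` (or holds still while `active` is false), wrapping back by (loopEnd - loopStart)
// whenever that loop range is non-empty and the input position passes loopEnd.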
this.timeMap = [{
active: false,
input: 0,
output: 0,
rate: 1,
semitones: 0,
loopStart: 0,
loopEnd: 0
}];
let remoteMethods = {
setUpdateInterval: seconds => {
this.timeIntervalSamples = sampleRate*seconds;
},
stop: when => {
if (typeof when !== 'number') when = currentTime;
return remoteMethods.schedule({active: false, output: when});
},
start: (when, offset, duration, rate, semitones) => {
if (typeof when === 'object') {
if (!('active' in when)) when.active = true;
return remoteMethods.schedule(when);
}
let obj = {active: true, input: 0, output: currentTime + this.outputLatencySeconds};
if (typeof when === 'number') obj.output = when;
if (typeof offset === 'number') obj.input = offset;
if (typeof rate === 'number') obj.rate = rate;
if (typeof semitones === 'number') obj.semitones = semitones;
let result = remoteMethods.schedule(obj);
if (typeof duration === 'number') {
remoteMethods.stop(obj.output + duration);
obj.output += duration;
obj.active = false;
remoteMethods.schedule(obj);
}
return result;
},
schedule: (objIn, adjustPrevious) => {
let outputTime = ('outputTime' in objIn) ? objIn.outputTime : currentTime;
let latestSegment = this.timeMap[this.timeMap.length - 1];
while (this.timeMap.length && this.timeMap[this.timeMap.length - 1].output >= outputTime) {
latestSegment = this.timeMap.pop();
}
let obj = {
active: latestSegment.active,
input: null,
output: outputTime,
rate: latestSegment.rate,
semitones: latestSegment.semitones,
loopStart: latestSegment.loopStart,
loopEnd: latestSegment.loopEnd
};
Object.assign(obj, objIn);
if (obj.input === null) {
let rate = (latestSegment.active ? latestSegment.rate : 0);
obj.input = latestSegment.input + (obj.output - latestSegment.output)*rate;
}
this.timeMap.push(obj);
if (adjustPrevious && this.timeMap.length > 1) {
let previous = this.timeMap[this.timeMap.length - 2];
if (previous.output < currentTime) {
let rate = (previous.active ? previous.rate : 0);
previous.input += (currentTime - previous.output)*rate;
previous.output = currentTime;
}
previous.rate = (obj.input - previous.input)/(obj.output - previous.output);
}
let currentMapSegment = this.timeMap[0];
while (this.timeMap.length > 1 && this.timeMap[1].output <= outputTime) {
this.timeMap.shift();
currentMapSegment = this.timeMap[0];
}
let rate = (currentMapSegment.active ? currentMapSegment.rate : 0);
let inputTime = currentMapSegment.input + (outputTime - currentMapSegment.output)*rate;
this.timeIntervalCounter = this.timeIntervalSamples;
this.port.postMessage(['time', inputTime]);
return obj;
},
dropBuffers: toSeconds => {
if (typeof toSeconds !== 'number') {
let buffers = this.audioBuffers.flat(1).map(b => b.buffer);
this.audioBuffers = [];
this.audioBuffersStart = this.audioBuffersEnd = 0;
return {
value: {start: 0, end: 0},
transfer: buffers
};
}
let transfer = [];
while (this.audioBuffers.length) {
let first = this.audioBuffers[0];
let length = first[0].length;
let endSamples = this.audioBuffersStart + length;
let endSeconds = endSamples/sampleRate;
if (endSeconds > toSeconds) break;
this.audioBuffers.shift().forEach(b => transfer.push(b.buffer));
this.audioBuffersStart += length;
}
return {
value: {
start: this.audioBuffersStart/sampleRate,
end: this.audioBuffersEnd/sampleRate
},
transfer: transfer
};
},
addBuffers: sampleBuffers => {
sampleBuffers = [].concat(sampleBuffers);
this.audioBuffers.push(sampleBuffers);
let length = sampleBuffers[0].length;
this.audioBuffersEnd += length;
return this.audioBuffersEnd/sampleRate;
}
};
let pendingMessages = [];
this.port.onmessage = event => pendingMessages.push(event);
Module().then(wasmModule => {
this.wasmModule = wasmModule;
this.wasmReady = true;
wasmModule._main();
this.channels = options.numberOfOutputs ? options.outputChannelCount[0] : 2; // stereo by default
this.configure();
this.port.onmessage = event => {
let data = event.data;
let messageId = data.shift();
let method = data.shift();
let result = remoteMethods[method](...data);
if (result?.transfer) {
this.port.postMessage([messageId, result.value], result.transfer);
} else {
this.port.postMessage([messageId, result]);
}
};
let methodArgCounts = {};
for (let key in remoteMethods) {
methodArgCounts[key] = remoteMethods[key].length;
}
this.port.postMessage(['ready', methodArgCounts]);
pendingMessages.forEach(this.port.onmessage);
pendingMessages = null;
});
}
configure() {
this.wasmModule._presetDefault(this.channels, sampleRate);
this.updateBuffers();
this.inputLatencySeconds = this.wasmModule._inputLatency()/sampleRate;
this.outputLatencySeconds = this.wasmModule._outputLatency()/sampleRate;
}
updateBuffers() {
let wasmModule = this.wasmModule;
// longer than one STFT block, so we can seek smoothly
this.bufferLength = (wasmModule._inputLatency() + wasmModule._outputLatency());
let lengthBytes = this.bufferLength*4; // 4 bytes per float32 sample
let bufferPointer = wasmModule._setBuffers(this.channels, this.bufferLength);
this.buffersIn = [];
this.buffersOut = [];
for (let c = 0; c < this.channels; ++c) {
this.buffersIn.push(bufferPointer + lengthBytes*c);
this.buffersOut.push(bufferPointer + lengthBytes*(c + this.channels));
}
}
process(inputList, outputList, parameters) {
if (!this.wasmReady) {
outputList.forEach(output => {
output.forEach(channel => {
channel.fill(0);
});
});
return true;
}
if (!outputList[0]?.length) return false;
let outputTime = currentTime + this.outputLatencySeconds;
while (this.timeMap.length > 1 && this.timeMap[1].output <= outputTime) {
this.timeMap.shift();
}
let currentMapSegment = this.timeMap[0];
let wasmModule = this.wasmModule;
wasmModule._setTransposeSemitones(currentMapSegment.semitones, 8000/sampleRate);
// Reconfigure if the output channel count has changed
if (outputList[0].length != this.channels) {
this.channels = outputList[0]?.length || 0;
this.configure();
}
let outputBlockSize = outputList[0][0].length;
let memory = wasmModule.exports ? wasmModule.exports.memory.buffer : wasmModule.HEAP8.buffer;
// Buffer list (one per channel)
let inputs = inputList[0];
if (!currentMapSegment.active) {
outputList[0].forEach((_, c) => {
let channelBuffer = inputs[c%inputs.length];
let buffer = new Float32Array(memory, this.buffersIn[c], outputBlockSize);
buffer.fill(0);
});
// Should detect silent input and skip processing
wasmModule._process(outputBlockSize, outputBlockSize);
} else if (inputs?.length) {
// Live input
outputList[0].forEach((_, c) => {
let channelBuffer = inputs[c%inputs.length];
let buffer = new Float32Array(memory, this.buffersIn[c], outputBlockSize);
if (channelBuffer) {
buffer.set(channelBuffer);
} else {
buffer.fill(0);
}
})
wasmModule._process(outputBlockSize, outputBlockSize);
} else {
let inputTime = currentMapSegment.input + (outputTime - currentMapSegment.output)*currentMapSegment.rate;
let loopLength = currentMapSegment.loopEnd - currentMapSegment.loopStart;
if (loopLength > 0 && inputTime >= currentMapSegment.loopEnd) {
currentMapSegment.input -= loopLength;
inputTime -= loopLength;
}
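// offset by the analysis latency: we fill the input buffer up to (inputTime + inputLatency),
// so the block that _process() produces lines up with the nominal input time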
inputTime += this.inputLatencySeconds;
let inputSamplesEnd = Math.round(inputTime*sampleRate);
// Fill the buffer with previous input
let buffers = outputList[0].map((_, c) => new Float32Array(memory, this.buffersIn[c], this.bufferLength));
let blockSamples = 0; // current write position in the temporary input buffer
let audioBufferIndex = 0;
let audioSamples = this.audioBuffersStart; // start of current audio buffer
// zero-pad until the start of the audio data
let inputSamples = inputSamplesEnd - this.bufferLength;
if (inputSamples < audioSamples) {
blockSamples = audioSamples - inputSamples;
buffers.forEach(b => b.fill(0, 0, blockSamples));
inputSamples = audioSamples;
}
while (audioBufferIndex < this.audioBuffers.length && audioSamples < inputSamplesEnd) {
let audioBuffer = this.audioBuffers[audioBufferIndex];
let startIndex = inputSamples - audioSamples; // start index within the audio buffer
let bufferEnd = audioSamples + audioBuffer[0].length;
// how many samples to copy: min(how many left in the buffer, how many more we need)
let count = Math.min(audioBuffer[0].length - startIndex, inputSamplesEnd - inputSamples);
if (count > 0) {
buffers.forEach((buffer, c) => {
let channelBuffer = audioBuffer[c%audioBuffer.length];
buffer.subarray(blockSamples).set(channelBuffer.subarray(startIndex, startIndex + count));
});
audioSamples += count;
blockSamples += count;
} else { // we're already past this buffer - skip it
audioSamples += audioBuffer[0].length;
}
++audioBufferIndex;
}
if (blockSamples < this.bufferLength) {
buffers.forEach(buffer => buffer.subarray(blockSamples).fill(0));
}
// constantly seeking, so we don't have to worry about the input buffers needing to be a rate-dependent size
wasmModule._seek(this.bufferLength, currentMapSegment.rate);
wasmModule._process(0, outputBlockSize);
this.timeIntervalCounter -= outputBlockSize;
if (this.timeIntervalCounter <= 0) {
this.timeIntervalCounter = this.timeIntervalSamples;
this.port.postMessage(['time', inputTime]);
}
}
// Re-fetch in case the memory changed (even though there *shouldn't* be any allocations)
memory = wasmModule.exports ? wasmModule.exports.memory.buffer : wasmModule.HEAP8.buffer;
outputList[0].forEach((channelBuffer, c) => {
let buffer = new Float32Array(memory, this.buffersOut[c], outputBlockSize);
channelBuffer.set(buffer);
});
return true;
}
}
registerProcessor(audioNodeKey, WasmProcessor);
}
/**
Creates a Stretch node
@async
@function SignalsmithStretch
@param {AudioContext} audioContext
@param {Object} options - channel configuration (as per [options]{@link https://developer.mozilla.org/en-US/docs/Web/API/AudioWorkletNode/AudioWorkletNode#options})
@returns {Promise<StretchNode>}
*/
SignalsmithStretch = ((Module, audioNodeKey) => {
if (typeof AudioWorkletProcessor === "function" && typeof registerProcessor === "function") {
// AudioWorklet side
registerWorkletProcessor(Module, audioNodeKey);
return {};
}
let promiseKey = Symbol();
let createNode = async function(audioContext, options) {
/**
@classdesc An `AudioWorkletNode` with Signalsmith Stretch extensions
@name StretchNode
@augments AudioWorkletNode
@property {number} inputTime - the current playback (in seconds) within the input audio stored by the node
*/
let audioNode;
options = options || {
numberOfInputs: 1,
numberOfOutputs: 1,
outputChannelCount: [2]
};
try {
audioNode = new AudioWorkletNode(audioContext, audioNodeKey, options);
} catch (e) {
if (!audioContext[promiseKey]) {
let moduleUrl = createNode.moduleUrl;
if (!moduleUrl) {
let moduleCode = `(${registerWorkletProcessor})((_scriptName=>${Module})(),${JSON.stringify(audioNodeKey)})`;
moduleUrl = URL.createObjectURL(new Blob([moduleCode], {type: 'text/javascript'}));
}
audioContext[promiseKey] = audioContext.audioWorklet.addModule(moduleUrl);
}
await audioContext[promiseKey];
audioNode = new AudioWorkletNode(audioContext, audioNodeKey, options);
}
// messages with Promise responses
let requestMap = {};
let idCounter = 0;
let timeUpdateCallback = null;
let post = (transfer, ...data) => {
let id = idCounter++;
return new Promise(resolve => {
requestMap[id] = resolve;
audioNode.port.postMessage([id].concat(data), transfer);
});
};
audioNode.inputTime = 0;
audioNode.port.onmessage = (event) => {
let data = event.data;
let id = data[0], value = data[1];
if (id == 'time') {
audioNode.inputTime = value;
if (timeUpdateCallback) timeUpdateCallback(value);
}
if (id in requestMap) {
requestMap[id](value);
delete requestMap[id];
}
};
return new Promise(resolve => {
requestMap['ready'] = remoteMethodKeys => {
Object.keys(remoteMethodKeys).forEach(key => {
let argCount = remoteMethodKeys[key];
audioNode[key] = (...args) => {
let transfer = null;
if (args.length > argCount) {
transfer = args.pop();
}
return post(transfer, key, ...args);
}
});
/** @lends StretchNode.prototype
@method setUpdateInterval
*/
audioNode.setUpdateInterval = (seconds, callback) => {
timeUpdateCallback = callback;
return post(null, 'setUpdateInterval', seconds);
}
resolve(audioNode);
}
});
};
return createNode;
})(SignalsmithStretch, "signalsmith-stretch");
// register as a CommonJS/AMD module
if (typeof exports === 'object' && typeof module === 'object') {
module.exports = SignalsmithStretch;
} else if (typeof define === 'function' && define['amd']) {
define([], () => SignalsmithStretch);
}
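Putting the wrapper together with the demos above: the returned node exposes each of the worklet's remote methods as an async method (an extra trailing argument becomes the postMessage transfer list). A short sketch of the scheduled-playback path, roughly as demo.html uses it (`audioContext` and `audioBuffer` are assumed to already exist):

const stretchNode = await SignalsmithStretch(audioContext);
stretchNode.connect(audioContext.destination);

// hand decoded channel data to the worklet for scheduled playback
const channelData = [];
for (let c = 0; c < audioBuffer.numberOfChannels; ++c) channelData.push(audioBuffer.getChannelData(c));
await stretchNode.addBuffers(channelData);

// play from the start at 80% speed, pitched up 3 semitones
stretchNode.schedule({active: true, input: 0, rate: 0.8, semitones: 3});

// get input-position updates every 100ms
stretchNode.setUpdateInterval(0.1, inputTime => console.log('input time:', inputTime));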