GitHub Repository: emscripten-core/emscripten
Path: blob/main/src/audio_worklet.js
// This file is the main bootstrap script for Wasm Audio Worklets loaded in an
// Emscripten application. Build with the -sAUDIO_WORKLET linker flag to enable
// targeting Audio Worklets.

// AudioWorkletGlobalScope does not have onmessage/postMessage() functionality
// at the global scope, which means that after creating an
// AudioWorkletGlobalScope and loading this script into it, we cannot
// postMessage() information into it like one would do with Web Workers.

// Instead, we must create an AudioWorkletProcessor class, then instantiate a
// Web Audio graph node from it on the main thread. Using its message port and
// the node constructor's "processorOptions" field, we can share the necessary
// bootstrap information from the main thread to the AudioWorkletGlobalScope.
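//
// As an illustration only (not part of this file): the standard Web Audio
// pattern for handing such bootstrap data to a worklet scope from the main
// thread looks roughly like the sketch below, where 'my-processor',
// 'audioContext' and 'wasmFnPtr' are hypothetical placeholders:
//
//   const node = new AudioWorkletNode(audioContext, 'my-processor', {
//     processorOptions: { callback: wasmFnPtr, samplesPerChannel: 128 }
//   });
//   node.port.postMessage({ /* further messages to the processor */ });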

#if MINIMAL_RUNTIME
var instantiatePromise;
#endif

if (ENVIRONMENT_IS_AUDIO_WORKLET) {

function createWasmAudioWorkletProcessor(audioParams) {
  class WasmAudioWorkletProcessor extends AudioWorkletProcessor {
    constructor(args) {
      super();

      // Capture the Wasm function callback to invoke.
      let opts = args.processorOptions;
#if ASSERTIONS
      assert(opts.callback)
      assert(opts.samplesPerChannel)
#endif
      this.callback = {{{ makeDynCall('iipipipp', 'opts.callback') }}};
      this.userData = opts.userData;
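
      // Aside (not in the original source): in the 'iipipipp' dynCall
      // signature the first character is the return type and the rest are the
      // parameter types (i = int32, p = pointer), which matches the way the
      // callback is invoked from process() below, roughly:
      //   int callback(int numInputs, ptr inputs, int numOutputs, ptr outputs,
      //                int numParams, ptr params, ptr userData)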
      // Then the samples per channel to process, fixed for the lifetime of the
      // context that created this processor. Even though this 'render quantum
      // size' is fixed at 128 samples in the 1.0 spec, it will be variable in
      // the 1.1 spec. It's passed in now, just to prove it's settable, but will
      // eventually be a property of the AudioWorkletGlobalScope (globalThis).
      this.samplesPerChannel = opts.samplesPerChannel;
      this.bytesPerChannel = this.samplesPerChannel * {{{ getNativeTypeSize('float') }}};

      // Prepare the output views; see createOutputViews(). The 'STACK_ALIGN'
      // deduction stops the STACK_OVERFLOW_CHECK failing (since the stack will
      // be full if we allocate all the available space), leaving room for a
      // single AudioSampleFrame as a minimum. There's an arbitrary maximum of
      // 64 frames, for the case where a multi-MB stack is passed.
      this.outputViews = new Array(Math.min(((wwParams.stackSize - {{{ STACK_ALIGN }}}) / this.bytesPerChannel) | 0, /*sensible limit*/ 64));
#if ASSERTIONS
      console.assert(this.outputViews.length > 0, `AudioWorklet needs more stack allocating (at least ${this.bytesPerChannel})`);
#endif
      this.createOutputViews();

#if ASSERTIONS
      // Explicitly verify this later in process(). Note to self: stackSave()
      // is a bit of a misnomer, as it simply gets the current stack address.
      this.ctorOldStackPtr = stackSave();
#endif
    }

    /**
     * Create up-front as many typed views for marshalling the output data as
     * may be required, allocated at the *top* of the worklet's stack (and whose
     * addresses are fixed).
     */
    createOutputViews() {
      // These are still alloc'd to take advantage of the overflow checks, etc.
      var oldStackPtr = stackSave();
      var viewDataIdx = {{{ getHeapOffset('stackAlloc(this.outputViews.length * this.bytesPerChannel)', 'float') }}};
#if WEBAUDIO_DEBUG
      console.log(`AudioWorklet creating ${this.outputViews.length} buffer one-time views (for a stack size of ${wwParams.stackSize} at address ${ptrToString(viewDataIdx * 4)})`);
#endif
      // Inserted in reverse so the lowest indices are closest to the stack top
      for (var n = this.outputViews.length - 1; n >= 0; n--) {
        this.outputViews[n] = HEAPF32.subarray(viewDataIdx, viewDataIdx += this.samplesPerChannel);
      }
      stackRestore(oldStackPtr);
    }
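
    // A worked illustration of the sizing above (not from the source; the
    // numbers are hypothetical): with the spec's 128-sample render quantum,
    // bytesPerChannel is 128 * 4 = 512 bytes. Assuming a 16-byte STACK_ALIGN
    // and a 32 KiB worklet stack, ((32768 - 16) / 512) | 0 = 63, so 63 channel
    // views (below the cap of 64) are carved out of the top 63 * 512 = 32256
    // bytes of the stack, with outputViews[0] sitting closest to the stack top.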

    static get parameterDescriptors() {
      return audioParams;
    }

    /**
     * Marshals all inputs and parameters to the Wasm memory on the thread's
     * stack, then performs the Wasm audio worklet call, and finally marshals
     * the audio output data back.
     *
     * @param {Object} parameters
     */
    process(inputList, outputList, parameters) {
#if ALLOW_MEMORY_GROWTH
      // Recreate the output views if the heap has changed
      // TODO: add support for GROWABLE_ARRAYBUFFERS
      if (HEAPF32.buffer != this.outputViews[0].buffer) {
        this.createOutputViews();
      }
#endif

      var numInputs = inputList.length;
      var numOutputs = outputList.length;

      var entry; // reused list entry or index
      var subentry; // reused channel or other array in each list entry or index

      // Calculate the required stack memory and output buffer views (the stack
      // is further split into aligned structs and the raw float data).
      var stackMemoryStruct = (numInputs + numOutputs) * {{{ C_STRUCTS.AudioSampleFrame.__size__ }}};
      var stackMemoryData = 0;
      for (entry of inputList) {
        stackMemoryData += entry.length;
      }
      stackMemoryData *= this.bytesPerChannel;
      // Collect the total number of output channels (mapped to array views)
      var outputViewsNeeded = 0;
      for (entry of outputList) {
        outputViewsNeeded += entry.length;
      }
      stackMemoryData += outputViewsNeeded * this.bytesPerChannel;
      var numParams = 0;
      for (entry in parameters) {
        ++numParams;
        stackMemoryStruct += {{{ C_STRUCTS.AudioParamFrame.__size__ }}};
        stackMemoryData += parameters[entry].byteLength;
      }
      var oldStackPtr = stackSave();
#if ASSERTIONS
      console.assert(oldStackPtr == this.ctorOldStackPtr, 'AudioWorklet stack address has unexpectedly moved');
      console.assert(outputViewsNeeded <= this.outputViews.length, `Too many AudioWorklet outputs (need ${outputViewsNeeded} but have stack space for ${this.outputViews.length})`);
#endif

      // Allocate the necessary stack space. All pointer variables are in bytes;
      // 'structPtr' starts at the first struct entry (all run sequentially)
      // and is the working start of each record; 'dataPtr' is the same for the
      // audio/params data, starting after *all* the structs.
      // 'structPtr' begins 16-byte aligned, allocated from the internal
      // _emscripten_stack_alloc(), as are the output views, and so to ensure
      // the views fall on the correct addresses (and we finish at the stack
      // top) we request additional bytes, taking this alignment into account,
      // then offset 'dataPtr' by the difference.
      var stackMemoryAligned = (stackMemoryStruct + stackMemoryData + 15) & ~15;
      var structPtr = stackAlloc(stackMemoryAligned);
      var dataPtr = structPtr + (stackMemoryAligned - stackMemoryData);
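
      // Illustrative layout only (not in the original source): after the
      // stackAlloc() above, the allocation runs, from low to high addresses:
      //   [input AudioSampleFrame structs][AudioParamFrame structs]
      //   [output AudioSampleFrame structs][0-15 bytes alignment padding]
      //   [input sample data][param data][output sample data]
      // with the output sample data region ending exactly at 'oldStackPtr',
      // which is why it coincides with the pre-built 'outputViews' (the
      // ASSERTIONS build verifies this below).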

      // Copy input audio descriptor structs and data to Wasm (recall, structs
      // first, audio data after). 'inputsPtr' is the start of the C callback's
      // input AudioSampleFrame.
      var /*const*/ inputsPtr = structPtr;
      for (entry of inputList) {
        // Write the AudioSampleFrame struct instance
        {{{ makeSetValue('structPtr', C_STRUCTS.AudioSampleFrame.numberOfChannels, 'entry.length', 'u32') }}};
        {{{ makeSetValue('structPtr', C_STRUCTS.AudioSampleFrame.samplesPerChannel, 'this.samplesPerChannel', 'u32') }}};
        {{{ makeSetValue('structPtr', C_STRUCTS.AudioSampleFrame.data, 'dataPtr', '*') }}};
        structPtr += {{{ C_STRUCTS.AudioSampleFrame.__size__ }}};
        // Marshal the input audio sample data for each audio channel of this input
        for (subentry of entry) {
          HEAPF32.set(subentry, {{{ getHeapOffset('dataPtr', 'float') }}});
          dataPtr += this.bytesPerChannel;
        }
      }

      // Copy parameter descriptor structs and data to Wasm. 'paramsPtr' is the
      // start of the C callback's input AudioParamFrame.
      var /*const*/ paramsPtr = structPtr;
      for (entry = 0; subentry = parameters[entry++];) {
        // Write the AudioParamFrame struct instance
        {{{ makeSetValue('structPtr', C_STRUCTS.AudioParamFrame.length, 'subentry.length', 'u32') }}};
        {{{ makeSetValue('structPtr', C_STRUCTS.AudioParamFrame.data, 'dataPtr', '*') }}};
        structPtr += {{{ C_STRUCTS.AudioParamFrame.__size__ }}};
        // Marshal the audio parameters array
        HEAPF32.set(subentry, {{{ getHeapOffset('dataPtr', 'float') }}});
        dataPtr += subentry.length * {{{ getNativeTypeSize('float') }}};
      }

      // Copy output audio descriptor structs to Wasm. 'outputsPtr' is the start
      // of the C callback's output AudioSampleFrame. 'dataPtr' will now be
      // aligned with the output views, ending at the stack top (which is why
      // this needs to be last).
      var /*const*/ outputsPtr = structPtr;
      for (entry of outputList) {
        // Write the AudioSampleFrame struct instance
        {{{ makeSetValue('structPtr', C_STRUCTS.AudioSampleFrame.numberOfChannels, 'entry.length', 'u32') }}};
        {{{ makeSetValue('structPtr', C_STRUCTS.AudioSampleFrame.samplesPerChannel, 'this.samplesPerChannel', 'u32') }}};
        {{{ makeSetValue('structPtr', C_STRUCTS.AudioSampleFrame.data, 'dataPtr', '*') }}};
        structPtr += {{{ C_STRUCTS.AudioSampleFrame.__size__ }}};
        // Advance the output pointer to the next output (matching the pre-allocated views)
        dataPtr += this.bytesPerChannel * entry.length;
      }

#if ASSERTIONS
      // If all the maths worked out, we arrived at the original stack address
      console.assert(dataPtr == oldStackPtr, `AudioWorklet stack mismatch (audio data finishes at ${dataPtr} instead of ${oldStackPtr})`);

      // Sanity checks. If these trip, the most likely cause, beyond unforeseen
      // stack shenanigans, is that the 'render quantum size' changed after
      // construction (which shouldn't be possible).
      if (numOutputs) {
        // First, that the output view addresses match the stack positions
        dataPtr -= this.bytesPerChannel;
        for (entry = 0; entry < outputViewsNeeded; entry++) {
          console.assert(dataPtr == this.outputViews[entry].byteOffset, 'AudioWorklet internal error in addresses of the output array views');
          dataPtr -= this.bytesPerChannel;
        }
        // And that the views' sizes match the passed-in output buffers
        for (entry of outputList) {
          for (subentry of entry) {
            console.assert(subentry.byteLength == this.bytesPerChannel, `AudioWorklet unexpected output buffer size (expected ${this.bytesPerChannel} got ${subentry.byteLength})`);
          }
        }
      }
#endif

      // Call out to the Wasm callback to perform the audio processing
      var didProduceAudio = this.callback(numInputs, inputsPtr, numOutputs, outputsPtr, numParams, paramsPtr, this.userData);
      if (didProduceAudio) {
        // Read back the produced audio data to all outputs and their channels.
        // The preallocated 'outputViews' already have the correct offsets and
        // sizes into the stack (recall from createOutputViews() that they run
        // backwards).
        for (entry of outputList) {
          for (subentry of entry) {
            subentry.set(this.outputViews[--outputViewsNeeded]);
          }
        }
      }

      stackRestore(oldStackPtr);

      // Return 'true' to tell the browser to continue running this processor.
      // (Returning 1 or any other truthy value won't work in Chrome)
      return !!didProduceAudio;
    }
  }
  return WasmAudioWorkletProcessor;
}

var messagePort;

// Specify a worklet processor that will be used to receive messages sent to
// this AudioWorkletGlobalScope. We never connect this initial
// AudioWorkletProcessor to the audio graph to do any audio processing.
class BootstrapMessages extends AudioWorkletProcessor {
  constructor(arg) {
    super();
    startWasmWorker(arg.processorOptions)
#if WEBAUDIO_DEBUG
    console.log('AudioWorklet global scope looks like this:');
    console.dir(globalThis);
#endif
    // Listen to messages from the main thread. These messages will ask this
    // scope to create the real AudioWorkletProcessors that call out to Wasm to
    // do audio processing.
    messagePort = this.port;
    /** @suppress {checkTypes} */
    messagePort.onmessage = async (msg) => {
#if MINIMAL_RUNTIME
      // Wait for the module instantiation before processing messages.
      await instantiatePromise;
#endif
      let d = msg.data;
      if (d['_wpn']) {
        // '_wpn' is short for 'Worklet Processor Node', using an identifier
        // that will never conflict with user messages.
        // Register a real AudioWorkletProcessor that will actually do audio processing.
        registerProcessor(d['_wpn'], createWasmAudioWorkletProcessor(d.audioParams));
#if WEBAUDIO_DEBUG
        console.log(`Registered a new WasmAudioWorkletProcessor "${d['_wpn']}" with AudioParams: ${d.audioParams}`);
#endif
        // Post a Wasm Call message back telling that we have now registered the
        // AudioWorkletProcessor, and should trigger the user onSuccess callback
        // of the emscripten_create_wasm_audio_worklet_processor_async() call.
        //
        // '_wsc' is short for 'wasm call', using an identifier that will never
        // conflict with user messages.
        //
        // Note: we convert the pointer arg manually here since the call site
        // ($_EmAudioDispatchProcessorCallback) is used with various signatures
        // and we do not know the types in advance.
        messagePort.postMessage({'_wsc': d.callback, args: [d.contextHandle, 1/*EM_TRUE*/, {{{ to64('d.userData') }}}] });
      } else if (d['_wsc']) {
        getWasmTableEntry(d['_wsc'])(...d.args);
      };
    }
  }

  // No-op; this processor does no audio processing, it only receives the
  // bootstrap messages. Browsers nevertheless require process() to be present.
  // It should never be called, because we never add a node using this
  // processor to the graph, although Chrome does appear to still call it.
  process() {
    // keep this function a no-op. Chrome redundantly wants to call this even
    // though this processor is never added to the graph.
  }
};

// Register the dummy processor that will just receive messages.
registerProcessor('em-bootstrap', BootstrapMessages);
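
// For illustration only (not part of this file): the main thread drives the
// bootstrap handler above by posting a message of roughly this shape to the
// 'em-bootstrap' node's port, with the concrete values originating from an
// emscripten_create_wasm_audio_worklet_processor_async() call:
//
//   port.postMessage({
//     '_wpn': 'user-processor-name', // name to registerProcessor() under
//     audioParams: [],               // AudioParam descriptors for the processor
//     callback: callbackPtr,         // function pointer to invoke once registered
//     contextHandle: contextHandle,  // Wasm-side audio context handle
//     userData: userDataPtr,         // opaque user pointer passed through
//   });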

} // ENVIRONMENT_IS_AUDIO_WORKLET