diff --git a/src/tests/live-suite/processors/recorder/recorder-main.js b/src/tests/live-suite/processors/recorder/recorder-main.js
deleted file mode 100644
index c9b0f4b75..000000000
--- a/src/tests/live-suite/processors/recorder/recorder-main.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import concat from '../../util/concat.js';
-
-export default async (ctx, scheduleNode) => {
-  console.assert(ctx instanceof AudioContext);
-  console.assert(scheduleNode instanceof AudioScheduledSourceNode);
-
-  const mutex = new Promise((resolve) =>
-    scheduleNode.addEventListener('ended', resolve));
-
-  await ctx.audioWorklet.addModule('./processors/recorder/recorder-processor.js');
-
-  const recorder = new AudioWorkletNode(ctx, 'recorder');
-
-  const arrays = [];
-  recorder.port.onmessage = (e) => {
-    !(e.data.channel in arrays) && (arrays[e.data.channel] = []);
-    arrays[e.data.channel].push(e.data.data);
-  };
-
-  // eslint-disable-next-line no-async-promise-executor
-  const buffer = new Promise(async (resolve) => {
-    await mutex;
-    const res = [];
-    arrays.forEach((array, i) => res[i] = concat(...array));
-
-    const buf = new AudioBuffer({
-      length: res[0].byteLength,
-      sampleRate: ctx.sampleRate,
-      numberOfChannels: res.length,
-    });
-
-    res.forEach((array, i) => buf.copyToChannel(array, i));
-
-    resolve(res[0]);
-  });
-
-  return {recorder, buffer};
-};
diff --git a/src/tests/live-suite/processors/recorder/recorder-processor.js b/src/tests/live-suite/processors/recorder/recorder-processor.js
deleted file mode 100644
index 782f7a983..000000000
--- a/src/tests/live-suite/processors/recorder/recorder-processor.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// bypass-processor.js
-class RecorderProcessor extends AudioWorkletProcessor {
-  constructor() {
-    super();
-  }
-
-  process(inputs, outputs) {
-    const input = inputs[0];
-    const output = outputs[0];
-
-    for (let channel = 0; channel < input.length; channel++) {
-      output[channel].set(input[channel]);
-      this.port.postMessage({channel, data: input[channel]});
-    }
-
-    return true;
-  }
-}
-
-registerProcessor('recorder', RecorderProcessor);
diff --git a/src/tests/live-suite/scripts/converter.js b/src/tests/live-suite/scripts/converter.js
deleted file mode 100644
index a0e20fff6..000000000
--- a/src/tests/live-suite/scripts/converter.js
+++ /dev/null
@@ -1,33 +0,0 @@
-export default async tests => {
-  const htmls = await Promise.all(tests.map(async t => (await fetch(t)).text()));
-
-  const scripts = [];
-
-  for (let html of htmls) {
-    const dom = new DOMParser().parseFromString(html, 'text/html');
-    const scriptContent = dom.querySelector('script').innerText;
-
-    const blob = new Blob([scriptContent], { type: 'text/javascript' });
-    const blobUrl = URL.createObjectURL(blob);
-
-    const script = document.createElement('script');
-    script.defer = true;
-    script.type = 'module';
-    script.src = blobUrl;
-
-    document.head.appendChild(script);
-    scripts.push(new Promise(resolve => script.onload = resolve));
-  }
-
-  await Promise.all(scripts);
-
-  const template = document.querySelector('#row');
-  Object.entries(window.tests).forEach(([name, t]) => {
-    const tr = template.content.cloneNode(true);
-    tr.querySelector('slot[name=name]').textContent = name;
-    // tr.querySelector('slot[name=result]').textContent = t.result;
-    // tr.querySelector('slot[name=time]').textContent = t.time;
-    // tr.querySelector('slot[name=output]').textContent = t.output;
-    document.querySelector('tbody').appendChild(tr);
-  });
-};
diff --git a/src/tests/live-suite/scripts/main.js b/src/tests/live-suite/scripts/main.js
deleted file mode 100644
index bbdee0270..000000000
--- a/src/tests/live-suite/scripts/main.js
+++ /dev/null
@@ -1,6 +0,0 @@
-import convert from './converter.js';
-
-const files = ['realtime-sine.html', 'offline-sine.html', 'audioworklet-addmodule-resolution.html']
-    .map(file => `./../playwright/pages/${ file }`);
-
-convert(files);
diff --git a/src/tests/live-suite/util/audioBufferToWav.js b/src/tests/live-suite/util/audioBufferToWav.js
deleted file mode 100644
index ca45c2b87..000000000
--- a/src/tests/live-suite/util/audioBufferToWav.js
+++ /dev/null
@@ -1,162 +0,0 @@
-// REF: https://github.com/hoch/canopy/blob/master/docs/js/canopy-exporter.js
-
-/**
- * Writes a string to an array starting at a specified offset.
- *
- * @param {string} aString - The string to write to the array.
- * @param {Uint8Array} targetArray - The array to write to.
- * @param {number} offset - The offset in the array to start writing at.
- */
-const _writeStringToArray = (aString, targetArray, offset) => {
-  for (let i = 0; i < aString.length; ++i) {
-    targetArray[offset + i] = aString.charCodeAt(i);
-  }
-};
-
-/**
- * Writes a 16-bit integer to an array at the specified offset.
- *
- * @param {number} aNumber - The 16-bit integer to be written.
- * @param {Uint8Array} targetArray - The array to write the integer to.
- * @param {number} offset - The offset at which to write the integer in the
- *     array.
- */
-const _writeInt16ToArray = (aNumber, targetArray, offset) => {
-  aNumber = Math.floor(aNumber);
-  targetArray[offset] = aNumber & 255; // byte 1
-  targetArray[offset + 1] = (aNumber >> 8) & 255; // byte 2
-};
-
-/**
- * Writes a 32-bit integer to a target array at the specified offset.
- *
- * @param {number} aNumber - The number to be written.
- * @param {Uint8Array} targetArray - The array to write the number to.
- * @param {number} offset - The offset at which to start writing.
- */
-const _writeInt32ToArray = (aNumber, targetArray, offset) => {
-  aNumber = Math.floor(aNumber);
-  targetArray[offset] = aNumber & 255; // byte 1
-  targetArray[offset + 1] = (aNumber >> 8) & 255; // byte 2
-  targetArray[offset + 2] = (aNumber >> 16) & 255; // byte 3
-  targetArray[offset + 3] = (aNumber >> 24) & 255; // byte 4
-};
-
-// Return the bits of the float as a 32-bit integer value. This
-// produces the raw bits; no intepretation of the value is done.
-const _floatBits = (f) => {
-  const buf = new ArrayBuffer(4);
-  (new Float32Array(buf))[0] = f;
-  const bits = (new Uint32Array(buf))[0];
-  // Return as a signed integer.
-  return bits | 0;
-};
-
-/**
- * Converts an audio buffer to an array with the specified bit depth.
- *
- * @param {AudioBuffer} audioBuffer - The audio buffer to convert.
- * @param {Uint8Array} targetArray - The array to store the converted samples.
- * @param {number} offset - The offset in the targetArray to start writing the
- *     converted samples.
- * @param {number} bitDepth - The desired bit depth of the converted samples
- *     (16 or 32).
- */
-const _writeAudioBufferToArray =
-    (audioBuffer, targetArray, offset, bitDepth) => {
-      let index; let channel = 0;
-      const length = audioBuffer.length;
-      const channels = audioBuffer.numberOfChannels;
-      let channelData; let sample;
-
-      // Clamping samples onto the 16-bit resolution.
-      for (index = 0; index < length; ++index) {
-        for (channel = 0; channel < channels; ++channel) {
-          channelData = audioBuffer.getChannelData(channel);
-
-          // Branches upon the requested bit depth
-          if (bitDepth === 16) {
-            sample = channelData[index] * 32768.0;
-            if (sample < -32768) {
-              sample = -32768;
-            } else if (sample > 32767) {
-              sample = 32767;
-            }
-            _writeInt16ToArray(sample, targetArray, offset);
-            offset += 2;
-          } else if (bitDepth === 32) {
-            // This assumes we're going to out 32-float, not 32-bit linear.
-            sample = _floatBits(channelData[index]);
-            _writeInt32ToArray(sample, targetArray, offset);
-            offset += 4;
-          } else {
-            console.error('Invalid bit depth for PCM encoding.');
-            return;
-          }
-        }
-      }
-    };
-
-/**
- * Converts an AudioBuffer object into a WAV file in the form of a binary blob.
- * The resulting WAV file can be used for audio playback or further processing.
- * The function takes two parameters: audioBuffer which represents the audio
- * data, and as32BitFloat which indicates whether the WAV file should be encoded
- * as 32-bit float or 16-bit integer PCM. The unction performs various
- * calculations and writes the necessary headers and data to create the WAV
- * file. Finally, it returns the WAV file as a Blob object with the MIME type
- * audio/wave.
- *
- * @param {AudioBuffer} audioBuffer
- * @param {Boolean} as32BitFloat
- * @return {Blob} Resulting binary blob.
- */
-const audioBufferToWav = (audioBuffer, as32BitFloat) => {
-  // Encoding setup.
-  const frameLength = audioBuffer.length;
-  const numberOfChannels = audioBuffer.numberOfChannels;
-  const sampleRate = audioBuffer.sampleRate;
-  const bitsPerSample = as32BitFloat ? 32 : 16;
-  const bytesPerSample = bitsPerSample / 8;
-  const byteRate = sampleRate * numberOfChannels * bitsPerSample / 8;
-  const blockAlign = numberOfChannels * bitsPerSample / 8;
-  const wavDataByteLength = frameLength * numberOfChannels * bytesPerSample;
-  const headerByteLength = 44;
-  const totalLength = headerByteLength + wavDataByteLength;
-  const waveFileData = new Uint8Array(totalLength);
-  const subChunk1Size = 16;
-  const subChunk2Size = wavDataByteLength;
-  const chunkSize = 4 + (8 + subChunk1Size) + (8 + subChunk2Size);
-
-  _writeStringToArray('RIFF', waveFileData, 0);
-  _writeInt32ToArray(chunkSize, waveFileData, 4);
-  _writeStringToArray('WAVE', waveFileData, 8);
-  _writeStringToArray('fmt ', waveFileData, 12);
-
-  // SubChunk1Size (4)
-  _writeInt32ToArray(subChunk1Size, waveFileData, 16);
-  // AudioFormat (2): 3 means 32-bit float, 1 means integer PCM.
-  _writeInt16ToArray(as32BitFloat ? 3 : 1, waveFileData, 20);
-  // NumChannels (2)
-  _writeInt16ToArray(numberOfChannels, waveFileData, 22);
-  // SampleRate (4)
-  _writeInt32ToArray(sampleRate, waveFileData, 24);
-  // ByteRate (4)
-  _writeInt32ToArray(byteRate, waveFileData, 28);
-  // BlockAlign (2)
-  _writeInt16ToArray(blockAlign, waveFileData, 32);
-  // BitsPerSample (4)
-  _writeInt32ToArray(bitsPerSample, waveFileData, 34);
-  _writeStringToArray('data', waveFileData, 36);
-  // SubChunk2Size (4)
-  _writeInt32ToArray(subChunk2Size, waveFileData, 40);
-
-  // Write actual audio data starting at offset 44.
-  _writeAudioBufferToArray(audioBuffer, waveFileData, 44, bitsPerSample);
-
-  return new Blob([waveFileData], {
-    type: 'audio/wav',
-  });
-};
-
-export default audioBufferToWav;
diff --git a/src/tests/live-suite/util/concat.js b/src/tests/live-suite/util/concat.js
deleted file mode 100644
index 7be0fcc18..000000000
--- a/src/tests/live-suite/util/concat.js
+++ /dev/null
@@ -1,16 +0,0 @@
-export default (...arrays) => {
-  // Calculate the total length of the new Float32Array
-  const totalLength = arrays.reduce((acc, curr) => acc + curr.length, 0);
-
-  // Create a new Float32Array with the total length
-  const result = new Float32Array(totalLength);
-
-  // Copy elements from each input array into the new array
-  let offset = 0;
-  arrays.forEach((array) => {
-    result.set(array, offset);
-    offset += array.length;
-  });
-
-  return result;
-};
diff --git a/src/tests/playwright/pages/audioworklet-addmodule-resolution.html b/src/tests/playwright/pages/audioworklet-addmodule-resolution.html
index 4608d6664..b5ef9ae64 100644
--- a/src/tests/playwright/pages/audioworklet-addmodule-resolution.html
+++ b/src/tests/playwright/pages/audioworklet-addmodule-resolution.html
@@ -8,31 +8,28 @@
       const filePath = 'processors/dummy-processor.js';
 
-      window.tests = {
-        ...window.tests,
-        addModulesPromise: new Promise(async (resolve, reject) => {
-          await realtimeContext.audioWorklet.addModule(filePath);
-          await offlineContext.audioWorklet.addModule(filePath);
+      window.tests = new Promise(async (resolve, reject) => {
+        await realtimeContext.audioWorklet.addModule(filePath);
+        await offlineContext.audioWorklet.addModule(filePath);
 
-          try {
-            // Test if the browser does not crash upon addModule() call after the
-            // realtime context construction.
-            let realtimeDummyWorklet = new AudioWorkletNode(realtimeContext, 'dummy');
-            realtimeDummyWorklet.connect(realtimeContext.destination);
-            window.realtimeDummyWorkletLoaded = realtimeDummyWorklet instanceof AudioWorkletNode;
+        try {
+          // Test if the browser does not crash upon addModule() call after the
+          // realtime context construction.
+          let realtimeDummyWorklet = new AudioWorkletNode(realtimeContext, 'dummy');
+          realtimeDummyWorklet.connect(realtimeContext.destination);
+          window.realtimeDummyWorkletLoaded = realtimeDummyWorklet instanceof AudioWorkletNode;
 
-            // Test if the browser does not crash upon addModule() call after the
-            // offline context construction.
-            let offlineDummyWorklet = new AudioWorkletNode(offlineContext, 'dummy');
-            offlineDummyWorklet.connect(offlineContext.destination);
-            window.offlineDummyWorkletLoaded = offlineDummyWorklet instanceof AudioWorkletNode;
+          // Test if the browser does not crash upon addModule() call after the
+          // offline context construction.
+          let offlineDummyWorklet = new AudioWorkletNode(offlineContext, 'dummy');
+          offlineDummyWorklet.connect(offlineContext.destination);
+          window.offlineDummyWorkletLoaded = offlineDummyWorklet instanceof AudioWorkletNode;
 
-            resolve();
-          } catch(error) {
-            reject(error);
-          }
-        })
-      };
+          resolve();
+        } catch(error) {
+          reject(error);
+        }
+      });
       AudioWorklet Add Module Resolution
diff --git a/src/tests/playwright/pages/dsp-graph-evaluation.html b/src/tests/playwright/pages/dsp-graph-evaluation.html
index 9d5acc3f3..febe58f09 100644
--- a/src/tests/playwright/pages/dsp-graph-evaluation.html
+++ b/src/tests/playwright/pages/dsp-graph-evaluation.html
@@ -5,30 +5,28 @@
       import { evaluateGraph } from './util/audio-comparer.js';
 
      // My Render Length (seconds)
-      window.tests = {
-        ...window.tests,
-        graphEvalPromise: new Promise(async (resolve) => {
-          const length = 1;
-          // My DSP Graph (function)
-          const myGraph = ctx => {
-            const osc = new OscillatorNode(ctx);
-            osc.type = 'sawtooth';
-            const gain = new GainNode(ctx);
-            gain.gain.value = 2.2;
-            const biq = new BiquadFilterNode(ctx);
-            biq.type = 'bandpass';
-            biq.frequency.value = 1000;
-            biq.Q.value = 10;
-            osc.connect(gain).connect(biq).connect(ctx.destination);
+      // noinspection JSConstantReassignment,JSValidateTypes
+      window.test = new Promise(async (resolve) => {
+        const length = 1;
+        // My DSP Graph (function)
+        const myGraph = ctx => {
+          const osc = new OscillatorNode(ctx);
+          osc.type = 'sawtooth';
+          const gain = new GainNode(ctx);
+          gain.gain.value = 2.2;
+          const biq = new BiquadFilterNode(ctx);
+          biq.type = 'bandpass';
+          biq.frequency.value = 1000;
+          biq.Q.value = 10;
+          osc.connect(gain).connect(biq).connect(ctx.destination);
 
-            osc.start();
-            osc.stop(ctx.currentTime + length);
-          }
+          osc.start();
+          osc.stop(ctx.currentTime + length);
+        }
 
-          const score = await evaluateGraph(myGraph, length);
-          resolve({ "score": score });
-        })
-      }
+        const score = await evaluateGraph(myGraph, length);
+        resolve({score});
+      });
       DSP Graph Evaluation
diff --git a/src/tests/live-suite/index.html b/src/tests/playwright/pages/index.html
similarity index 86%
rename from src/tests/live-suite/index.html
rename to src/tests/playwright/pages/index.html
index b1152ee29..c6102187d 100644
--- a/src/tests/live-suite/index.html
+++ b/src/tests/playwright/pages/index.html
@@ -3,8 +3,8 @@
-
-
+
+
     Web Audio Test Suite
diff --git a/src/tests/playwright/pages/live-suite/scripts/converter.js b/src/tests/playwright/pages/live-suite/scripts/converter.js
new file mode 100644
index 000000000..22a39fc57
--- /dev/null
+++ b/src/tests/playwright/pages/live-suite/scripts/converter.js
@@ -0,0 +1,40 @@
+export default async tests => {
+  const htmls = await Promise.all(tests.map(async t => (await fetch(t)).text()));
+
+  const template = document.querySelector('#row');
+  htmls.forEach((html) => {
+    const dom = new DOMParser().parseFromString(html, 'text/html');
+    const scriptContent = dom.querySelector('script').innerText;
+
+    const tr = template.content.cloneNode(true);
+    const id = dom.title.replace(/[^a-z0-9]/gi, '-').toLowerCase();
+    tr.childNodes[1].id = id;
+    tr.querySelector('slot[name=name]').textContent = dom.title;
+    tr.querySelector('button').addEventListener('click', async () => {
+      const start = performance.now();
+
+      const script = document.createElement('script');
+      script.defer = true;
+      script.type = 'module';
+      script.textContent = scriptContent;
+      document.head.appendChild(script);
+
+      // TODO: hacky
+      await new Promise(resolve => setTimeout(resolve, 500));
+
+      const t = await window.test;
+
+      const diff = performance.now() - start;
+
+      document.querySelector(`#${id} slot[name=result]`).textContent = 'done';
+      document.querySelector(`#${id} slot[name=time]`).textContent = `${(diff).toFixed(2)}ms`;
+      document.querySelector(`#${id} slot[name=output]`).textContent = 'output';
+
+      console.log(t);
+
+      document.head.removeChild(script);
+    });
+
+    document.querySelector('tbody').appendChild(tr);
+  });
+};
diff --git a/src/tests/playwright/pages/live-suite/scripts/main.js b/src/tests/playwright/pages/live-suite/scripts/main.js
new file mode 100644
index 000000000..0c7f8879f
--- /dev/null
+++ b/src/tests/playwright/pages/live-suite/scripts/main.js
@@ -0,0 +1,10 @@
+import convert from './converter.js';
+
+const files = [
+  'realtime-sine.html',
+  'offline-sine.html',
+  'audioworklet-addmodule-resolution.html',
+  'dsp-graph-evaluation.html'
+];
+
+convert(files);
diff --git a/src/tests/live-suite/style.css b/src/tests/playwright/pages/live-suite/style.css
similarity index 100%
rename from src/tests/live-suite/style.css
rename to src/tests/playwright/pages/live-suite/style.css
diff --git a/src/tests/playwright/pages/offline-sine.html b/src/tests/playwright/pages/offline-sine.html
index 2edcfdec4..dfc4a9ef5 100644
--- a/src/tests/playwright/pages/offline-sine.html
+++ b/src/tests/playwright/pages/offline-sine.html
@@ -7,9 +7,8 @@
       const numChannels = 1;
       const freq = 441;
 
-      !('tests' in window) && (window.tests = {});
-
-      window.tests.bufferDataPromise = new Promise((resolve, reject) => {
+      // noinspection JSConstantReassignment,JSValidateTypes
+      window.test = new Promise((resolve) => {
        const offlineContext = new OfflineAudioContext(numChannels, length * sampleRate, sampleRate);
        const osc = offlineContext.createOscillator();
        osc.type = 'sine';
@@ -18,7 +17,7 @@
        osc.start();
 
        offlineContext.startRendering().then((buffer) => {
-          resolve(buffer.getChannelData(0));
+          resolve({buffer: buffer.getChannelData(0)});
        });
      });
diff --git a/src/tests/playwright/pages/realtime-sine.html b/src/tests/playwright/pages/realtime-sine.html
index a64f571c3..9ca32fb8a 100644
--- a/src/tests/playwright/pages/realtime-sine.html
+++ b/src/tests/playwright/pages/realtime-sine.html
@@ -5,24 +5,22 @@
       import record from './processors/recorder/recorder-main.js';
 
      // eslint-disable-next-line no-async-promise-executor
-      window.tests = {
-        ...window.tests,
-        recordBufferPromise: new Promise(async (resolve) => {
-          const ctx = new AudioContext({sampleRate: 48000});
-          const helloSine = new OscillatorNode(ctx);
-          const length = 1; // second
-          const {recorder, buffer} = await record(ctx, length);
-          helloSine.connect(recorder).connect(ctx.destination);
+      // noinspection JSConstantReassignment,JSValidateTypes
+      window.test = new Promise(async (resolve) => {
+        const ctx = new AudioContext({sampleRate: 48000});
+        const helloSine = new OscillatorNode(ctx);
+        const length = 1; // second
+        const {recorder, buffer} = await record(ctx, length);
+        helloSine.connect(recorder).connect(ctx.destination);
 
-          helloSine.start();
-          helloSine.stop(ctx.currentTime + 1);
-          const audioBuffer = await buffer;
-          const float32Buffer = audioBuffer.getChannelData(0);
-
-          await ctx.close();
-          resolve({buffer: float32Buffer});
-        })
-      }
+        helloSine.start();
+        helloSine.stop(ctx.currentTime + 1);
+        const audioBuffer = await buffer;
+        const float32Buffer = audioBuffer.getChannelData(0);
+
+        await ctx.close();
+        resolve({buffer: float32Buffer});
+      });
       Realtime Sine
diff --git a/src/tests/playwright/runner.spec.ts b/src/tests/playwright/runner.spec.ts
index 3c354e8a1..8697d4485 100644
--- a/src/tests/playwright/runner.spec.ts
+++ b/src/tests/playwright/runner.spec.ts
@@ -7,7 +7,7 @@ test('Hello Sine (realtime)', async ({ page }) => {
   // wait for the recordBufferPromise to resolve to recorded audio buffer
   const recordBufferPromise =
-      await page.evaluate(() => (window as any).tests.recordBufferPromise);
+      await page.evaluate(() => (window as any).test);
   const bufferData =
       new Float32Array((Object as any).values(recordBufferPromise.buffer));
 
   // load in reference samples (python numpy generated)
@@ -20,8 +20,8 @@ test('Hello Sine (realtime)', async ({ page }) => {
     numCorrect += beCloseTo(bufferData[i], myRefData[i], 0.001) ? 1 : 0;
   }
 
-  // expect 99.99%
-  expect(numCorrect / bufferData.length).toBeGreaterThan(.9999);
+  // expect 99.99%
+  expect(numCorrect / bufferData.length).toBeGreaterThan(.9999);
 });
 
 // @ts-ignore
@@ -34,9 +34,9 @@ test('Hello Sine (offline)', async ({page}) => {
   await page.goto('pages/offline-sine.html');
 
   // Await promise from bufferData containing float32Array
-  const bufferObject = await page.evaluate(() => (window as any).tests.bufferDataPromise);
+  const bufferObject = await page.evaluate(() => (window as any).test);
   const bufferData =
-      new Float32Array((Object as any).values(bufferObject));
+      new Float32Array((Object as any).values(bufferObject.buffer));
 
   // Check bufferData period / frequency
   expect(bufferData.length).toBe(sampleRate * length * numChannels);
@@ -49,7 +49,7 @@ test('Hello Sine (offline)', async ({page}) => {
 test('AudioWorklet Add Module Resolution', async ({page}) => {
   await page.goto('pages/audioworklet-addmodule-resolution.html');
 
-  const addModulesPromise = await page.evaluate(() => (window as any).tests.addModulesPromise);
+  const addModulesPromise = await page.evaluate(() => (window as any).test);
 
   // module loading after realtime context creation
   const realtimeDummyWorkletLoaded = await page.evaluate(() => realtimeDummyWorkletLoaded);
@@ -68,6 +68,6 @@ test('AudioWorklet Add Module Resolution', async ({page}) => {
 test('DSP Graph Evaluation', async ({page}) => {
   await page.goto('pages/dsp-graph-evaluation.html');
-  const graphEvalPromise = await page.evaluate(() => (window as any).tests.graphEvalPromise);
+  const graphEvalPromise = await page.evaluate(() => (window as any).test);
 
   expect(graphEvalPromise.score).toBeGreaterThan(.9999);
 });