From 471c8f9e3149f9e32b83fc20146f33d4d0332504 Mon Sep 17 00:00:00 2001 From: Greg Trihus Date: Tue, 5 May 2026 16:24:16 -0500 Subject: [PATCH 1/2] TT-7276 Enhance WSAudioPlayer and WavRecorder for improved audio preview handling - Introduced a timeslice constant for live waveform preview in WSAudioPlayer. - Added logic to suppress late preview ticks during recording stops. - Updated WavRecorder to emit only delta chunks for previews, optimizing performance. - Implemented new tests for audio media recorder and delta chunk handling to ensure functionality. --- src/renderer/src/components/WSAudioPlayer.tsx | 51 +++++++--- src/renderer/src/crud/AudioMediaRecorder.ts | 7 +- src/renderer/src/crud/WavRecorder.ts | 93 +++++++++++++++++-- .../crud/__tests__/AudioMediaRecorder.test.ts | 64 +++++++++++++ .../takeRecordingDeltaChunks.test.ts | 27 ++++++ 5 files changed, 220 insertions(+), 22 deletions(-) create mode 100644 src/renderer/src/crud/__tests__/AudioMediaRecorder.test.ts create mode 100644 src/renderer/src/crud/__tests__/takeRecordingDeltaChunks.test.ts diff --git a/src/renderer/src/components/WSAudioPlayer.tsx b/src/renderer/src/components/WSAudioPlayer.tsx index 9b269e44..eb094f37 100644 --- a/src/renderer/src/components/WSAudioPlayer.tsx +++ b/src/renderer/src/components/WSAudioPlayer.tsx @@ -222,6 +222,8 @@ const TIMER_KEY = 'F6,CTRL+6'; const RECORD_KEY = 'F9,CTRL+9'; const LEFT_KEY = 'CTRL+ARROWLEFT'; const RIGHT_KEY = 'CTRL+ARROWRIGHT'; +/** MediaRecorder / WavRecorder timeslice for live waveform preview (not final quality). */ +const RECORD_PREVIEW_TIMESLICE_MS = 2000; function WSAudioPlayer(props: IProps) { const { @@ -386,6 +388,10 @@ function WSAudioPlayer(props: IProps) { const [pxPerSec, setPxPerSecx] = useState(maxZoom); const pxPerSecRef = useRef(maxZoom); const insertingRef = useRef(false); + /** Bumped when user stops recording so in-flight preview inserts are ignored after await. 
*/ + const recordPreviewGenerationRef = useRef(0); + /** True after Stop until final `onRecordStop` finishes — blocks late preview ticks. */ + const recordPreviewSuppressedRef = useRef(false); const currentSegmentRef = useRef(undefined); // Recording timer refs for local progress/duration while recording const recElapsedRef = useRef(0); @@ -677,13 +683,14 @@ function WSAudioPlayer(props: IProps) { ) return false; if (!recordingRef.current) { + recordPreviewSuppressedRef.current = false; setPxPerSec(100); setBlobReady && setBlobReady(false); wsPause(); //stop if playing recordStartPosition.current = wsPosition(); wsStartRecord(); recordingStartPendingRef.current = true; - startRecording(500).then((value) => { + startRecording(RECORD_PREVIEW_TIMESLICE_MS).then((value) => { recordingStartPendingRef.current = false; setRecording(value); }); @@ -693,6 +700,8 @@ function WSAudioPlayer(props: IProps) { ? recordStartPosition.current : undefined; } else { + recordPreviewGenerationRef.current += 1; + recordPreviewSuppressedRef.current = true; setProcessingRecording(true); recordingStartPendingRef.current = false; stopRecording(); @@ -1046,19 +1055,24 @@ function WSAudioPlayer(props: IProps) { async function onRecordStop(blob: Blob) { recordingStartPendingRef.current = false; - await wsInsertAudio( - blob, - undefined, - recordStartPosition.current, - recordOverwritePosition.current - ); - recordOverwritePosition.current = undefined; - setProcessingRecording(false); - void handleChanged(); + try { + await wsInsertAudio( + blob, + undefined, + recordStartPosition.current, + recordOverwritePosition.current + ); + recordOverwritePosition.current = undefined; + void handleChanged(); + } finally { + recordPreviewSuppressedRef.current = false; + setProcessingRecording(false); + } } function onRecordError(e: any) { recordingStartPendingRef.current = false; + recordPreviewSuppressedRef.current = false; setProcessingRecording(false); if (autostartTimer.current && e.error === 'No 
mediaRecorder') { @@ -1070,14 +1084,29 @@ function WSAudioPlayer(props: IProps) { } async function onRecordDataAvailable(blob: Blob) { - if (blob.size > 0) { + if (blob.size <= 0) return; + if (recordPreviewSuppressedRef.current) return; + const previewGen = recordPreviewGenerationRef.current; + try { const newPos = await wsInsertAudio( blob, undefined, recordStartPosition.current, recordOverwritePosition.current ); + if ( + recordPreviewSuppressedRef.current || + previewGen !== recordPreviewGenerationRef.current + ) { + return; + } if (insertingRef.current) recordOverwritePosition.current = newPos; + } catch (err) { + logError( + Severity.error, + errorReporter, + err instanceof Error ? err : new Error(String(err)) + ); } } diff --git a/src/renderer/src/crud/AudioMediaRecorder.ts b/src/renderer/src/crud/AudioMediaRecorder.ts index f9c25f76..bcf6a364 100644 --- a/src/renderer/src/crud/AudioMediaRecorder.ts +++ b/src/renderer/src/crud/AudioMediaRecorder.ts @@ -48,10 +48,9 @@ export function createAudioMediaRecorder( mediaRecorder.ondataavailable = (event) => { if (event.data && event.data.size > 0) { recordedChunks.push(event.data); - // Combine all accumulated chunks into a single blob (complete recording so far) - const accumulatedBlob = new Blob(recordedChunks); - // Pass complete accumulated blob to onDataAvailable - wavesurfer will decode it - onDataAvailable(accumulatedBlob); + // Preview: emit only this timeslice chunk so WaveSurfer does not re-decode the full + // recording every tick (O(n²) decode/load). Final blob still merges all chunks in stop(). 
+ onDataAvailable(event.data); } }; diff --git a/src/renderer/src/crud/WavRecorder.ts b/src/renderer/src/crud/WavRecorder.ts index c09763ec..934753ad 100644 --- a/src/renderer/src/crud/WavRecorder.ts +++ b/src/renderer/src/crud/WavRecorder.ts @@ -1,6 +1,21 @@ import { convertToWav } from '../utils/wav'; import { APMRecorder } from './useWavRecorder'; +/** New Float32 chunks since last preview emit (exported for unit tests). */ +export function takeRecordingDeltaChunks( + chunks: Float32Array[], + lastEmittedExclusiveIndex: number +): { delta: Float32Array[]; nextIndex: number } { + const len = chunks.length; + if (len <= lastEmittedExclusiveIndex) { + return { delta: [], nextIndex: lastEmittedExclusiveIndex }; + } + return { + delta: chunks.slice(lastEmittedExclusiveIndex), + nextIndex: len, + }; +} + // Web Audio API-based WAV recorder using AudioWorklet export function createWavRecorder( stream: MediaStream, @@ -14,6 +29,9 @@ export function createWavRecorder( let workletLoaded = false; let dataAvailableTimer: ReturnType | null = null; let timeSlice: number = 1000; // Default 1 second + let lastEmittedChunkIndex = 0; + let pendingRecordingCompleteResolve: (() => void) | null = null; + let previewTickInFlight = false; async function initializeWorklet(): Promise { if (workletLoaded) return; @@ -112,6 +130,8 @@ export function createWavRecorder( case 'recordingComplete': // All audio data has been collected audioData = data; + pendingRecordingCompleteResolve?.(); + pendingRecordingCompleteResolve = null; break; } }; @@ -141,6 +161,8 @@ export function createWavRecorder( isRecording = true; audioData = []; + lastEmittedChunkIndex = 0; + pendingRecordingCompleteResolve = null; // Send start message to worklet workletNode?.port.postMessage({ type: 'startRecording' }); @@ -158,11 +180,27 @@ export function createWavRecorder( clearInterval(dataAvailableTimer); } - dataAvailableTimer = setInterval(async () => { - if (isRecording && audioData.length > 0) { - // Convert 
AudioBuffer to WAV blob before calling onDataAvailable - onDataAvailable(await convertAudioDataToWav()); - } + dataAvailableTimer = setInterval(() => { + void (async () => { + if (!isRecording || previewTickInFlight) return; + previewTickInFlight = true; + try { + const { delta, nextIndex } = takeRecordingDeltaChunks( + audioData, + lastEmittedChunkIndex + ); + if (delta.length === 0) return; + const blob = await deltaChunksToWavBlob(delta); + if (blob.size > 0) { + onDataAvailable(blob); + } + lastEmittedChunkIndex = nextIndex; + } catch (e) { + console.error('WavRecorder preview tick failed:', e); + } finally { + previewTickInFlight = false; + } + })(); }, timeSlice); } @@ -193,6 +231,26 @@ export function createWavRecorder( } } + function deltaChunksToWavBlob(chunks: Float32Array[]): Promise { + const sampleRate = audioContext.sampleRate; + const channels = 1; + if (chunks.length === 0) { + return Promise.resolve(new Blob([], { type: 'audio/wav' })); + } + const length = chunks.reduce((sum, chunk) => sum + chunk.length, 0); + if (length === 0) { + return Promise.resolve(new Blob([], { type: 'audio/wav' })); + } + const audioBuffer = audioContext.createBuffer(channels, length, sampleRate); + const combinedData = audioBuffer.getChannelData(0); + let offset = 0; + for (const chunk of chunks) { + combinedData.set(chunk, offset); + offset += chunk.length; + } + return audioBufferToWavBlob(audioBuffer); + } + function createAudioBuffer(): AudioBuffer { const sampleRate = audioContext.sampleRate; const channels = 1; @@ -233,11 +291,32 @@ export function createWavRecorder( workletNode.disconnect(); } + const waitComplete = new Promise((resolve) => { + pendingRecordingCompleteResolve = resolve; + }); + // Send stop message to worklet workletNode?.port.postMessage({ type: 'stopRecording' }); - // Wait a bit for the worklet to process the stop message - await new Promise((resolve) => setTimeout(resolve, 100)); + const RECORDING_COMPLETE_MS = 15000; + try { + await 
Promise.race([ + waitComplete, + new Promise((_, reject) => + setTimeout( + () => + reject( + new Error('WavRecorder: recordingComplete timeout from worklet') + ), + RECORDING_COMPLETE_MS + ) + ), + ]); + } catch (e) { + console.error(e); + pendingRecordingCompleteResolve = null; + } + return convertAudioDataToWav(); } diff --git a/src/renderer/src/crud/__tests__/AudioMediaRecorder.test.ts b/src/renderer/src/crud/__tests__/AudioMediaRecorder.test.ts new file mode 100644 index 00000000..44152cb1 --- /dev/null +++ b/src/renderer/src/crud/__tests__/AudioMediaRecorder.test.ts @@ -0,0 +1,64 @@ +import { createAudioMediaRecorder } from '../AudioMediaRecorder'; + +class MockMediaRecorder { + static instances: MockMediaRecorder[] = []; + state: RecordingState = 'inactive'; + ondataavailable: ((ev: BlobEvent) => void) | null = null; + onstop: (() => void) | null = null; + onerror: ((ev: Event) => void) | null = null; + + constructor(public stream: MediaStream) { + MockMediaRecorder.instances.push(this); + } + + start(_timeSlice?: number): void { + this.state = 'recording'; + } + + stop(): void { + if (this.state === 'inactive') return; + this.state = 'inactive'; + queueMicrotask(() => this.onstop?.()); + } + + requestData(): void { + // no-op for tests + } + + /** Test helper: simulate browser firing one chunk */ + simulateChunk(data: Blob): void { + this.ondataavailable?.({ data } as BlobEvent); + } +} + +describe('createAudioMediaRecorder', () => { + beforeEach(() => { + MockMediaRecorder.instances = []; + global.MediaRecorder = MockMediaRecorder as unknown as typeof MediaRecorder; + global.AudioContext = jest.fn().mockImplementation(() => ({ + state: 'running', + resume: jest.fn().mockResolvedValue(undefined), + close: jest.fn().mockResolvedValue(undefined), + })) as unknown as typeof AudioContext; + }); + + it('passes each timeslice blob to onDataAvailable (delta), stop() returns merged blob', async () => { + const onDataAvailable = jest.fn(); + const stream = {} as 
MediaStream; + const rec = createAudioMediaRecorder(stream, onDataAvailable); + + await rec.start(1000); + const inst = MockMediaRecorder.instances[0]; + const a = new Blob(['a'], { type: 'audio/webm' }); + const b = new Blob(['bb'], { type: 'audio/webm' }); + inst.simulateChunk(a); + inst.simulateChunk(b); + + expect(onDataAvailable).toHaveBeenCalledTimes(2); + expect(onDataAvailable.mock.calls[0][0]).toBe(a); + expect(onDataAvailable.mock.calls[1][0]).toBe(b); + + const finalBlob = await rec.stop(); + expect(finalBlob.size).toBe(a.size + b.size); + }); +}); diff --git a/src/renderer/src/crud/__tests__/takeRecordingDeltaChunks.test.ts b/src/renderer/src/crud/__tests__/takeRecordingDeltaChunks.test.ts new file mode 100644 index 00000000..6b55041d --- /dev/null +++ b/src/renderer/src/crud/__tests__/takeRecordingDeltaChunks.test.ts @@ -0,0 +1,27 @@ +import { takeRecordingDeltaChunks } from '../WavRecorder'; + +describe('takeRecordingDeltaChunks', () => { + it('returns empty delta when no new chunks', () => { + const c1 = new Float32Array([1, 2]); + const chunks = [c1]; + expect(takeRecordingDeltaChunks(chunks, 1)).toEqual({ + delta: [], + nextIndex: 1, + }); + }); + + it('returns only new chunks since last index', () => { + const c1 = new Float32Array([1]); + const c2 = new Float32Array([2, 3]); + const c3 = new Float32Array([4]); + const chunks = [c1, c2, c3]; + expect(takeRecordingDeltaChunks(chunks, 0)).toEqual({ + delta: [c1, c2, c3], + nextIndex: 3, + }); + expect(takeRecordingDeltaChunks(chunks, 2)).toEqual({ + delta: [c3], + nextIndex: 3, + }); + }); +}); From 1749c5c9fd364f7564130604072c03b4e97112c2 Mon Sep 17 00:00:00 2001 From: Greg Trihus Date: Tue, 5 May 2026 20:41:13 -0500 Subject: [PATCH 2/2] Refactor audio handling in WSAudioPlayer and AudioMediaRecorder for improved preview functionality - AudioMediaRecorder: revert to emitting the accumulated blob each tick (individual container-format chunks are not independently decodable) - WavRecorder: delta-only preview ticks with overlap guard - WavRecorder: replace setTimeout(100) with worklet 
recordingComplete promise - WSAudioPlayer: stale preview guard with generation counter - Timeslice constant and usage - Documented the RECORD_PREVIEW_TIMESLICE_MS choice in WSAudioPlayer for waveform preview responsiveness. - Modified AudioMediaRecorder to emit an accumulated blob for each data available event, ensuring decodable previews. - New tests for delta chunk logic and AudioMediaRecorder --- src/renderer/src/components/WSAudioPlayer.tsx | 11 +++++++++-- src/renderer/src/crud/AudioMediaRecorder.ts | 12 +++++++----- src/renderer/src/crud/WavRecorder.ts | 16 ++++++++++++---- .../crud/__tests__/AudioMediaRecorder.test.ts | 10 ++++++---- 4 files changed, 34 insertions(+), 15 deletions(-) diff --git a/src/renderer/src/components/WSAudioPlayer.tsx b/src/renderer/src/components/WSAudioPlayer.tsx index eb094f37..d19bec0b 100644 --- a/src/renderer/src/components/WSAudioPlayer.tsx +++ b/src/renderer/src/components/WSAudioPlayer.tsx @@ -222,7 +222,10 @@ const TIMER_KEY = 'F6,CTRL+6'; const RECORD_KEY = 'F9,CTRL+9'; const LEFT_KEY = 'CTRL+ARROWLEFT'; const RIGHT_KEY = 'CTRL+ARROWRIGHT'; -/** MediaRecorder / WavRecorder timeslice for live waveform preview (not final quality). */ +/** + * MediaRecorder / WavRecorder timeslice for live waveform preview (not final quality). + * 2000ms balances preview responsiveness vs. decode/insert overhead. + */ const RECORD_PREVIEW_TIMESLICE_MS = 2000; function WSAudioPlayer(props: IProps) { @@ -1100,7 +1103,11 @@ function WSAudioPlayer(props: IProps) { ) { return; } - if (insertingRef.current) recordOverwritePosition.current = newPos; + // With delta-only preview chunks, each tick contains only NEW audio. + // Always advance the overwrite position so the next delta is appended + // after this one (instead of replacing it at the same start position). + // Without this, the live waveform only shows the latest delta chunk. 
+ recordOverwritePosition.current = newPos; } catch (err) { logError( Severity.error, diff --git a/src/renderer/src/crud/AudioMediaRecorder.ts b/src/renderer/src/crud/AudioMediaRecorder.ts index bcf6a364..43b788dd 100644 --- a/src/renderer/src/crud/AudioMediaRecorder.ts +++ b/src/renderer/src/crud/AudioMediaRecorder.ts @@ -43,14 +43,16 @@ export function createAudioMediaRecorder( try { mediaRecorder = new MediaRecorder(mediaStream); - // Collect chunks as they become available (for final output) - // Combine all accumulated chunks and pass to onDataAvailable callback - let wavesurfer decode + // Collect chunks as they become available. We must emit the accumulated blob + // (not just event.data) because individual MediaRecorder chunks from container + // formats (webm/mp4) after the first are not independently decodable — only + // the first chunk carries the container header. WaveSurfer needs a complete, + // decodable blob for preview. WavRecorder (raw PCM) uses a true delta path. mediaRecorder.ondataavailable = (event) => { if (event.data && event.data.size > 0) { recordedChunks.push(event.data); - // Preview: emit only this timeslice chunk so WaveSurfer does not re-decode the full - // recording every tick (O(n²) decode/load). Final blob still merges all chunks in stop(). 
- onDataAvailable(event.data); + const accumulatedBlob = new Blob(recordedChunks); + onDataAvailable(accumulatedBlob); } }; diff --git a/src/renderer/src/crud/WavRecorder.ts b/src/renderer/src/crud/WavRecorder.ts index 934753ad..bf79ceb7 100644 --- a/src/renderer/src/crud/WavRecorder.ts +++ b/src/renderer/src/crud/WavRecorder.ts @@ -299,22 +299,30 @@ export function createWavRecorder( workletNode?.port.postMessage({ type: 'stopRecording' }); const RECORDING_COMPLETE_MS = 15000; + let timeoutHandle: ReturnType | null = null; try { await Promise.race([ waitComplete, - new Promise((_, reject) => - setTimeout( + new Promise((_, reject) => { + timeoutHandle = setTimeout( () => reject( new Error('WavRecorder: recordingComplete timeout from worklet') ), RECORDING_COMPLETE_MS - ) - ), + ); + }), ]); } catch (e) { console.error(e); pendingRecordingCompleteResolve = null; + } finally { + // Clear the timeout on success so it cannot fire later and produce + // an unhandled rejection long after stop() has resolved. 
+ if (timeoutHandle) { + clearTimeout(timeoutHandle); + timeoutHandle = null; + } } return convertAudioDataToWav(); diff --git a/src/renderer/src/crud/__tests__/AudioMediaRecorder.test.ts b/src/renderer/src/crud/__tests__/AudioMediaRecorder.test.ts index 44152cb1..6cca964d 100644 --- a/src/renderer/src/crud/__tests__/AudioMediaRecorder.test.ts +++ b/src/renderer/src/crud/__tests__/AudioMediaRecorder.test.ts @@ -11,7 +11,7 @@ class MockMediaRecorder { MockMediaRecorder.instances.push(this); } - start(_timeSlice?: number): void { + start(): void { this.state = 'recording'; } @@ -42,7 +42,7 @@ describe('createAudioMediaRecorder', () => { })) as unknown as typeof AudioContext; }); - it('passes each timeslice blob to onDataAvailable (delta), stop() returns merged blob', async () => { + it('passes accumulated blob to onDataAvailable each tick, stop() returns merged blob', async () => { const onDataAvailable = jest.fn(); const stream = {} as MediaStream; const rec = createAudioMediaRecorder(stream, onDataAvailable); @@ -54,9 +54,11 @@ describe('createAudioMediaRecorder', () => { inst.simulateChunk(a); inst.simulateChunk(b); + // Preview must emit an accumulated (decodable) blob each tick because + // individual container-format chunks are not independently decodable. expect(onDataAvailable).toHaveBeenCalledTimes(2); - expect(onDataAvailable.mock.calls[0][0]).toBe(a); - expect(onDataAvailable.mock.calls[1][0]).toBe(b); + expect(onDataAvailable.mock.calls[0][0].size).toBe(a.size); + expect(onDataAvailable.mock.calls[1][0].size).toBe(a.size + b.size); const finalBlob = await rec.stop(); expect(finalBlob.size).toBe(a.size + b.size);