AudioPlayer: make playback methods cancelable & rename them

This commit is contained in:
Enrico Ros
2026-03-09 23:23:38 -07:00
parent 39beda5519
commit 8373c1c785
5 changed files with 148 additions and 17 deletions
+4 -3
View File
@@ -13,10 +13,11 @@ export function usePlayUrlInterval(url: string | null, firstDelay: number = 0, r
React.useEffect(() => {
if (!url) return;
const abortController = new AbortController();
let timer2: any = null;
const playFirstTime = () => {
const playAudio = () => AudioPlayer.playUrl(url);
const playAudio = () => AudioPlayer.playUrl(url, abortController.signal);
void playAudio();
timer2 = repeatMs > 0 ? setInterval(playAudio, repeatMs) : null;
};
@@ -25,8 +26,8 @@ export function usePlayUrlInterval(url: string | null, firstDelay: number = 0, r
return () => {
clearTimeout(timer1);
if (timer2)
clearInterval(timer2);
timer2 && clearInterval(timer2);
abortController?.abort();
};
}, [firstDelay, repeatMs, url]);
}
+2 -2
View File
@@ -82,7 +82,7 @@ export class AudioAutoPlayer {
// combine all chunks and play
const combined = combine_ArrayBuffers_To_Uint8Array(this.chunksAccumulator).buffer;
this.chunksAccumulator = []; // Clear after combining
AudioPlayer.playAudioFull(combined).finally(() => {
AudioPlayer.playFullBuffer(combined).finally(() => {
if (!this.isStopped)
this.playbackEndResolve?.();
});
@@ -107,7 +107,7 @@ export class AudioAutoPlayer {
console.warn('[DEV] AudioAutoPlayer: playFullBuffer called twice');
this.isPlayingFullBuffer = true;
AudioPlayer.playAudioFull(buffer).finally(() => {
AudioPlayer.playFullBuffer(buffer).finally(() => {
if (!this.isStopped)
this.playbackEndResolve?.();
});
+59 -11
View File
@@ -1,24 +1,56 @@
export namespace AudioPlayer {
/**
* Plays an audio file from a URL (e.g. an MP3 file).
* Plays an audio file from a URL. Resolves when playback ends.
* If a signal is provided and aborted, playback stops and the promise resolves.
*/
export async function playUrl(url: string): Promise<void> {
return new Promise((resolve, reject) => {
export function playUrl(url: string, signal?: AbortSignal): Promise<void> {
if (signal?.aborted || !url) return Promise.resolve();
return new Promise<void>((resolve, reject) => {
const audio = new Audio(url);
audio.onended = () => resolve();
audio.onerror = (e) => reject(new Error(`Error playing audio: ${e}`));
audio.play().catch(reject);
const cleanup = () => {
signal?.removeEventListener('abort', onSignalAbort);
audio.onended = null;
audio.onerror = null;
};
const onSignalAbort = () => {
cleanup();
audio.pause();
audio.src = '';
resolve();
};
signal?.addEventListener('abort', onSignalAbort, { once: true });
audio.onended = () => {
cleanup();
resolve();
};
audio.onerror = (e) => {
cleanup();
reject(new Error(`Error playing audio: ${e}`));
};
audio.play().catch((err) => {
cleanup();
reject(err);
});
});
}
/**
* Plays an audio buffer (e.g. from an ArrayBuffer).
* Resolves when playback completes, or immediately if buffer is empty/invalid.
* Plays an audio buffer. Resolves when playback ends or buffer is empty/invalid.
* If a signal is provided and aborted, playback stops and the promise resolves.
*
* Mainly called by AudioAutoPlayer.
*/
export async function playAudioFull(audioBuffer: ArrayBuffer): Promise<void> {
export async function playFullBuffer(audioBuffer: ArrayBuffer, signal?: AbortSignal): Promise<void> {
// sanity check
if (!audioBuffer || audioBuffer.byteLength === 0) return;
if (!audioBuffer || audioBuffer.byteLength === 0 || signal?.aborted) return;
let audioContext: AudioContext | undefined;
try {
@@ -27,12 +59,28 @@ export namespace AudioPlayer {
const audioDataCopy = audioBuffer.slice(0); // slice to avoid detached buffer issues
const decodedBuffer = await audioContext.decodeAudioData(audioDataCopy);
// check again after async decode
if (signal?.aborted) {
audioContext.close().catch(() => {
});
return;
}
const bufferSource = audioContext.createBufferSource();
bufferSource.buffer = decodedBuffer;
bufferSource.connect(audioContext.destination);
return new Promise((resolve) => {
return new Promise<void>((resolve) => {
const onSignalAbort = () => {
bufferSource.onended = null;
bufferSource.stop();
audioContext?.close().catch(() => {
});
resolve();
};
signal?.addEventListener('abort', onSignalAbort, { once: true });
bufferSource.onended = () => {
signal?.removeEventListener('abort', onSignalAbort);
audioContext?.close().catch(() => {
});
resolve();
+82
View File
@@ -0,0 +1,82 @@
# Audio Call Flow
As of 2026-03-09 on `main` branch.
```
speakText(text, voice, signal) ← TTS, NB grant in caller
└─ _speakRawText_withHandle(rawText, engine)
├─ [RPC: elevenlabs/openai/inworld/localai]
│ │
│ ├─ speexSynthesize_RPC() ← tRPC streaming synthesis
│ │
│ └─ AudioAutoPlayer ← picks strategy per browser
│ │
│ ├─ [streaming: Chrome/Safari/Edge]
│ │ │
│ │ └─ AudioLivePlayer
│ │ ├─ new Audio()
│ │ ├─ new MediaSource()
│ │ ├─ addSourceBuffer('audio/mpeg')
│ │ ├─ enqueueChunk() → appendBuffer() ← plays live as chunks arrive
│ │ ├─ endPlayback() → endOfStream()
│ │ ├─ waitForPlaybackEnd() → 'ended' event
│ │ └─ stop() → pause + abort + close
│ │
│ └─ [accumulated: Firefox fallback]
│ │
│ ├─ enqueueChunk() → buffer.slice() into chunksAccumulator[]
│ ├─ endPlayback() → combine chunks →
│ │ └─ AudioPlayer.playFullBuffer(combined)
│ │
│ ├─ playFullBuffer(buf) → ← when server returns whole buffer
│ │ └─ AudioPlayer.playFullBuffer(buf)
│ │
│ ├─ waitForPlaybackEnd() → deferred promise
│ └─ stop() → resolve deferred, clear chunks
└─ [WebSpeech: browser-native]
└─ speexSynthesize_WebSpeech()
├─ new SpeechSynthesisUtterance()
└─ speechSynthesis.speak() ← no AudioPlayer at all
AudioPlayer.playUrl(url, signal?) ← one-shot URL playback
│ ├─ new Audio(url)
│ ├─ audio.play()
│ ├─ onended → resolve
│ └─ signal?.abort → pause + clear src + resolve
├─ Telephone.tsx:130 pickup/hangup MP3s (no signal)
├─ usePlayUrlInterval.ts:20 ringtone loop (with AbortController signal)
├─ aix.client.ts:723 AI inline audio (no signal) ← SHOULD be NB-managed
├─ Composer.tsx:372 mic-off sound (no signal)
└─ SpeexVoiceSelect.tsx:71 voice preview (with useQuery signal)
AudioPlayer.playFullBuffer(buffer, signal?) ← one-shot buffer playback
│ ├─ new AudioContext()
│ ├─ decodeAudioData()
│ ├─ createBufferSource() → start()
│ ├─ onended → close context + resolve
│ └─ signal?.abort → stop source + close context + resolve
└─ AudioAutoPlayer (only caller, see above)
AudioGenerator.*() ← procedural Web Audio API
│ ├─ singleContext() → shared AudioContext + masterGain
│ ├─ OscillatorNode / noise buffer → GainNode → destination
│ └─ fire-and-forget, sub-500ms
├─ NotificationProcessor.ts chatNotifyResponse / chatNotifyError
├─ Composer.tsx:361 chatAutoSend
├─ ChatMessage.tsx chatAutoSend
├─ beam.scatter.ts chatNotifyResponse (x2)
├─ beam.gather.execution.tsx chatNotifyResponse
├─ ChatViewOptionsModal.tsx chatNotifyResponse (x2)
├─ BeamViewOptionsModal.tsx chatNotifyResponse
└─ NotificationProcessor.ts basicAstralChimes (debug)
```
@@ -68,7 +68,7 @@ export function SpeexVoiceSelect(props: {
enabled: !!previewUrl,
queryKey: ['speex-voice-preview', previewUrl],
queryFn: async ({ signal }) =>
AudioPlayer.playUrl(previewUrl!),
AudioPlayer.playUrl(previewUrl!, signal),
});