mirror of
https://github.com/enricoros/big-AGI.git
synced 2026-05-10 21:50:14 -07:00
Audio: Port Player
This commit is contained in:
@@ -18,12 +18,12 @@ import { SystemPurposeId, SystemPurposes } from '../../data';
|
||||
import { llmStreamingChatGenerate, VChatMessageIn } from '~/modules/llms/llm.client';
|
||||
import { useElevenLabsVoiceDropdown } from '~/modules/elevenlabs/useElevenLabsVoiceDropdown';
|
||||
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { Link } from '~/common/components/Link';
|
||||
import { SpeechResult, useSpeechRecognition } from '~/common/components/useSpeechRecognition';
|
||||
import { conversationTitle } from '~/common/stores/chat/chat.conversation';
|
||||
import { createDMessageTextContent, DMessage, messageFragmentsReduceText, messageSingleTextOrThrow } from '~/common/stores/chat/chat.message';
|
||||
import { launchAppChat, navigateToIndex } from '~/common/app.routes';
|
||||
import { playSoundUrl, usePlaySoundUrl } from '~/common/util/audioUtils';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
|
||||
@@ -138,11 +138,11 @@ export function Telephone(props: {
|
||||
|
||||
// pickup / hangup
|
||||
React.useEffect(() => {
|
||||
!isRinging && playSoundUrl(isConnected ? '/sounds/chat-begin.mp3' : '/sounds/chat-end.mp3');
|
||||
!isRinging && AudioPlayer.playUrl(isConnected ? '/sounds/chat-begin.mp3' : '/sounds/chat-end.mp3');
|
||||
}, [isRinging, isConnected]);
|
||||
|
||||
// ringtone
|
||||
usePlaySoundUrl(isRinging ? '/sounds/chat-ringtone.mp3' : null, 300, 2800 * 2);
|
||||
AudioPlayer.usePlayUrl(isRinging ? '/sounds/chat-ringtone.mp3' : null, 300, 2800 * 2);
|
||||
|
||||
|
||||
/// CONNECTED
|
||||
|
||||
@@ -21,6 +21,7 @@ import type { DLLM } from '~/modules/llms/store-llms';
|
||||
import type { LLMOptionsOpenAI } from '~/modules/llms/vendors/openai/openai.vendor';
|
||||
import { useBrowseCapability } from '~/modules/browse/store-module-browsing';
|
||||
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
|
||||
import { ConversationsManager } from '~/common/chats/ConversationsManager';
|
||||
import { DMessageMetadata, messageFragmentsReduceText } from '~/common/stores/chat/chat.message';
|
||||
@@ -36,7 +37,6 @@ import { isMacUser } from '~/common/util/pwaUtils';
|
||||
import { launchAppCall } from '~/common/app.routes';
|
||||
import { lineHeightTextareaMd } from '~/common/app.theme';
|
||||
import { platformAwareKeystrokes } from '~/common/components/KeyStroke';
|
||||
import { playSoundUrl } from '~/common/util/audioUtils';
|
||||
import { supportsScreenCapture } from '~/common/util/screenCaptureUtils';
|
||||
import { useAppStateStore } from '~/common/state/store-appstate';
|
||||
import { useChatOverlayStore } from '~/common/chats/store-chat-overlay';
|
||||
@@ -405,11 +405,11 @@ export function Composer(props: {
|
||||
const notUserStop = result.doneReason !== 'manual';
|
||||
if (autoSend) {
|
||||
if (notUserStop)
|
||||
playSoundUrl('/sounds/mic-off-mid.mp3');
|
||||
void AudioPlayer.playUrl('/sounds/mic-off-mid.mp3');
|
||||
void handleSendAction(chatExecuteMode, nextText); // fire/forget
|
||||
} else {
|
||||
if (!micContinuation && notUserStop)
|
||||
playSoundUrl('/sounds/mic-off-mid.mp3');
|
||||
void AudioPlayer.playUrl('/sounds/mic-off-mid.mp3');
|
||||
if (nextText) {
|
||||
composerTextAreaRef.current?.focus();
|
||||
setComposeText(nextText);
|
||||
|
||||
@@ -0,0 +1,82 @@
|
||||
import * as React from 'react';
|
||||
|
||||
export namespace AudioPlayer {
|
||||
|
||||
/**
|
||||
* Plays an audio file from a URL (e.g. an MP3 file).
|
||||
*/
|
||||
export async function playUrl(url: string): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const audio = new Audio(url);
|
||||
audio.onended = () => resolve();
|
||||
audio.onerror = (e) => reject(new Error(`Error playing audio: ${e}`));
|
||||
audio.play().catch(reject);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Plays an audio buffer (e.g. from an ArrayBuffer).
|
||||
*/
|
||||
export async function playBuffer(audioBuffer: ArrayBuffer): Promise<void> {
|
||||
const audioContext = new AudioContext();
|
||||
const bufferSource = audioContext.createBufferSource();
|
||||
bufferSource.buffer = await audioContext.decodeAudioData(audioBuffer);
|
||||
bufferSource.connect(audioContext.destination);
|
||||
bufferSource.start();
|
||||
return new Promise((resolve) => {
|
||||
bufferSource.onended = () => resolve();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Plays a sound from a URL, and optionally repeats it after a delay.
|
||||
* @param url The URL of the sound to play.
|
||||
* @param firstDelay The delay before the first play, in milliseconds.
|
||||
* @param repeatMs The delay between each repeat, in milliseconds. If 0, the sound will only play once.
|
||||
*/
|
||||
export function usePlayUrl(url: string | null, firstDelay: number = 0, repeatMs: number = 0) {
|
||||
React.useEffect(() => {
|
||||
if (!url) return;
|
||||
|
||||
let timer2: any = null;
|
||||
|
||||
const playFirstTime = () => {
|
||||
const playAudio = () => playUrl(url);
|
||||
void playAudio();
|
||||
timer2 = repeatMs > 0 ? setInterval(playAudio, repeatMs) : null;
|
||||
};
|
||||
|
||||
const timer1 = setTimeout(playFirstTime, firstDelay);
|
||||
|
||||
return () => {
|
||||
clearTimeout(timer1);
|
||||
if (timer2)
|
||||
clearInterval(timer2);
|
||||
};
|
||||
}, [firstDelay, repeatMs, url]);
|
||||
}
|
||||
|
||||
/*export function useAudioPlayer() {
|
||||
const [isPlaying, setIsPlaying] = React.useState(false);
|
||||
const [currentUrl, setCurrentUrl] = React.useState<string | null>(null);
|
||||
|
||||
const play = React.useCallback(async (url: string) => {
|
||||
setCurrentUrl(url);
|
||||
setIsPlaying(true);
|
||||
try {
|
||||
await playUrl(url);
|
||||
} catch (error) {
|
||||
console.error('Error playing audio:', error);
|
||||
} finally {
|
||||
setIsPlaying(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const stop = React.useCallback(() => {
|
||||
setIsPlaying(false);
|
||||
setCurrentUrl(null);
|
||||
}, []);
|
||||
|
||||
return { play, stop, isPlaying, currentUrl };
|
||||
}*/
|
||||
}
|
||||
@@ -1,75 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
/**
 * Fire-and-forget playback of a sound from a URL.
 * Errors (e.g. autoplay blocked, 404) are logged to the console, never thrown.
 */
export function playSoundUrl(url: string) {
  const audio = new Audio(url);
  audio.play().catch(error => console.error('Error playing audio:', url, error));
}
|
||||
|
||||
/**
 * Decodes and plays an encoded audio buffer through a fresh AudioContext.
 * Resolves once playback has *started* (not finished).
 *
 * NOTE(review): the AudioContext is never closed here — browsers limit the
 * number of live contexts, so repeated calls may exhaust them; confirm
 * call frequency before relying on this in a loop.
 */
export async function playSoundBuffer(audioBuffer: ArrayBuffer) {
  const audioContext = new AudioContext();
  const bufferSource = audioContext.createBufferSource();
  bufferSource.buffer = await audioContext.decodeAudioData(audioBuffer);
  bufferSource.connect(audioContext.destination);
  bufferSource.start();
}
|
||||
|
||||
|
||||
/**
|
||||
* Plays a sound from a URL, and optionally repeats it after a delay.
|
||||
* @param url The URL of the sound to play.
|
||||
* @param firstDelay The delay before the first play, in milliseconds.
|
||||
* @param repeatMs The delay between each repeat, in milliseconds. If 0, the sound will only play once.
|
||||
*/
|
||||
/**
 * Plays a sound from a URL, and optionally repeats it after a delay.
 * @param url The URL of the sound to play.
 * @param firstDelay The delay before the first play, in milliseconds.
 * @param repeatMs The delay between each repeat, in milliseconds. If 0, the sound will only play once.
 */
export function usePlaySoundUrl(url: string | null, firstDelay: number = 0, repeatMs: number = 0) {
  React.useEffect(() => {
    // a null url disables the hook entirely (no timers are armed)
    if (!url) return;

    let timer2: any = null;

    const playFirstTime = () => {
      const playAudio = () => playSoundUrl(url);
      playAudio();
      // arm the repeat interval only after the first play has fired
      timer2 = repeatMs > 0 ? setInterval(playAudio, repeatMs) : null;
    };

    const timer1 = setTimeout(playFirstTime, firstDelay);

    // cleanup cancels the pending first play and, if armed, the repeat interval
    return () => {
      clearTimeout(timer1);
      if (timer2)
        clearInterval(timer2);
    };
  }, [firstDelay, repeatMs, url]);
}
|
||||
|
||||
|
||||
/* Note: the following function was an earlier implementation of AudioLivePlayer, but it didn't work well.
|
||||
|
||||
export async function playLiveAudioStream(stream: ReadableStream<Uint8Array>, mimeType: string = 'audio/mpeg') {
|
||||
const mediaSource = new MediaSource();
|
||||
const audio = new Audio(URL.createObjectURL(mediaSource));
|
||||
audio.autoplay = true;
|
||||
|
||||
mediaSource.addEventListener('sourceopen', async () => {
|
||||
const sourceBuffer = mediaSource.addSourceBuffer(mimeType);
|
||||
const reader = stream.getReader();
|
||||
|
||||
const processStream = async () => {
|
||||
const { done, value } = await reader.read();
|
||||
|
||||
if (done) {
|
||||
mediaSource.endOfStream();
|
||||
return;
|
||||
}
|
||||
|
||||
if (sourceBuffer.updating) {
|
||||
await new Promise(resolve => sourceBuffer.addEventListener('updateend', resolve, { once: true }));
|
||||
}
|
||||
|
||||
sourceBuffer.appendBuffer(value);
|
||||
processStream();
|
||||
};
|
||||
|
||||
processStream();
|
||||
});
|
||||
}*/
|
||||
@@ -1,9 +1,9 @@
|
||||
import { getBackendCapabilities } from '~/modules/backend/store-backend-capabilities';
|
||||
|
||||
import { AudioLivePlayer } from '~/common/util/AudioLivePlayer';
|
||||
import { AudioLivePlayer } from '~/common/util/audio/AudioLivePlayer';
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { CapabilityElevenLabsSpeechSynthesis } from '~/common/components/useCapabilities';
|
||||
import { frontendSideFetch } from '~/common/util/clientFetchers';
|
||||
import { playSoundBuffer } from '~/common/util/audioUtils';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
|
||||
import type { SpeechInputSchema } from './elevenlabs.router';
|
||||
@@ -38,7 +38,7 @@ export async function speakText(text: string, voiceId?: string) {
|
||||
try {
|
||||
const edgeResponse = await frontendFetchAPIElevenLabsSpeech(text, elevenLabsApiKey, voiceId || elevenLabsVoiceId, nonEnglish, false);
|
||||
const audioBuffer = await edgeResponse.arrayBuffer();
|
||||
await playSoundBuffer(audioBuffer);
|
||||
await AudioPlayer.playBuffer(audioBuffer);
|
||||
} catch (error) {
|
||||
console.error('Error playing first text:', error);
|
||||
}
|
||||
|
||||
@@ -4,10 +4,9 @@ import { CircularProgress, Option, Select } from '@mui/joy';
|
||||
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
|
||||
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
|
||||
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { apiQuery } from '~/common/util/trpc.client';
|
||||
|
||||
import { playSoundUrl } from '~/common/util/audioUtils';
|
||||
|
||||
import { VoiceSchema } from './elevenlabs.router';
|
||||
import { isElevenLabsEnabled } from './elevenlabs.client';
|
||||
import { useElevenLabsApiKey, useElevenLabsVoiceId } from './store-module-elevenlabs';
|
||||
@@ -82,7 +81,7 @@ export function useElevenLabsVoiceDropdown(autoSpeak: boolean, disabled?: boolea
|
||||
const previewUrl = (autoSpeak && voice?.previewUrl) || null;
|
||||
React.useEffect(() => {
|
||||
if (previewUrl)
|
||||
playSoundUrl(previewUrl);
|
||||
void AudioPlayer.playUrl(previewUrl);
|
||||
}, [previewUrl]);
|
||||
|
||||
const voicesDropdown = React.useMemo(() =>
|
||||
|
||||
Reference in New Issue
Block a user