From fcdc3266afbee5bfb9ff157eb36a48707e3e596a Mon Sep 17 00:00:00 2001 From: Enrico Ros Date: Mon, 10 Jul 2023 21:58:03 -0700 Subject: [PATCH] Improve showing errors --- pages/api/llms/stream.ts | 13 ++++++++----- src/apps/chat/editors/chat-stream.ts | 7 ++++--- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/pages/api/llms/stream.ts b/pages/api/llms/stream.ts index 07273bfa5..2e5594ca5 100644 --- a/pages/api/llms/stream.ts +++ b/pages/api/llms/stream.ts @@ -127,19 +127,22 @@ export default async function handler(req: NextRequest): Promise { // inputs - reuse the tRPC schema const { access, model, history } = chatGenerateSchema.parse(await req.json()); - // prepare the API request data - const { headers, url } = openAIAccess(access, '/v1/chat/completions'); - const body = openAIChatCompletionPayload(model, history, null, 1, true); - // begin event streaming from the OpenAI API let upstreamResponse: Response; try { + + // prepare the API request data + const { headers, url } = openAIAccess(access, '/v1/chat/completions'); + const body = openAIChatCompletionPayload(model, history, null, 1, true); + + // POST to the API upstreamResponse = await fetch(url, { headers, method: 'POST', body: JSON.stringify(body) }); await throwOpenAINotOkay(upstreamResponse); + } catch (error: any) { const fetchOrVendorError = (error?.message || typeof error === 'string' ? error : JSON.stringify(error)) + (error?.cause ? ' · ' + error.cause : ''); console.log(`/api/llms/stream: fetch issue: ${fetchOrVendorError}`); - throw new Error('[OpenAI Issue] ' + fetchOrVendorError); + return new NextResponse('[OpenAI Issue] ' + fetchOrVendorError, { status: 500 }); } /* The following code is heavily inspired by the Vercel AI SDK, but simplified to our needs and in full control. 
diff --git a/src/apps/chat/editors/chat-stream.ts b/src/apps/chat/editors/chat-stream.ts index af87b36d0..7ec9eec73 100644 --- a/src/apps/chat/editors/chat-stream.ts +++ b/src/apps/chat/editors/chat-stream.ts @@ -122,9 +122,10 @@ async function streamAssistantMessage( signal: abortSignal, }); - if (!response.body) { - // noinspection ExceptionCaughtLocallyJS - throw new Error('No response body'); + if (!response.ok || !response.body) { + const errorMessage = response.body ? await response.text() : 'No response from server'; + editMessage(conversationId, assistantMessageId, { text: errorMessage, typing: false }, false); + return; } const responseReader = response.body.getReader();