Cleaner errors

This commit is contained in:
Enrico Ros
2024-06-17 19:49:20 -07:00
parent da3990b614
commit edfaf6f002
5 changed files with 59 additions and 61 deletions
@@ -8,6 +8,7 @@ import { DMessage, messageFragmentsReduceText } from '~/common/stores/chat/chat.
import { TokenBadgeMemo } from '../composer/TokenBadge';
import { makeMessageAvatar, messageBackground } from './ChatMessage';
import { isErrorChatMessage } from './explainServiceErrors';
/**
@@ -58,7 +59,7 @@ export function CleanerMessage(props: { message: DMessage, selected: boolean, re
const fromAssistant = messageRole === 'assistant';
const isAssistantError = fromAssistant && (messageText.startsWith('[Issue] ') || messageText.startsWith('[OpenAI Issue]'));
const isAssistantError = fromAssistant && isErrorChatMessage(messageText);
const backgroundColor = messageBackground(messageRole, !!messageUpdated, isAssistantError);
@@ -51,7 +51,7 @@ export function ContentPartText(props: {
onFragmentReplace?.(fragmentId, createTextContentFragment(newText));
}, [fragmentId, messageText, onFragmentReplace]);
const { errorMessage } = React.useMemo(
const errorMessage = React.useMemo(
() => explainServiceErrors(messageText, fromAssistant, props.messageOriginLLM),
[fromAssistant, messageText, props.messageOriginLLM],
);
@@ -59,8 +59,8 @@ export function ContentPartText(props: {
// if errored, render an Auto-Error message
if (errorMessage) {
return (
<GoodTooltip placement='top' title={messageText}>
<div><InlineError error={errorMessage} /></div>
<GoodTooltip placement='top' arrow title={messageText}>
<div><InlineError error={`${errorMessage}. Hover this message for more details.`} /></div>
</GoodTooltip>
);
}
@@ -3,62 +3,59 @@ import * as React from 'react';
import { Link } from '~/common/components/Link';
/**
 * Returns true when a chat message's text carries one of the known
 * service-error prefixes (i.e. it was emitted as an error, not a reply).
 */
export function isErrorChatMessage(text: string) {
  const errorPrefixes = ['**[Service Issue] ', '[Issue] ', '[OpenAI Issue] '];
  for (const errorPrefix of errorPrefixes)
    if (text.startsWith(errorPrefix))
      return true;
  return false;
}
/**
 * Maps a known service-error message to a human-friendly JSX explanation.
 *
 * @param text the raw message text, expected to start with an error prefix (see isErrorChatMessage)
 * @param isAssistant only assistant messages are considered for explanation
 * @param modelId optional model name, used to personalize some explanations
 * @returns a JSX element explaining the error, or null when the message is not a recognized error
 */
export function explainServiceErrors(text: string, isAssistant: boolean, modelId?: string) {

  // not an error message from the assistant: nothing to explain
  if (!isAssistant || !isErrorChatMessage(text))
    return null;

  switch (true) {

    case text.includes('"insufficient_quota"'):
      return <>
        The OpenAI API key appears to have <b>insufficient quota</b>. Please
        check <Link noLinkStyle href='https://platform.openai.com/usage' target='_blank'>your usage</Link> and
        make sure the usage is under <Link noLinkStyle href='https://platform.openai.com/account/billing/limits' target='_blank'>the limits</Link>.
      </>;

    // [OpenAI] {"error":{"message":"Incorrect API key provided: ...","type":"invalid_request_error","param":null,"code":"invalid_api_key"}}
    case text.includes('"invalid_api_key"'):
      return <>
        The OpenAI API key appears to be incorrect or to have expired.
        Please <Link noLinkStyle href='https://platform.openai.com/api-keys' target='_blank'>check your
        API key</Link> and update it in <b>Models</b>.
      </>;

    // [OpenAI] "Service Temporarily Unavailable (503)", {"code":503,"message":"Service Unavailable.","param":null,"type":"cf_service_unavailable"}
    case text.includes('"cf_service_unavailable"'):
      return <>
        The OpenAI servers appear to be having trouble at the moment. Kindly follow
        the <Link noLinkStyle href='https://status.openai.com/' target='_blank'>OpenAI Status</Link> page
        for up to date information, and at your option try again.
      </>;

    // note that "model_not_found" is different than "The model `gpt-xyz` does not exist" message
    case text.includes('"model_not_found"'):
      return <>
        The API key appears to be unauthorized for {modelId || 'this model'}. You can change to <b>GPT-3.5
        Turbo</b> and simultaneously <Link noLinkStyle href='https://openai.com/waitlist/gpt-4-api' target='_blank'>request
        access</Link> to the desired model.
      </>;

    case text.includes('"context_length_exceeded"'): {
      // braces scope the consts to this case (avoids no-case-declarations leakage)
      // TODO: propose to summarize or split the input?
      const pattern = /maximum context length is (\d+) tokens.+resulted in (\d+) tokens/;
      const match = pattern.exec(text);
      const usedText = match ? <b>{parseInt(match[2] || '0').toLocaleString()} tokens &gt; {parseInt(match[1] || '0').toLocaleString()}</b> : '';
      return <>
        This thread <b>surpasses the maximum size</b> allowed for {modelId || 'this model'}. {usedText}.
        Please consider removing some earlier messages from the conversation, start a new conversation,
        choose a model with larger context, or submit a shorter new message.
        {!usedText && ` -- ${text}`}
      </>;
    }

    default:
      // recognized error prefix, but no specific explanation available
      return null;
  }
}
+2 -2
View File
@@ -20,14 +20,14 @@ export const runBrowseGetPageUpdatingState = async (cHandler: ConversationHandle
try {
const page = await callBrowseFetchPage(url);
const pageContent = page.content.markdown || page.content.text || page.content.html || 'Issue: page load did not produce an answer: no text found';
const pageContent = page.content.markdown || page.content.text || page.content.html || 'Issue: Browsing did not produce a page content.';
cHandler.messageFragmentReplace(assistantMessageId, placeholderFragmentId, createTextContentFragment(pageContent), true);
return true;
} catch (error: any) {
console.error(error);
const pageError = 'Issue: browse did not produce an answer (error: ' + (error?.message || error?.toString() || 'unknown') + ').';
const pageError = 'Issue: Browsing did not produce a page.\n(error: ' + (error?.message || error?.toString() || 'unknown') + ').';
cHandler.messageFragmentReplace(assistantMessageId, placeholderFragmentId, createErrorContentFragment(pageError), true);
return false;
+1 -1
View File
@@ -49,7 +49,7 @@ export async function runImageGenerationUpdatingState(cHandler: ConversationHand
return true;
} catch (error: any) {
const drawError = `Issue: Sorry, I couldn't create an image for you. ${error?.message || error?.toString() || 'Unknown error'}`;
const drawError = `Issue: Sorry, I couldn't create an image for you.\n${error?.message || error?.toString() || 'Unknown error'}`;
cHandler.messageFragmentReplace(assistantMessageId, placeholderFragmentId, createErrorContentFragment(drawError), true);
return false;