Merge branch 'refs/heads/main' into feature-multipart

# Conflicts:
#	src/apps/chat/AppChat.tsx
#	src/apps/chat/components/ChatMessageList.tsx
#	src/apps/personas/creator/Creator.tsx
This commit is contained in:
Enrico Ros
2024-07-07 04:43:21 -07:00
23 changed files with 302 additions and 129 deletions
+1 -1
View File
@@ -143,7 +143,7 @@ You can easily configure 100s of AI models in big-AGI:
| **AI models** | _supported vendors_ |
|:--------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Opensource Servers | [LocalAI](https://localai.com) (multimodal) · [Ollama](https://ollama.com/) · [Oobabooga](https://github.com/oobabooga/text-generation-webui) |
| Opensource Servers | [LocalAI](https://localai.io/) (multimodal) · [Ollama](https://ollama.com/) · [Oobabooga](https://github.com/oobabooga/text-generation-webui) |
| Local Servers | [LM Studio](https://lmstudio.ai/) |
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
| Language services | [Anthropic](https://anthropic.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) |
+2 -2
View File
@@ -19,7 +19,7 @@ import { ConfirmationModal } from '~/common/components/ConfirmationModal';
import { ConversationsManager } from '~/common/chats/ConversationsManager';
import { DConversation, DConversationId } from '~/common/stores/chat/chat.conversation';
import { DMessageAttachmentFragment, DMessageContentFragment, duplicateDMessageFragments } from '~/common/stores/chat/chat.fragments';
import { GlobalShortcutItem, ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcut';
import { GlobalShortcutDefinition, ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcuts';
import { PanelResizeInset } from '~/common/components/panes/GoodPanelResizeHandler';
import { PreferencesTab, useOptimaLayout, usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
@@ -394,7 +394,7 @@ export function AppChat() {
openLlmOptions(chatLLMId);
}, [openLlmOptions]);
const shortcuts = React.useMemo((): GlobalShortcutItem[] => [
const shortcuts = React.useMemo((): GlobalShortcutDefinition[] => [
// focused conversation
['b', true, true, false, handleMessageBeamLastInFocusedPane],
['r', true, true, false, handleMessageRegenerateLastInFocusedPane],
+3 -3
View File
@@ -11,7 +11,7 @@ import type { DConversationId } from '~/common/stores/chat/chat.conversation';
import type { DMessageFragment, DMessageFragmentId } from '~/common/stores/chat/chat.fragments';
import { InlineError } from '~/common/components/InlineError';
import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
import { ShortcutKeyName, useGlobalShortcut } from '~/common/components/useGlobalShortcut';
import { ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcuts';
import { createDMessageTextContent, DMessageId, DMessageUserFlag, messageToggleUserFlag } from '~/common/stores/chat/chat.message';
import { getConversation, useChatStore } from '~/common/stores/chat/store-chats';
import { useBrowserTranslationWarning } from '~/common/components/useIsBrowserTranslating';
@@ -192,9 +192,9 @@ export function ChatMessageList(props: {
setSelectedMessages(new Set());
}, [props.conversationHandler, selectedMessages]);
useGlobalShortcut(props.isMessageSelectionMode && ShortcutKeyName.Esc, false, false, false, () => {
useGlobalShortcuts([[props.isMessageSelectionMode && ShortcutKeyName.Esc, false, false, false, () => {
props.setIsMessageSelectionMode(false);
});
}]]);
// text-diff functionality: only diff the last complete message, and they're similar in size
@@ -41,7 +41,7 @@ import { supportsScreenCapture } from '~/common/util/screenCaptureUtils';
import { useAppStateStore } from '~/common/state/store-appstate';
import { useChatOverlayStore } from '~/common/chats/store-chat-overlay';
import { useDebouncer } from '~/common/components/useDebouncer';
import { useGlobalShortcut } from '~/common/components/useGlobalShortcut';
import { useGlobalShortcuts } from '~/common/components/useGlobalShortcuts';
import { useUICounter, useUIPreferencesStore } from '~/common/state/store-ui';
import { useUXLabsStore } from '~/common/state/store-ux-labs';
@@ -420,7 +420,7 @@ export function Composer(props: {
const { isSpeechEnabled, isSpeechError, isRecordingAudio, isRecordingSpeech, toggleRecording } =
useSpeechRecognition(onSpeechResultCallback, chatMicTimeoutMs || 2000);
useGlobalShortcut('m', true, false, false, toggleRecording);
useGlobalShortcuts([['m', true, false, false, toggleRecording]]);
const micIsRunning = !!speechInterimResult;
const micContinuationTrigger = micContinuation && !micIsRunning && !assistantAbortible && !isSpeechError;
@@ -472,7 +472,7 @@ export function Composer(props: {
}
}, [attachAppendFile]);
useGlobalShortcut(supportsClipboardRead ? 'v' : false, true, true, false, attachAppendClipboardItems);
useGlobalShortcuts([[supportsClipboardRead ? 'v' : false, true, true, false, attachAppendClipboardItems]]);
// Attachments Down
@@ -10,7 +10,7 @@ import { BeamStoreApi, useBeamStore } from '~/modules/beam/store-beam.hooks';
import { ConfirmationModal } from '~/common/components/ConfirmationModal';
import { GoodTooltip } from '~/common/components/GoodTooltip';
import { KeyStroke } from '~/common/components/KeyStroke';
import { ShortcutKeyName, useGlobalShortcut } from '~/common/components/useGlobalShortcut';
import { ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcuts';
import { animationBackgroundBeamGather, animationColorBeamScatterINV, animationEnterBelow } from '~/common/util/animUtils';
@@ -59,7 +59,7 @@ export function ChatBarAltBeam(props: {
// intercept Esc while this beam is focused
useGlobalShortcut(ShortcutKeyName.Esc, false, false, false, handleCloseBeam);
useGlobalShortcuts([[ShortcutKeyName.Esc, false, false, false, handleCloseBeam]]);
return (
+14 -9
View File
@@ -31,12 +31,15 @@ const Prompts: string[] = [
'Compare the draft character sheet with the original transcript, validating its content and ensuring it captures both the speakers overt characteristics and the subtler undertones. Omit unknown information, fine-tune any areas that require clarity, have been overlooked, or require more authenticity. Use clear and illustrative examples from the transcript to refine your sheet and offer meaningful, tangible reference points. Your output is a coherent, comprehensive, and nuanced instruction that begins with \'You are a...\' and serves as a go-to guide for an actor recreating the persona.',
];
// Display titles for the editable instruction prompts (same order as the Prompts array)
const PromptTitles: string[] = [
  'Common: Creator System Prompt',
  'Analyze the transcript',
  'Define the character',
  'Cross the t\'s',
];
/**
 * Builds the editable prompt titles for the selected creator tab.
 * The analysis step targets a 'transcript' on the first tab (index 0) and plain 'text' on any other tab.
 */
const getTitlesForTab = (selectedTab: number): string[] => [
  'Common: Creator System Prompt',
  selectedTab ? 'Analyze the text' : 'Analyze the transcript',
  'Define the character',
  'Cross the t\'s',
];
// chain to convert a text input string (e.g. youtube transcript) into a persona prompt
function createChain(instructions: string[], titles: string[]): LLMChainStep[] {
@@ -99,16 +102,18 @@ export function Creator(props: { display: boolean }) {
// editable prompts
const promptTitles = React.useMemo(() => getTitlesForTab(selectedTab), [selectedTab]);
const {
strings: editedInstructions, stringEditors: instructionEditors,
} = useFormEditTextArray(Prompts, PromptTitles);
} = useFormEditTextArray(Prompts, promptTitles);
const { steps: creationChainSteps, id: chainId } = React.useMemo(() => {
return {
steps: createChain(editedInstructions, PromptTitles),
steps: createChain(editedInstructions, promptTitles),
id: agiUuid('persona-creator-chain'),
};
}, [editedInstructions]);
}, [editedInstructions, promptTitles]);
const llmLabel = personaLlm?.label || undefined;
const savePersona = React.useCallback((personaPrompt: string, inputText: string) => {
+27 -26
View File
@@ -3,37 +3,38 @@ import * as React from 'react';
import { AutoBlocksRenderer } from '~/modules/blocks/AutoBlocksRenderer';
import { GoodModal } from '~/common/components/GoodModal';
import { isMacUser } from '~/common/util/pwaUtils';
import { platformAwareKeystrokes } from '~/common/components/KeyStroke';
import { useIsMobile } from '~/common/components/useMatchMedia';
const shortcutsMd = platformAwareKeystrokes(`
| Shortcut | Description |
|---------------------|-------------------------------------------------|
| **Edit** | |
| Shift + Enter | Newline |
| Alt + Enter | Append (no response) |
| Ctrl + Shift + B | **Beam** last message |
| Ctrl + Shift + R | **Regenerate** last message |
| Ctrl + Shift + V | Attach clipboard (better than Ctrl + V) |
| Ctrl + M | Microphone (voice typing) |
| **Chats** | |
| Ctrl + O | Open Chat File ... |
| Ctrl + S | Save Chat File ... |
| Ctrl + Alt + N | **New** chat |
| Ctrl + Alt + X | **Reset** chat |
| Ctrl + Alt + D | **Delete** chat |
| Ctrl + Alt + B | **Branch** chat |
| Ctrl + Alt + Left | **Previous** chat (in history) |
| Ctrl + Alt + Right | **Next** chat (in history) |
| **Settings** | |
| Ctrl + Shift + P | ⚙️ Preferences |
| Ctrl + Shift + M | 🧠 Models |
| Ctrl + Shift + O | 💬 Options (current Chat Model) |
| Ctrl + Shift + + | Increase Text Size |
| Ctrl + Shift + - | Decrease Text Size |
| Ctrl + Shift + ? | Shortcuts |
| Shortcut | Description |
|-----------------------------------------|-------------------------------------------------|
| **Edit** | |
| Shift + Enter | Newline |
| Alt + Enter | Append (no response) |
| Ctrl + Shift + B | **Beam** last message |
| Ctrl + Shift + R | **Regenerate** last message |
| Ctrl + Shift + V | Attach clipboard (better than Ctrl + V) |
| Ctrl + M | Microphone (voice typing) |
| **Chats** | |
| Ctrl + O | Open Chat ... |
| Ctrl + S | Save Chat ... |
| Ctrl + ${isMacUser ? '' : 'Alt +'} N | **New** chat |
| Ctrl + ${isMacUser ? '' : 'Alt +'} X | **Reset** chat |
| Ctrl + ${isMacUser ? '' : 'Alt +'} D | **Delete** chat |
| Ctrl + ${isMacUser ? '' : 'Alt +'} B | **Branch** chat |
| Ctrl + Alt + Left | **Previous** chat (in history) |
| Ctrl + Alt + Right | **Next** chat (in history) |
| **Settings** | |
| Ctrl + Shift + P | ⚙️ Preferences |
| Ctrl + Shift + M | 🧠 Models |
| Ctrl + Shift + O | 💬 Options (current Chat Model) |
| Ctrl + Shift + + | Increase Text Size |
| Ctrl + Shift + - | Decrease Text Size |
| Ctrl + Shift + ${isMacUser ? '/' : '?'} | Shortcuts |
`).trim();
@@ -55,4 +56,4 @@ export function ShortcutsModal(props: { onClose: () => void }) {
/>
</GoodModal>
);
}
}
+2 -2
View File
@@ -12,7 +12,7 @@ import { AutoBlocksRenderer } from '~/modules/blocks/AutoBlocksRenderer';
import { AgiSquircleIcon } from '~/common/components/icons/AgiSquircleIcon';
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
import { GlobalShortcutItem, ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcut';
import { GlobalShortcutDefinition, ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcuts';
import { hasGoogleAnalytics } from '~/common/components/GoogleAnalytics';
import { useIsMobile } from '~/common/components/useMatchMedia';
import { animationTextShadowLimey } from '~/common/util/animUtils';
@@ -159,7 +159,7 @@ export function ExplainerCarousel(props: {
}, [props.explainerId]);
const shortcuts = React.useMemo((): GlobalShortcutItem[] => [
const shortcuts = React.useMemo((): GlobalShortcutDefinition[] => [
[ShortcutKeyName.Left, false, false, false, handlePrevPage],
[ShortcutKeyName.Right, false, false, false, handleNextPage],
], [handleNextPage, handlePrevPage]);
+1 -1
View File
@@ -10,7 +10,7 @@ import { isMacUser } from '~/common/util/pwaUtils';
export function platformAwareKeystrokes(text: string) {
return isMacUser
? text
.replaceAll('Ctrl', '' /* Command */)
.replaceAll('Ctrl', '' /* Control */)
.replaceAll('Alt', '⌥' /* Option */)
.replaceAll('Shift', '⇧')
// Optional: Replace "Enter" with "Return" if you want to align with Mac keyboard labeling
+9
View File
@@ -0,0 +1,9 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
/**
 * Deepseek brand mark rendered as a Joy UI SvgIcon.
 * Fill defaults to currentColor with no stroke; any SvgIconProps passed in override these defaults (spread last).
 */
export function DeepseekIcon(props: SvgIconProps) {
  // viewBox/width/height match the original asset's 56.2 x 41.36 bounds
  return <SvgIcon viewBox="0 0 56.2 41.3594" width="56.2" height="41.359375" strokeWidth={0} stroke='none' fill='currentColor' strokeLinecap='butt' strokeLinejoin='miter' {...props}>
    <path id="path" d="M55.6128 3.47119C55.0175 3.17944 54.7611 3.73535 54.413 4.01782C54.2939 4.10889 54.1932 4.22729 54.0924 4.33667C53.2223 5.26587 52.2057 5.87646 50.8776 5.80347C48.9359 5.69409 47.2781 6.30469 45.8126 7.78979C45.5012 5.9585 44.4663 4.86499 42.8909 4.16357C42.0667 3.79907 41.2332 3.43457 40.6561 2.64185C40.2532 2.07715 40.1432 1.44849 39.9418 0.828857C39.8135 0.455322 39.6853 0.0725098 39.2548 0.00878906C38.7877 -0.0639648 38.6045 0.327637 38.4213 0.655762C37.6886 1.99512 37.4047 3.47119 37.4321 4.96533C37.4962 8.32739 38.9159 11.0059 41.7369 12.9102C42.0575 13.1289 42.1399 13.3474 42.0392 13.6665C41.8468 14.3225 41.6178 14.9602 41.4164 15.6162C41.2881 16.0354 41.0957 16.1265 40.647 15.9441C39.0991 15.2974 37.7618 14.3406 36.5803 13.1836C34.5745 11.2429 32.761 9.10181 30.4988 7.42529C29.9675 7.03345 29.4363 6.66919 28.8867 6.32275C26.5786 4.08154 29.189 2.24097 29.7935 2.02246C30.4254 1.79468 30.0133 1.01099 27.9708 1.02026C25.9283 1.0293 24.0599 1.71265 21.6786 2.62378C21.3306 2.7605 20.9641 2.8606 20.5886 2.94263C18.4271 2.53271 16.1831 2.44141 13.8384 2.70581C9.42371 3.19775 5.89758 5.28418 3.30554 8.84668C0.191406 13.1289 -0.54126 17.9941 0.356323 23.0691C1.29968 28.4172 4.02905 32.8452 8.22388 36.3076C12.5745 39.8972 17.5845 41.6558 23.2997 41.3186C26.771 41.1182 30.6361 40.6536 34.9958 36.9636C36.0948 37.5103 37.2489 37.7288 39.1632 37.8928C40.6378 38.0295 42.0575 37.8201 43.1565 37.5923C44.8784 37.2278 44.7594 35.6333 44.1366 35.3418C39.09 32.9912 40.1981 33.9478 39.1907 33.1733C41.7552 30.1394 45.6204 26.9868 47.1316 16.7732C47.2506 15.9624 47.1499 15.4521 47.1316 14.7961C47.1224 14.3953 47.214 14.2405 47.672 14.1948C48.9359 14.0491 50.1632 13.7029 51.2898 13.0833C54.5596 11.2976 55.8784 8.36377 56.1898 4.84692C56.2357 4.30933 56.1807 3.75342 55.6128 3.47119ZM27.119 35.123C22.2281 31.2783 19.856 30.0117 18.8759 30.0664C17.96 30.1211 18.1249 31.1689 18.3263 31.8523C18.537 32.5264 18.8118 32.9912 19.1964 33.5833C19.462 33.9751 19.6453 34.5581 
18.9309 34.9956C17.3555 35.9705 14.6169 34.6675 14.4886 34.6038C11.3014 32.7268 8.63611 30.2485 6.75842 26.8594C4.94495 23.5974 3.89172 20.0989 3.71765 16.3633C3.67188 15.4614 3.9375 15.1423 4.83508 14.9785C6.0166 14.7598 7.23474 14.7141 8.41626 14.8872C13.408 15.6162 17.6577 17.8484 21.2206 21.3835C23.2539 23.397 24.7926 25.8025 26.3772 28.1531C28.0624 30.6494 29.8759 33.0276 32.184 34.9773C32.9991 35.6606 33.6494 36.1799 34.2722 36.5627C32.3947 36.7722 29.2622 36.8179 27.119 35.123ZM29.4637 20.0442C29.4637 19.6433 29.7843 19.3245 30.1874 19.3245C30.2789 19.3245 30.3613 19.3425 30.4346 19.3699C30.5354 19.4065 30.627 19.4612 30.7002 19.543C30.8285 19.6707 30.9017 19.8528 30.9017 20.0442C30.9017 20.4451 30.5812 20.7639 30.1782 20.7639C29.7751 20.7639 29.4637 20.4451 29.4637 20.0442ZM36.7452 23.7798C36.2781 23.9712 35.811 24.135 35.3622 24.1533C34.6661 24.1897 33.9059 23.9072 33.4938 23.561C32.8527 23.0234 32.3947 22.7229 32.2023 21.7844C32.1199 21.3835 32.1656 20.7639 32.239 20.4087C32.4038 19.6433 32.2206 19.1514 31.6803 18.7048C31.2406 18.3403 30.6819 18.2402 30.0682 18.2402C29.8392 18.2402 29.6287 18.1399 29.4729 18.0579C29.2164 17.9304 29.0059 17.6116 29.2073 17.2197C29.2714 17.0923 29.5829 16.7825 29.6561 16.7278C30.4896 16.2539 31.4513 16.4089 32.3397 16.7642C33.1641 17.1013 33.7869 17.7209 34.6844 18.5955C35.6003 19.6523 35.7651 19.9441 36.2872 20.7366C36.6995 21.3562 37.075 21.9939 37.3314 22.7229C37.4871 23.1785 37.2856 23.552 36.7452 23.7798Z"/>
  </SvgIcon>;
}
@@ -1,72 +0,0 @@
import * as React from 'react';
export const ShortcutKeyName = {
Esc: 'Escape',
Left: 'ArrowLeft',
Right: 'ArrowRight',
};
/**
 * Binds a single application-wide keyboard shortcut to a handler.
 *
 * @param shortcutKey The key to match (compared case-insensitively); pass false to disable the binding.
 * @param useCtrl If true, Ctrl (or Cmd, treated as equivalent) must be held.
 * @param useShift If true, Shift must be held.
 * @param useAlt If true, Alt must be held.
 * @param callback Invoked on match. Memoize it, or the listener is re-registered on every render.
 */
export const useGlobalShortcut = (shortcutKey: string | false, useCtrl: boolean, useShift: boolean, useAlt: boolean, callback: () => void) => {
  React.useEffect(() => {
    // nothing to bind
    if (!shortcutKey) return;

    const wantedKey = shortcutKey.toLowerCase();

    const onKeyDown = (event: KeyboardEvent) => {
      // Ctrl and Cmd (meta) are interchangeable, but exactly one must be down (XOR)
      const ctrlOrCmd = event.ctrlKey !== event.metaKey;
      const matches =
        useCtrl === ctrlOrCmd &&
        useShift === event.shiftKey &&
        useAlt === event.altKey &&
        event.key.toLowerCase() === wantedKey;
      if (matches) {
        event.preventDefault();
        event.stopPropagation();
        callback();
      }
    };

    window.addEventListener('keydown', onKeyDown);
    return () => window.removeEventListener('keydown', onKeyDown);
  }, [callback, shortcutKey, useAlt, useCtrl, useShift]);
};
// [key, requires Ctrl/Cmd, requires Shift, requires Alt, handler]
export type GlobalShortcutItem = [key: string, ctrl: boolean, shift: boolean, alt: boolean, action: () => void];

/**
 * Binds a list of application-wide keyboard shortcuts to their handlers.
 * Only the first matching entry fires for a given keydown (then the scan stops).
 *
 * @param shortcuts The shortcut tuples to register; stabilize the array (e.g. React.useMemo) to avoid re-registering every render.
 */
export const useGlobalShortcuts = (shortcuts: GlobalShortcutItem[]) => {
  React.useEffect(() => {
    const onKeyDown = (event: KeyboardEvent) => {
      // Ctrl and Cmd (meta) are interchangeable, but exactly one must be down (XOR)
      const ctrlOrCmd = event.ctrlKey !== event.metaKey;
      for (const [key, useCtrl, useShift, useAlt, action] of shortcuts) {
        // skip disabled entries (falsy key)
        if (!key) continue;
        const matches =
          useCtrl === ctrlOrCmd &&
          useShift === event.shiftKey &&
          useAlt === event.altKey &&
          event.key.toLowerCase() === key.toLowerCase();
        if (matches) {
          event.preventDefault();
          event.stopPropagation();
          action();
          break;
        }
      }
    };
    window.addEventListener('keydown', onKeyDown);
    return () => window.removeEventListener('keydown', onKeyDown);
  }, [shortcuts]);
};
@@ -0,0 +1,44 @@
import * as React from 'react';
import { isMacUser } from '../util/pwaUtils';
// Symbolic names for the non-printable KeyboardEvent.key values used in shortcut definitions
export const ShortcutKeyName = {
  Esc: 'Escape',
  Left: 'ArrowLeft',
  Right: 'ArrowRight',
};

// [key (or false to disable the entry), Ctrl?, Shift?, Alt? (ignored on Mac), handler]
export type GlobalShortcutDefinition = [key: string | false, useCtrl: boolean, useShift: boolean, useAltForNonMac: boolean, action: () => void];
/**
 * Registers multiple application-wide keyboard shortcut -> handler mappings.
 * Only the first matching definition fires for a given keydown.
 *
 * Important notes:
 * - [Mac only] the Alt requirement is ignored, even when set in the definition
 * - [Mac only] the Command key is not used at the moment, as it interfered with browser shortcuts
 * - stabilize the definitions array (e.g. React.useMemo()) to avoid re-registering the shortcuts at every render
 */
export const useGlobalShortcuts = (shortcuts: GlobalShortcutDefinition[]) => {
  React.useEffect(() => {
    const onKeyDown = (event: KeyboardEvent) => {
      for (const [key, useCtrl, useShift, useAltForNonMac, action] of shortcuts) {
        // skip disabled entries (falsy key)
        if (!key) continue;
        const modifiersOk =
          useCtrl === event.ctrlKey &&
          useShift === event.shiftKey &&
          (isMacUser /* Mac users won't need the Alt keys */ || useAltForNonMac === event.altKey);
        if (modifiersOk && event.key.toLowerCase() === key.toLowerCase()) {
          event.preventDefault();
          event.stopPropagation();
          action();
          break;
        }
      }
    };
    window.addEventListener('keydown', onKeyDown);
    return () => window.removeEventListener('keydown', onKeyDown);
  }, [shortcuts]);
};
+4 -3
View File
@@ -2,7 +2,8 @@ import * as React from 'react';
import type { DLLMId } from '~/modules/llms/store-llms';
import { GlobalShortcutItem, useGlobalShortcuts } from '~/common/components/useGlobalShortcut';
import { GlobalShortcutDefinition, useGlobalShortcuts } from '~/common/components/useGlobalShortcuts';
import { isMacUser } from '~/common/util/pwaUtils';
const DEBUG_OPTIMA_LAYOUT_PLUGGING = false;
@@ -114,8 +115,8 @@ export function OptimaLayoutProvider(props: { children: React.ReactNode }) {
// global shortcuts for Optima
const shortcuts = React.useMemo((): GlobalShortcutItem[] => [
['?', true, true, false, actions.openShortcuts],
const shortcuts = React.useMemo((): GlobalShortcutDefinition[] => [
[isMacUser ? '/' : '?', true, true, false, actions.openShortcuts],
['m', true, true, false, actions.openModelsSetup],
['p', true, true, false, actions.openPreferencesTab],
], [actions]);
+1
View File
@@ -49,6 +49,7 @@ export const backendRouter = createTRPCRouter({
hasImagingProdia: !!env.PRODIA_API_KEY,
hasLlmAnthropic: !!env.ANTHROPIC_API_KEY,
hasLlmAzureOpenAI: !!env.AZURE_OPENAI_API_KEY && !!env.AZURE_OPENAI_API_ENDPOINT,
hasLlmDeepseek: !!env.DEEPSEEK_API_KEY,
hasLlmGemini: !!env.GEMINI_API_KEY,
hasLlmGroq: !!env.GROQ_API_KEY,
hasLlmLocalAIHost: !!env.LOCALAI_API_HOST,
@@ -13,6 +13,7 @@ export interface BackendCapabilities {
hasImagingProdia: boolean;
hasLlmAnthropic: boolean;
hasLlmAzureOpenAI: boolean;
hasLlmDeepseek: boolean;
hasLlmGemini: boolean;
hasLlmGroq: boolean;
hasLlmLocalAIHost: boolean;
@@ -42,6 +43,7 @@ const useBackendCapabilitiesStore = create<BackendStore>()(
hasImagingProdia: false,
hasLlmAnthropic: false,
hasLlmAzureOpenAI: false,
hasLlmDeepseek: false,
hasLlmGemini: false,
hasLlmGroq: false,
hasLlmLocalAIHost: false,
@@ -526,6 +526,7 @@ function _prepareRequestData({ access, model, history, context: _context }: Chat
};
case 'azure':
case 'deepseek':
case 'groq':
case 'lmstudio':
case 'localai':
@@ -321,6 +321,49 @@ export function azureModelToModelDescription(azureDeploymentRef: string, openAIM
}
// [Deepseek AI]
// Curated metadata for the known Deepseek chat models; entries are matched by id prefix.
const _knownDeepseekChatModels: ManualMappings = [
  // [Models and Pricing](https://platform.deepseek.com/api-docs/pricing)
  // [List Models](https://platform.deepseek.com/api-docs/api/list-models)
  {
    idPrefix: 'deepseek-chat',
    label: 'Deepseek Chat V2',
    description: 'Good at general tasks, 128K context length',
    contextWindow: 128000,
    interfaces: [LLM_IF_OAI_Chat],
    maxCompletionTokens: 4096,
    pricing: {
      // NOTE(review): presumably $/1M tokens per the vendor pricing page — confirm against the ManualMappings convention
      chatIn: 0.14,
      chatOut: 0.28,
    },
  },
  {
    idPrefix: 'deepseek-coder',
    label: 'Deepseek Coder V2',
    description: 'Good at coding and math tasks, 128K context length',
    contextWindow: 128000,
    interfaces: [LLM_IF_OAI_Chat],
    maxCompletionTokens: 4096,
    pricing: {
      chatIn: 0.14,
      chatOut: 0.28,
    },
  },
];
/**
 * Maps a Deepseek model id to a ModelDescriptionSchema.
 * Known ids (see _knownDeepseekChatModels) get curated metadata; unknown ids fall
 * back to a generic, hidden entry so newly-released models still list safely.
 */
export function deepseekModelToModelDescription(deepseekModelId: string): ModelDescriptionSchema {
  return fromManualMapping(_knownDeepseekChatModels, deepseekModelId, undefined, undefined, {
    // fallback for ids not in the known-models table
    idPrefix: deepseekModelId,
    label: deepseekModelId.replaceAll(/[_-]/g, ' '),
    description: 'New Deepseek Model',
    contextWindow: 128000,
    maxCompletionTokens: 4096,
    interfaces: [LLM_IF_OAI_Chat], // assume..
    hidden: true, // hidden until curated
  });
}
// [LM Studio]
export function lmStudioModelToModelDescription(modelId: string): ModelDescriptionSchema {
@@ -11,7 +11,7 @@ import { Brand } from '~/common/app.config';
import { fixupHost } from '~/common/util/urlUtils';
import { OpenAIWire, WireOpenAICreateImageOutput, wireOpenAICreateImageOutputSchema, WireOpenAICreateImageRequest } from './openai.wiretypes';
import { azureModelToModelDescription, groqModelSortFn, groqModelToModelDescription, lmStudioModelToModelDescription, localAIModelToModelDescription, mistralModelsSort, mistralModelToModelDescription, oobaboogaModelToModelDescription, openAIModelFilter, openAIModelToModelDescription, openRouterModelFamilySortFn, openRouterModelToModelDescription, perplexityAIModelDescriptions, perplexityAIModelSort, togetherAIModelsToModelDescriptions } from './models.data';
import { azureModelToModelDescription, deepseekModelToModelDescription, groqModelSortFn, groqModelToModelDescription, lmStudioModelToModelDescription, localAIModelToModelDescription, mistralModelsSort, mistralModelToModelDescription, oobaboogaModelToModelDescription, openAIModelFilter, openAIModelToModelDescription, openRouterModelFamilySortFn, openRouterModelToModelDescription, perplexityAIModelDescriptions, perplexityAIModelSort, togetherAIModelsToModelDescriptions } from './models.data';
import { llmsChatGenerateWithFunctionsOutputSchema, llmsGenerateContextSchema, llmsListModelsOutputSchema, ModelDescriptionSchema } from '../llm.server.types';
import { wilreLocalAIModelsApplyOutputSchema, wireLocalAIModelsAvailableOutputSchema, wireLocalAIModelsListOutputSchema } from './localai.wiretypes';
@@ -21,7 +21,7 @@ const ABERRATION_FIXUP_SQUASH = '\n\n\n---\n\n\n';
const openAIDialects = z.enum([
'azure', 'groq', 'lmstudio', 'localai', 'mistral', 'oobabooga', 'openai', 'openrouter', 'perplexity', 'togetherai',
'azure', 'deepseek', 'groq', 'lmstudio', 'localai', 'mistral', 'oobabooga', 'openai', 'openrouter', 'perplexity', 'togetherai',
]);
type OpenAIDialects = z.infer<typeof openAIDialects>;
@@ -173,6 +173,11 @@ export const llmOpenAIRouter = createTRPCRouter({
// every dialect has a different way to enumerate models - we execute the mapping on the server side
switch (access.dialect) {
case 'deepseek':
models = openAIModels
.map(({ id }) => deepseekModelToModelDescription(id));
break;
case 'groq':
models = openAIModels
.map(groqModelToModelDescription)
@@ -420,6 +425,7 @@ export const llmOpenAIRouter = createTRPCRouter({
const DEFAULT_HELICONE_OPENAI_HOST = 'oai.hconeai.com';
const DEFAULT_DEEPSEEK_HOST = 'https://api.deepseek.com';
const DEFAULT_GROQ_HOST = 'https://api.groq.com/openai';
const DEFAULT_LOCALAI_HOST = 'http://127.0.0.1:8080';
const DEFAULT_MISTRAL_HOST = 'https://api.mistral.ai';
@@ -456,6 +462,22 @@ export function openAIAccess(access: OpenAIAccessSchema, modelRefId: string | nu
};
case 'deepseek':
// https://platform.deepseek.com/api-docs/
const deepseekKey = access.oaiKey || env.DEEPSEEK_API_KEY || '';
const deepseekHost = fixupHost(access.oaiHost || DEFAULT_DEEPSEEK_HOST, apiPath);
if (!deepseekKey || !deepseekHost)
throw new Error('Missing Deepseek API Key or Host. Add it on the UI (Models Setup) or server side (your deployment).');
return {
headers: {
'Authorization': `Bearer ${deepseekKey}`,
'Content-Type': 'application/json',
},
url: deepseekHost + apiPath,
};
case 'lmstudio':
case 'oobabooga':
case 'openai':
@@ -0,0 +1,61 @@
import * as React from 'react';
import { AlreadySet } from '~/common/components/AlreadySet';
import { FormInputKey } from '~/common/components/forms/FormInputKey';
import { InlineError } from '~/common/components/InlineError';
import { Link } from '~/common/components/Link';
import { SetupFormRefetchButton } from '~/common/components/forms/SetupFormRefetchButton';
import { useToggleableBoolean } from '~/common/util/useToggleableBoolean';
import { DModelSourceId } from '../../store-llms';
import { useLlmUpdateModels } from '../../llm.client.hooks';
import { useSourceSetup } from '../useSourceSetup';
import { ModelVendorDeepseek } from './deepseekai.vendor';
const DEEPSEEK_REG_LINK = 'https://platform.deepseek.com/api_keys';
/**
 * Settings panel for the Deepseek model source: collects the API key and
 * refreshes the model list once the key (or the server-side config) is usable.
 */
export function DeepseekAISourceSetup(props: { sourceId: DModelSourceId }) {

  // state
  const advanced = useToggleableBoolean();

  // external state
  const {
    source, sourceHasLLMs, access,
    sourceSetupValid, hasNoBackendCap: needsUserKey, updateSetup,
  } = useSourceSetup(props.sourceId, ModelVendorDeepseek);

  // derived state
  const { oaiKey: deepseekKey } = access;

  // fetching is allowed when the backend holds a server-side key, or when the typed key passed vendor validation
  const shallFetchSucceed = !needsUserKey || (!!deepseekKey && sourceSetupValid);
  // flag the input only when a key was entered but failed validation
  const showKeyError = !!deepseekKey && !sourceSetupValid;

  // fetch models - auto-fetches when the source has no LLMs yet and the fetch can succeed
  const { isFetching, refetch, isError, error } =
    useLlmUpdateModels(!sourceHasLLMs && shallFetchSucceed, source);

  return <>

    <FormInputKey
      autoCompleteId='deepseek-key' label='Deepseek Key'
      rightLabel={<>{needsUserKey
        ? !deepseekKey && <Link level='body-sm' href={DEEPSEEK_REG_LINK} target='_blank'>request Key</Link>
        : <AlreadySet />}
      </>}
      value={deepseekKey} onChange={value => updateSetup({ deepseekKey: value })}
      required={needsUserKey} isError={showKeyError}
      placeholder='...'
    />

    <SetupFormRefetchButton refetch={refetch} disabled={/*!shallFetchSucceed ||*/ isFetching} loading={isFetching} error={isError} advanced={advanced} />

    {isError && <InlineError error={error} />}

  </>;
}
+49
View File
@@ -0,0 +1,49 @@
import { DeepseekIcon } from '~/common/components/icons/vendors/DeepseekIcon';
import type { IModelVendor } from '../IModelVendor';
import type { OpenAIAccessSchema } from '../../server/openai/openai.router';
import { LLMOptionsOpenAI, ModelVendorOpenAI } from '../openai/openai.vendor';
import { OpenAILLMOptions } from '../openai/OpenAILLMOptions';
import { DeepseekAISourceSetup } from './DeepseekAISourceSetup';
// Per-source persisted setup for the Deepseek vendor
export interface SourceSetupDeepseek {
  deepseekKey: string; // user-provided API key (may be empty when the backend supplies one)
}

/**
 * Deepseek vendor descriptor. The service is OpenAI-compatible, so all
 * transport functions delegate to the OpenAI vendor using the 'deepseek'
 * access dialect.
 */
export const ModelVendorDeepseek: IModelVendor<SourceSetupDeepseek, OpenAIAccessSchema, LLMOptionsOpenAI> = {
  id: 'deepseek',
  name: 'Deepseek',
  rank: 19,
  location: 'cloud',
  instanceLimit: 1,
  hasBackendCapKey: 'hasLlmDeepseek',

  // components
  Icon: DeepseekIcon,
  SourceSetupComponent: DeepseekAISourceSetup,
  LLMOptionsComponent: OpenAILLMOptions,

  // functions
  initializeSetup: () => ({
    deepseekKey: '',
  }),
  validateSetup: (setup) => {
    // shallow sanity check: only validates key length (>= 35 chars), not the key itself
    return setup.deepseekKey?.length >= 35;
  },
  getTransportAccess: (partialSetup) => ({
    dialect: 'deepseek',
    oaiKey: partialSetup?.deepseekKey || '',
    oaiOrg: '',
    oaiHost: '', // empty -> server falls back to DEFAULT_DEEPSEEK_HOST
    heliKey: '',
    moderationCheck: false,
  }),

  // OpenAI transport ('Deepseek' dialect in 'access')
  rpcUpdateModelsOrThrow: ModelVendorOpenAI.rpcUpdateModelsOrThrow,
  rpcChatGenerateOrThrow: ModelVendorOpenAI.rpcChatGenerateOrThrow,
  streamingChatGenerateOrThrow: ModelVendorOpenAI.streamingChatGenerateOrThrow,
};
+4 -1
View File
@@ -14,6 +14,7 @@ import { ModelVendorTogetherAI } from './togetherai/togetherai.vendor';
import type { IModelVendor } from './IModelVendor';
import { DLLMId, DModelSource, DModelSourceId, findLLMOrThrow, findSourceOrThrow } from '../store-llms';
import { ModelVendorDeepseek } from './deepseek/deepseekai.vendor';
export type ModelVendorId =
| 'anthropic'
@@ -28,7 +29,8 @@ export type ModelVendorId =
| 'openai'
| 'openrouter'
| 'perplexity'
| 'togetherai';
| 'togetherai'
| 'deepseek';
/** Global: Vendor Instances Registry **/
const MODEL_VENDOR_REGISTRY: Record<ModelVendorId, IModelVendor> = {
@@ -45,6 +47,7 @@ const MODEL_VENDOR_REGISTRY: Record<ModelVendorId, IModelVendor> = {
openrouter: ModelVendorOpenRouter,
perplexity: ModelVendorPerplexity,
togetherai: ModelVendorTogetherAI,
deepseek: ModelVendorDeepseek,
} as Record<string, IModelVendor>;
const MODEL_VENDOR_DEFAULT: ModelVendorId = 'openai';
+2 -2
View File
@@ -57,7 +57,7 @@ export function DallESettings() {
{isDallE3 && <FormRadioControl
title='Style'
description={(isDallE3 && dalleStyle === 'vivid') ? 'Hyper-Real' : 'Relistic'}
description={(isDallE3 && dalleStyle === 'vivid') ? 'Hyper-Real' : 'Realistic'}
disabled={!isDallE3}
options={[
{ value: 'natural', label: 'Natural' },
@@ -121,4 +121,4 @@ export function DallESettings() {
<FormLabelStart title={advanced.on ? 'Hide Advanced' : 'Advanced'} onClick={advanced.toggle} />
</>;
}
}
+3
View File
@@ -53,6 +53,9 @@ export const env = createEnv({
// LLM: Toghether AI
TOGETHERAI_API_KEY: z.string().optional(),
// LLM: Deepseek AI
DEEPSEEK_API_KEY: z.string().optional(),
// Helicone - works on both OpenAI and Anthropic vendors
HELICONE_API_KEY: z.string().optional(),