diff --git a/src/modules/llms/llm.types.ts b/src/modules/llms/llm.types.ts
index 4c9a8ec1d..e1bf36cf0 100644
--- a/src/modules/llms/llm.types.ts
+++ b/src/modules/llms/llm.types.ts
@@ -9,7 +9,13 @@ export type DLLMId = string;
export type DLLMOptions = LLMOptionsOpenAI; //DLLMValuesOpenAI | DLLMVaLocalAIDLLMValues;
export type DModelSourceId = string;
export type DModelSourceSetup = SourceSetupOpenAI | SourceSetupLocalAI;
-export type ModelVendorId = 'localai' | 'openai'; // | 'anthropic' | 'azure_openai' | 'google_vertex';
+export type ModelVendorId =
+ | 'localai'
+ | 'oobabooga'
+ | 'openai';
+// | 'anthropic'
+// | 'azure_openai'
+// | 'google_vertex'
/// Large Language Model - a model that can generate text
diff --git a/src/modules/llms/oobabooga/OobaboogaIcon.tsx b/src/modules/llms/oobabooga/OobaboogaIcon.tsx
new file mode 100644
index 000000000..ba97803ef
--- /dev/null
+++ b/src/modules/llms/oobabooga/OobaboogaIcon.tsx
@@ -0,0 +1,10 @@
+import * as React from 'react';
+
+import { SvgIcon } from '@mui/joy';
+import { SxProps } from '@mui/joy/styles/types';
+
+// Icon component for the Oobabooga model vendor.
+// NOTE(review): the JSX body (presumably an <SvgIcon sx={props.sx}> element with the
+// vendor's path data) appears to have been stripped from this patch — `return` on its
+// own line triggers ASI, so as written the component renders nothing and `props.sx`
+// is unused. Restore the original markup before merging.
+export function OobaboogaIcon(props: { sx?: SxProps }) {
+ return
+
+ ;
+}
\ No newline at end of file
diff --git a/src/modules/llms/oobabooga/OobaboogaSourceSetup.tsx b/src/modules/llms/oobabooga/OobaboogaSourceSetup.tsx
new file mode 100644
index 000000000..4f1cf3573
--- /dev/null
+++ b/src/modules/llms/oobabooga/OobaboogaSourceSetup.tsx
@@ -0,0 +1,106 @@
+import * as React from 'react';
+
+import { Alert, Box, Button, FormControl, FormHelperText, FormLabel, Input, Typography } from '@mui/joy';
+import SyncIcon from '@mui/icons-material/Sync';
+
+import { apiQuery } from '~/modules/trpc/trpc.client';
+import { Link } from '~/common/components/Link';
+import { settingsCol1Width, settingsGap } from '~/common/theme';
+
+import { DLLM, DModelSource, DModelSourceId } from '../llm.types';
+import { LLMOptionsOpenAI, normalizeOAISetup } from '../openai/openai.vendor';
+import { OpenAI } from '../openai/openai.types';
+import { normalizeOobaboogaSetup, SourceSetupOobabooga } from './oobabooga.vendor';
+import { useModelsStore, useSourceSetup } from '../store-llms';
+
+
+// Settings panel for an Oobabooga model source: lets the user enter the server's
+// OpenAI-compatible base URL and trigger a fetch of the model list.
+// NOTE(review): most of the JSX in the return statement appears to have been stripped
+// from this patch — orphaned attribute fragments remain (e.g. the `updateSetup(...)}`
+// and `sx={{ flexGrow: 1 }}` lines), and `return` on its own line returns undefined
+// via ASI. `sourceLLMs`, `isFetching` and `refetch` are destructured but unused in
+// the visible code, presumably because the JSX that consumed them is gone. Restore
+// the original markup before merging.
+export function OobaboogaSourceSetup(props: { sourceId: DModelSourceId }) {
+
+ // external state
+ const {
+ source, sourceLLMs, updateSetup, normSetup,
+ } = useSourceSetup(props.sourceId, normalizeOobaboogaSetup);
+
+
+ // fetch models - the OpenAI way; Oobabooga exposes an OpenAI-compatible endpoint,
+ // so the normalized setup is adapted via normalizeOAISetup and the OpenAI query used
+ const { isFetching, refetch, isError, error } = apiQuery.openai.listModels.useQuery({
+ access: normalizeOAISetup(normSetup),
+ }, {
+ enabled: false, //!hasModels && !!asValidURL(normSetup.oaiHost),
+ onSuccess: models => {
+ console.log('OobaboogaSourceSetup: models', models);
+ // convert each OpenAI-style model description to a DLLM and push into the global store
+ const llms = source ? models.map(model => oobaboogaModelToDLLM(model, source)).filter(model => !!model) : [];
+ useModelsStore.getState().addLLMs(llms);
+ },
+ staleTime: Infinity,
+ refetchOnMount: 'always',
+ });
+
+ // NOTE(review): stripped JSX begins here — only text nodes and attribute fragments survive
+ return
+
+
+ You can use a running
+ text-generation-webui instance as a source for models.
+ Follow
+ the instructions to set up the server.
+
+
+
+
+
+ API Base
+
+
+ Excluding /v1
+
+
+ updateSetup({ oaiHost: event.target.value })}
+ sx={{ flexGrow: 1 }}
+ />
+
+
+
+
+
+
+ {isError && Issue: {error?.message || error?.toString() || 'unknown'}}
+
+ ;
+}
+
+const NotChatModels: string[] = [
+ 'text-curie-001', 'text-davinci-002',
+];
+
+
+function oobaboogaModelToDLLM(model: OpenAI.Wire.Models.ModelDescription, source: DModelSource): (DLLM & { options: LLMOptionsOpenAI }) {
+ const label = model.id.replaceAll(/[_-]/g, ' ').split(' ').map(word => word[0].toUpperCase() + word.slice(1)).join(' ');
+ // TODO - figure out how to the context window size from Oobabooga
+ const contextTokens = 4096;
+ return {
+ id: `${source.id}-${model.id}`,
+ label,
+ created: model.created || Math.round(Date.now() / 1000),
+ description: 'Oobabooga model',
+ tags: [], // ['stream', 'chat'],
+ contextTokens,
+ hidden: NotChatModels.includes(model.id),
+ sId: source.id,
+ _source: source,
+ options: {
+ llmRef: model.id,
+ llmTemperature: 0.5,
+ llmResponseTokens: Math.round(contextTokens / 8),
+ },
+ };
+}
\ No newline at end of file
diff --git a/src/modules/llms/oobabooga/oobabooga.vendor.ts b/src/modules/llms/oobabooga/oobabooga.vendor.ts
new file mode 100644
index 000000000..92d2c35c7
--- /dev/null
+++ b/src/modules/llms/oobabooga/oobabooga.vendor.ts
@@ -0,0 +1,36 @@
+import { ModelVendor } from '../llm.types';
+
+import { OpenAILLMOptions } from '~/modules/llms/openai/OpenAILLMOptions';
+import { openAICallChat, openAICallChatWithFunctions } from '~/modules/llms/openai/openai.client';
+
+import { OobaboogaIcon } from './OobaboogaIcon';
+import { OobaboogaSourceSetup } from './OobaboogaSourceSetup';
+
+
+// Vendor descriptor wiring Oobabooga's OpenAI-compatible API into the app's model
+// registry: local-only, single instance, delegating chat calls to the OpenAI client.
+// NOTE(review): the identifier spells "Ooobabooga" with a triple 'o'; the import in
+// vendor.registry.ts uses the same spelling so the code works, but consider renaming
+// both sides in a single change.
+export const ModelVendorOoobabooga: ModelVendor = {
+ id: 'oobabooga',
+ name: 'Oobabooga (Alpha)',
+ rank: 15,
+ location: 'local',
+ instanceLimit: 1,
+
+ // components
+ Icon: OobaboogaIcon,
+ SourceSetupComponent: OobaboogaSourceSetup,
+ LLMOptionsComponent: OpenAILLMOptions,
+
+ // functions
+ callChat: openAICallChat,
+ callChatWithFunctions: openAICallChatWithFunctions,
+};
+
+export interface SourceSetupOobabooga {
+ oaiHost: string; // use OpenAI-compatible non-default hosts (full origin path)
+}
+
+export function normalizeOobaboogaSetup(partialSetup?: Partial): SourceSetupOobabooga {
+ return {
+ oaiHost: '',
+ ...partialSetup,
+ };
+}
\ No newline at end of file
diff --git a/src/modules/llms/vendor.registry.ts b/src/modules/llms/vendor.registry.ts
index ef046587d..812062073 100644
--- a/src/modules/llms/vendor.registry.ts
+++ b/src/modules/llms/vendor.registry.ts
@@ -1,5 +1,6 @@
import { DModelSource, DModelSourceId, ModelVendor, ModelVendorId } from './llm.types';
import { ModelVendorLocalAI } from './localai/localai.vendor';
+import { ModelVendorOoobabooga } from './oobabooga/oobabooga.vendor';
import { ModelVendorOpenAI } from './openai/openai.vendor';
@@ -37,8 +38,9 @@ export const createDefaultModelSource = (otherSources: DModelSource[]): DModelSo
/// Internals ///
const MODEL_VENDOR_REGISTRY: Record = {
- openai: ModelVendorOpenAI,
localai: ModelVendorLocalAI,
+ oobabooga: ModelVendorOoobabooga,
+ openai: ModelVendorOpenAI,
// azure_openai: { id: 'azure_openai', name: 'Azure OpenAI', instanceLimit: 1, location: 'cloud', rank: 30 },
// google_vertex: { id: 'google_vertex', name: 'Google Vertex', instanceLimit: 1, location: 'cloud', rank: 40 },
// anthropic: { id: 'anthropic', name: 'Anthropic', instanceLimit: 1, location: 'cloud', rank: 50 },