Initial Support for Oobabooga/text-generation-webui

This commit is contained in:
Enrico Ros
2023-07-08 23:03:47 -07:00
parent 9f0d6bb17e
commit da647c0e7d
5 changed files with 162 additions and 2 deletions
+7 -1
View File
@@ -9,7 +9,13 @@ export type DLLMId = string;
export type DLLMOptions = LLMOptionsOpenAI; //DLLMValuesOpenAI | DLLMVaLocalAIDLLMValues; — NOTE(review): trailing comment looks garbled, confirm intended names
export type DModelSourceId = string;
export type DModelSourceSetup = SourceSetupOpenAI | SourceSetupLocalAI; // NOTE(review): should this union also include SourceSetupOobabooga? confirm against useSourceSetup callers

// Identifiers of the supported model vendors; commented entries are planned.
// (fix: removed a duplicate one-line `ModelVendorId` declaration left over from the diff —
//  two declarations of the same type alias are a compile error)
export type ModelVendorId =
  | 'localai'
  | 'oobabooga'
  | 'openai';
// | 'anthropic'
// | 'azure_openai'
// | 'google_vertex'

/// Large Language Model - a model that can generate text
@@ -0,0 +1,10 @@
import * as React from 'react';
import { SvgIcon } from '@mui/joy';
import { SxProps } from '@mui/joy/styles/types';
export function OobaboogaIcon(props: { sx?: SxProps }) {
return <SvgIcon viewBox='0 0 512 512' width='24' height='24' stroke='currentColor' fill='none' strokeLinecap='round' strokeLinejoin='round' {...props}>
<path d='M324.3 34.79c-25.7 0-46.5 27.02-46.5 60.36 0 20.05 7.7 38.85 20.6 50.05 5.6 81.2-4.7 152.3-53.6 160.1-60.2 6.2-73.2-68.2-84.6-139 8.9-15.7 9.2-37.5.6-57.3-13.4-30.79-43.6-47.33-67.56-36.92-23.92 10.4-32.47 43.72-19.11 74.52 9.44 21.7 27.77 37.1 47.07 39.6 33.9 57.6 71.8 152.4 51 180.2-11.5 15.2-32.7 10.4-55.6-4 3.7-16.3-4.5-34.6-20.81-46.7-22.74-16.9-53.8-16.5-69.39.8-15.6 17.3-9.77 45 13 61.9 17.78 13.1 41.33 16.1 58.44 7.4 67.86 32.9 121.86 99.5 175.56 90.6 76.2-12.6 76.5-119.3 148.3-149.9 13.7 9.1 33.5 8.3 49.7-2.1 21.2-13.7 29.1-39.2 17.7-56.9-11.4-17.8-37.9-21.1-59.2-7.4-15.9 10.3-24.9 27.8-22.5 43.9-29.9 22.4-68.5 39.7-88.7 27.3-16.2-9.9 1.8-122.3 19-178.4 19.6-7.7 33.1-31.1 33.1-57.75 0-33.34-20.8-60.37-46.5-60.36z' />
</SvgIcon>;
}
@@ -0,0 +1,106 @@
import * as React from 'react';
import { Alert, Box, Button, FormControl, FormHelperText, FormLabel, Input, Typography } from '@mui/joy';
import SyncIcon from '@mui/icons-material/Sync';
import { apiQuery } from '~/modules/trpc/trpc.client';
import { Link } from '~/common/components/Link';
import { settingsCol1Width, settingsGap } from '~/common/theme';
import { DLLM, DModelSource, DModelSourceId } from '../llm.types';
import { LLMOptionsOpenAI, normalizeOAISetup } from '../openai/openai.vendor';
import { OpenAI } from '../openai/openai.types';
import { normalizeOobaboogaSetup, SourceSetupOobabooga } from './oobabooga.vendor';
import { useModelsStore, useSourceSetup } from '../store-llms';
/**
 * Configuration pane for an Oobabooga (text-generation-webui) model source.
 * Lets the user point at a local server and pull its model list through the
 * OpenAI-compatible endpoint that text-generation-webui exposes.
 *
 * Fixes: removed a leftover debug `console.log` from the query callback, and
 * dropped the unused `sourceLLMs` destructuring.
 */
export function OobaboogaSourceSetup(props: { sourceId: DModelSourceId }) {

  // external state
  const {
    source, updateSetup, normSetup,
  } = useSourceSetup<SourceSetupOobabooga>(props.sourceId, normalizeOobaboogaSetup);

  // fetch models - the OpenAI way, since Oobabooga serves an OpenAI-compatible API
  const { isFetching, refetch, isError, error } = apiQuery.openai.listModels.useQuery({
    access: normalizeOAISetup(normSetup),
  }, {
    enabled: false, //!hasModels && !!asValidURL(normSetup.oaiHost),
    onSuccess: models => {
      // convert every listed model into a DLLM and merge into the global store
      const llms = source ? models.map(model => oobaboogaModelToDLLM(model, source)).filter(model => !!model) : [];
      useModelsStore.getState().addLLMs(llms);
    },
    staleTime: Infinity,
    refetchOnMount: 'always',
  });

  return <Box sx={{ display: 'flex', flexDirection: 'column', gap: settingsGap }}>

    <Typography>
      You can use a running <Link href='https://github.com/oobabooga/text-generation-webui' target='_blank'>
      text-generation-webui</Link> instance as a source for models.
      Follow <Link href='https://github.com/enricoros/big-agi/blob/main/docs/local-llm-text-web-ui.md' target='_blank'>
      the instructions</Link> to set up the server.
    </Typography>

    <FormControl orientation='horizontal' sx={{ flexWrap: 'wrap', justifyContent: 'space-between' }}>
      <Box sx={{ minWidth: settingsCol1Width }}>
        <FormLabel>
          API Base
        </FormLabel>
        <FormHelperText sx={{ display: 'block' }}>
          Excluding /v1
        </FormHelperText>
      </Box>
      <Input
        variant='outlined' placeholder='http://127.0.0.1:5001'
        value={normSetup.oaiHost} onChange={event => updateSetup({ oaiHost: event.target.value })}
        sx={{ flexGrow: 1 }}
      />
    </FormControl>

    <Box sx={{ display: 'flex', alignItems: 'end', justifyContent: 'space-between' }}>
      <Button
        variant='solid' color={isError ? 'warning' : 'primary'}
        disabled={!(normSetup.oaiHost.length >= 7) || isFetching}
        endDecorator={<SyncIcon />}
        onClick={() => refetch()}
        sx={{ minWidth: 120, ml: 'auto' }}
      >
        Models
      </Button>
    </Box>

    {isError && <Alert variant='soft' color='warning' sx={{ mt: 1 }}><Typography>Issue: {error?.message || error?.toString() || 'unknown'}</Typography></Alert>}

  </Box>;
}
// Completion-only models that should not be offered for chat; matched by exact id.
const NotChatModels: string[] = [
  'text-curie-001', 'text-davinci-002',
];

/**
 * Converts an OpenAI-style model description (as served by Oobabooga's
 * OpenAI-compatible API) into a DLLM entry for the models store.
 *
 * Fix: empty words are now skipped when prettifying the label — a model id with
 * consecutive, leading, or trailing '_'/'-' (e.g. 'llama__2-7b') used to produce
 * empty strings from split(), and `word[0].toUpperCase()` threw a TypeError.
 */
function oobaboogaModelToDLLM(model: OpenAI.Wire.Models.ModelDescription, source: DModelSource): (DLLM & { options: LLMOptionsOpenAI }) {
  // 'my_model-name' -> 'My Model Name'
  const label = model.id
    .replaceAll(/[_-]/g, ' ')
    .split(' ')
    .filter(word => !!word) // guard against empty words (see docstring)
    .map(word => word[0].toUpperCase() + word.slice(1))
    .join(' ');
  // TODO - figure out how to get the context window size from Oobabooga
  const contextTokens = 4096;
  return {
    id: `${source.id}-${model.id}`,
    label,
    created: model.created || Math.round(Date.now() / 1000), // fall back to 'now' when the server omits it
    description: 'Oobabooga model',
    tags: [], // ['stream', 'chat'],
    contextTokens,
    hidden: NotChatModels.includes(model.id),
    sId: source.id,
    _source: source,
    options: {
      llmRef: model.id,
      llmTemperature: 0.5,
      llmResponseTokens: Math.round(contextTokens / 8), // default response budget: 1/8 of the context
    },
  };
}
@@ -0,0 +1,36 @@
import { ModelVendor } from '../llm.types';
import { OpenAILLMOptions } from '~/modules/llms/openai/OpenAILLMOptions';
import { openAICallChat, openAICallChatWithFunctions } from '~/modules/llms/openai/openai.client';
import { OobaboogaIcon } from './OobaboogaIcon';
import { OobaboogaSourceSetup } from './OobaboogaSourceSetup';
/**
 * Vendor descriptor for Oobabooga's text-generation-webui, which is driven
 * through the OpenAI-compatible client functions.
 * NOTE(review): the exported name spells 'Ooobabooga' with three 'o's; it matches
 * the import in the store registry, but looks like a typo — consider a repo-wide rename.
 */
export const ModelVendorOoobabooga: ModelVendor = {
  id: 'oobabooga',
  name: 'Oobabooga (Alpha)',
  rank: 15, // display ordering among vendors
  location: 'local', // runs on the user's machine, not a cloud service
  instanceLimit: 1, // at most one Oobabooga source can be configured

  // components
  Icon: OobaboogaIcon,
  SourceSetupComponent: OobaboogaSourceSetup,
  LLMOptionsComponent: OpenAILLMOptions, // reuses the OpenAI options UI (compatible parameters)

  // functions - chat calls go through the OpenAI client against the compatible API
  callChat: openAICallChat,
  callChatWithFunctions: openAICallChatWithFunctions,
};
/** Persisted setup state for an Oobabooga model source. */
export interface SourceSetupOobabooga {
  oaiHost: string; // use OpenAI-compatible non-default hosts (full origin path)
}

/**
 * Fills a (possibly missing) partial setup with defaults.
 *
 * Fix: an explicitly-undefined `oaiHost` in the partial no longer leaks through —
 * the previous spread-based default (`{ oaiHost: '', ...partialSetup }`) would
 * return `oaiHost: undefined` in that case, violating the declared return type.
 */
export function normalizeOobaboogaSetup(partialSetup?: Partial<SourceSetupOobabooga>): SourceSetupOobabooga {
  return {
    oaiHost: partialSetup?.oaiHost ?? '',
  };
}
+3 -1
View File
@@ -1,5 +1,6 @@
import { DModelSource, DModelSourceId, ModelVendor, ModelVendorId } from './llm.types';
import { ModelVendorLocalAI } from './localai/localai.vendor';
import { ModelVendorOoobabooga } from './oobabooga/oobabooga.vendor';
import { ModelVendorOpenAI } from './openai/openai.vendor';
@@ -37,8 +38,9 @@ export const createDefaultModelSource = (otherSources: DModelSource[]): DModelSo
/// Internals ///

// Registry mapping each ModelVendorId to its vendor implementation.
// (object literal continues past this view; commented entries are planned vendors)
const MODEL_VENDOR_REGISTRY: Record<ModelVendorId, ModelVendor> = {
  openai: ModelVendorOpenAI, // NOTE(review): duplicate key — 'openai' appears again below; the later entry wins, making this line dead. Looks like merge/diff residue: confirm and remove one of the two.
  localai: ModelVendorLocalAI,
  oobabooga: ModelVendorOoobabooga,
  openai: ModelVendorOpenAI,
  // azure_openai: { id: 'azure_openai', name: 'Azure OpenAI', instanceLimit: 1, location: 'cloud', rank: 30 },
  // google_vertex: { id: 'google_vertex', name: 'Google Vertex', instanceLimit: 1, location: 'cloud', rank: 40 },
  // anthropic: { id: 'anthropic', name: 'Anthropic', instanceLimit: 1, location: 'cloud', rank: 50 },