LLMs: getTransportAccess

This commit is contained in:
Enrico Ros
2023-12-12 14:45:55 -08:00
parent fbeb604b26
commit d0dcb2ac02
17 changed files with 31 additions and 30 deletions
+2 -2
View File
@@ -272,7 +272,7 @@ export function useChatLLM() {
/**
* Source-specific read/write - great time saver
*/
-export function useSourceSetup<TSourceSetup, TAccess>(sourceId: DModelSourceId, getAccess: (partialSetup?: Partial<TSourceSetup>) => TAccess) {
+export function useSourceSetup<TSourceSetup, TAccess>(sourceId: DModelSourceId, getTransportAccess: (partialSetup?: Partial<TSourceSetup>) => TAccess) {
// invalidate when the setup changes
const { updateSourceSetup, ...rest } = useModelsStore(state => {
const source: DModelSource<TSourceSetup> | null = state.sources.find(source => source.id === sourceId) ?? null;
@@ -281,7 +281,7 @@ export function useSourceSetup<TSourceSetup, TAccess>(sourceId: DModelSourceId,
source,
sourceLLMs,
sourceHasLLMs: !!sourceLLMs.length,
-access: getAccess(source?.setup),
+access: getTransportAccess(source?.setup),
updateSourceSetup: state.updateSourceSetup,
};
}, shallow);
+1 -1
View File
@@ -27,7 +27,7 @@ export async function streamChat(
onUpdate: (update: Partial<{ text: string, typing: boolean, originLLM: string }>, done: boolean) => void,
): Promise<void> {
const { llm, vendor } = findVendorForLlmOrThrow(llmId);
-const access = vendor.getAccess(llm._source.setup) as ChatStreamInputSchema['access'];
+const access = vendor.getTransportAccess(llm._source.setup) as ChatStreamInputSchema['access'];
return await vendorStreamChat(access, llm, messages, abortSignal, onUpdate);
}
+2 -1
View File
@@ -24,7 +24,8 @@ export interface IModelVendor<TSourceSetup = unknown, TLLMOptions = unknown, TAc
// functions
readonly initializeSetup?: () => TSourceSetup;
-getAccess(setup?: Partial<TSourceSetup>): TAccess;
+// get a TAccess object, translating from TSourceSetup
+getTransportAccess(setup?: Partial<TSourceSetup>): TAccess;
callChatGenerate(llm: TDLLM, messages: VChatMessageIn[], maxTokens?: number): Promise<VChatMessageOut>;
@@ -23,7 +23,7 @@ export function AnthropicSourceSetup(props: { sourceId: DModelSourceId }) {
// external state
const { source, sourceHasLLMs, access, updateSetup } =
-useSourceSetup(props.sourceId, ModelVendorAnthropic.getAccess);
+useSourceSetup(props.sourceId, ModelVendorAnthropic.getTransportAccess);
// derived state
const { anthropicKey, anthropicHost, heliconeKey } = access;
+2 -2
View File
@@ -36,14 +36,14 @@ export const ModelVendorAnthropic: IModelVendor<SourceSetupAnthropic, LLMOptions
LLMOptionsComponent: OpenAILLMOptions,
// functions
-getAccess: (partialSetup): AnthropicAccessSchema => ({
+getTransportAccess: (partialSetup): AnthropicAccessSchema => ({
dialect: 'anthropic',
anthropicKey: partialSetup?.anthropicKey || '',
anthropicHost: partialSetup?.anthropicHost || null,
heliconeKey: partialSetup?.heliconeKey || null,
}),
callChatGenerate(llm, messages: VChatMessageIn[], maxTokens?: number): Promise<VChatMessageOut> {
-return anthropicCallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, /*null, null,*/ maxTokens);
+return anthropicCallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, /*null, null,*/ maxTokens);
},
callChatGenerateWF(): Promise<VChatMessageOrFunctionCallOut> {
throw new Error('Anthropic does not support "Functions" yet');
+1 -1
View File
@@ -18,7 +18,7 @@ export function AzureSourceSetup(props: { sourceId: DModelSourceId }) {
// external state
const { source, sourceHasLLMs, access, updateSetup } =
-useSourceSetup(props.sourceId, ModelVendorAzure.getAccess);
+useSourceSetup(props.sourceId, ModelVendorAzure.getTransportAccess);
// derived state
const { oaiKey: azureKey, oaiHost: azureEndpoint } = access;
+3 -3
View File
@@ -50,7 +50,7 @@ export const ModelVendorAzure: IModelVendor<SourceSetupAzure, LLMOptionsOpenAI,
LLMOptionsComponent: OpenAILLMOptions,
// functions
-getAccess: (partialSetup): OpenAIAccessSchema => ({
+getTransportAccess: (partialSetup): OpenAIAccessSchema => ({
dialect: 'azure',
oaiKey: partialSetup?.azureKey || '',
oaiOrg: '',
@@ -59,9 +59,9 @@ export const ModelVendorAzure: IModelVendor<SourceSetupAzure, LLMOptionsOpenAI,
moderationCheck: false,
}),
callChatGenerate(llm, messages: VChatMessageIn[], maxTokens?: number): Promise<VChatMessageOut> {
-return openAICallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, null, null, maxTokens);
+return openAICallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, null, null, maxTokens);
},
callChatGenerateWF(llm, messages: VChatMessageIn[], functions: VChatFunctionIn[] | null, forceFunctionName: string | null, maxTokens?: number): Promise<VChatMessageOrFunctionCallOut> {
-return openAICallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, functions, forceFunctionName, maxTokens);
+return openAICallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, functions, forceFunctionName, maxTokens);
},
};
+1 -1
View File
@@ -19,7 +19,7 @@ export function LocalAISourceSetup(props: { sourceId: DModelSourceId }) {
// external state
const { source, access, updateSetup } =
-useSourceSetup(props.sourceId, ModelVendorLocalAI.getAccess);
+useSourceSetup(props.sourceId, ModelVendorLocalAI.getTransportAccess);
// derived state
const { oaiHost } = access;
+3 -3
View File
@@ -30,7 +30,7 @@ export const ModelVendorLocalAI: IModelVendor<SourceSetupLocalAI, LLMOptionsOpen
initializeSetup: () => ({
oaiHost: 'http://localhost:8080',
}),
-getAccess: (partialSetup) => ({
+getTransportAccess: (partialSetup) => ({
dialect: 'localai',
oaiKey: '',
oaiOrg: '',
@@ -39,9 +39,9 @@ export const ModelVendorLocalAI: IModelVendor<SourceSetupLocalAI, LLMOptionsOpen
moderationCheck: false,
}),
callChatGenerate(llm, messages: VChatMessageIn[], maxTokens?: number): Promise<VChatMessageOut> {
-return openAICallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, null, null, maxTokens);
+return openAICallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, null, null, maxTokens);
},
callChatGenerateWF(llm, messages: VChatMessageIn[], functions: VChatFunctionIn[] | null, forceFunctionName: string | null, maxTokens?: number): Promise<VChatMessageOrFunctionCallOut> {
-return openAICallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, functions, forceFunctionName, maxTokens);
+return openAICallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, functions, forceFunctionName, maxTokens);
},
};
+1 -1
View File
@@ -22,7 +22,7 @@ export function OllamaSourceSetup(props: { sourceId: DModelSourceId }) {
// external state
const { source, access, updateSetup } =
-useSourceSetup(props.sourceId, ModelVendorOllama.getAccess);
+useSourceSetup(props.sourceId, ModelVendorOllama.getTransportAccess);
// derived state
const { ollamaHost } = access;
+2 -2
View File
@@ -32,12 +32,12 @@ export const ModelVendorOllama: IModelVendor<SourceSetupOllama, LLMOptionsOpenAI
LLMOptionsComponent: OpenAILLMOptions,
// functions
-getAccess: (partialSetup): OllamaAccessSchema => ({
+getTransportAccess: (partialSetup): OllamaAccessSchema => ({
dialect: 'ollama',
ollamaHost: partialSetup?.ollamaHost || '',
}),
callChatGenerate(llm, messages: VChatMessageIn[], maxTokens?: number): Promise<VChatMessageOut> {
-return ollamaCallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, maxTokens);
+return ollamaCallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, maxTokens);
},
callChatGenerateWF(): Promise<VChatMessageOrFunctionCallOut> {
throw new Error('Ollama does not support "Functions" yet');
@@ -18,7 +18,7 @@ export function OobaboogaSourceSetup(props: { sourceId: DModelSourceId }) {
// external state
const { source, sourceHasLLMs, access, updateSetup } =
-useSourceSetup(props.sourceId, ModelVendorOoobabooga.getAccess);
+useSourceSetup(props.sourceId, ModelVendorOoobabooga.getTransportAccess);
// derived state
const { oaiHost } = access;
+3 -3
View File
@@ -30,7 +30,7 @@ export const ModelVendorOoobabooga: IModelVendor<SourceSetupOobabooga, LLMOption
initializeSetup: (): SourceSetupOobabooga => ({
oaiHost: 'http://127.0.0.1:5000',
}),
-getAccess: (partialSetup): OpenAIAccessSchema => ({
+getTransportAccess: (partialSetup): OpenAIAccessSchema => ({
dialect: 'oobabooga',
oaiKey: '',
oaiOrg: '',
@@ -39,9 +39,9 @@ export const ModelVendorOoobabooga: IModelVendor<SourceSetupOobabooga, LLMOption
moderationCheck: false,
}),
callChatGenerate(llm, messages: VChatMessageIn[], maxTokens?: number): Promise<VChatMessageOut> {
-return openAICallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, null, null, maxTokens);
+return openAICallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, null, null, maxTokens);
},
callChatGenerateWF(llm, messages: VChatMessageIn[], functions: VChatFunctionIn[] | null, forceFunctionName: string | null, maxTokens?: number): Promise<VChatMessageOrFunctionCallOut> {
-return openAICallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, functions, forceFunctionName, maxTokens);
+return openAICallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, functions, forceFunctionName, maxTokens);
},
};
+1 -1
View File
@@ -29,7 +29,7 @@ export function OpenAISourceSetup(props: { sourceId: DModelSourceId }) {
// external state
const { source, sourceHasLLMs, access, updateSetup } =
-useSourceSetup(props.sourceId, ModelVendorOpenAI.getAccess);
+useSourceSetup(props.sourceId, ModelVendorOpenAI.getTransportAccess);
// derived state
const { oaiKey, oaiOrg, oaiHost, heliKey, moderationCheck } = access;
+3 -3
View File
@@ -42,7 +42,7 @@ export const ModelVendorOpenAI: IModelVendor<SourceSetupOpenAI, LLMOptionsOpenAI
LLMOptionsComponent: OpenAILLMOptions,
// functions
-getAccess: (partialSetup): OpenAIAccessSchema => ({
+getTransportAccess: (partialSetup): OpenAIAccessSchema => ({
dialect: 'openai',
oaiKey: '',
oaiOrg: '',
@@ -52,11 +52,11 @@ export const ModelVendorOpenAI: IModelVendor<SourceSetupOpenAI, LLMOptionsOpenAI
...partialSetup,
}),
callChatGenerate(llm, messages: VChatMessageIn[], maxTokens?: number): Promise<VChatMessageOut> {
-const access = this.getAccess(llm._source.setup);
+const access = this.getTransportAccess(llm._source.setup);
return openAICallChatGenerate(access, llm.options, messages, null, null, maxTokens);
},
callChatGenerateWF(llm, messages: VChatMessageIn[], functions: VChatFunctionIn[] | null, forceFunctionName: string | null, maxTokens?: number): Promise<VChatMessageOrFunctionCallOut> {
-const access = this.getAccess(llm._source.setup);
+const access = this.getTransportAccess(llm._source.setup);
return openAICallChatGenerate(access, llm.options, messages, functions, forceFunctionName, maxTokens);
},
};
@@ -19,7 +19,7 @@ export function OpenRouterSourceSetup(props: { sourceId: DModelSourceId }) {
// external state
const { source, sourceHasLLMs, access, updateSetup } =
-useSourceSetup(props.sourceId, ModelVendorOpenRouter.getAccess);
+useSourceSetup(props.sourceId, ModelVendorOpenRouter.getTransportAccess);
// derived state
const { oaiKey } = access;
+3 -3
View File
@@ -51,7 +51,7 @@ export const ModelVendorOpenRouter: IModelVendor<SourceSetupOpenRouter, LLMOptio
oaiHost: 'https://openrouter.ai/api',
oaiKey: '',
}),
-getAccess: (partialSetup): OpenAIAccessSchema => ({
+getTransportAccess: (partialSetup): OpenAIAccessSchema => ({
dialect: 'openrouter',
oaiKey: partialSetup?.oaiKey || '',
oaiOrg: '',
@@ -60,9 +60,9 @@ export const ModelVendorOpenRouter: IModelVendor<SourceSetupOpenRouter, LLMOptio
moderationCheck: false,
}),
callChatGenerate(llm, messages: VChatMessageIn[], maxTokens?: number): Promise<VChatMessageOut> {
-return openAICallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, null, null, maxTokens);
+return openAICallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, null, null, maxTokens);
},
callChatGenerateWF(llm, messages: VChatMessageIn[], functions: VChatFunctionIn[] | null, forceFunctionName: string | null, maxTokens?: number): Promise<VChatMessageOrFunctionCallOut> {
-return openAICallChatGenerate(this.getAccess(llm._source.setup), llm.options, messages, functions, forceFunctionName, maxTokens);
+return openAICallChatGenerate(this.getTransportAccess(llm._source.setup), llm.options, messages, functions, forceFunctionName, maxTokens);
},
};