diff --git a/src/modules/llms/server/openai/models/lmstudio.models.ts b/src/modules/llms/server/openai/models/lmstudio.models.ts new file mode 100644 index 000000000..7943c23b3 --- /dev/null +++ b/src/modules/llms/server/openai/models/lmstudio.models.ts @@ -0,0 +1,26 @@ +import { LLM_IF_OAI_Chat } from '~/common/stores/llms/llms.types'; + +import type { ModelDescriptionSchema } from '../../llm.server.types'; +import { fromManualMapping } from './models.data'; + + +export function lmStudioModelToModelDescription(modelId: string): ModelDescriptionSchema { + + // LM Studio model IDs are the file names of the model files + function getFileName(filePath: string): string { + const normalizedPath = filePath.replace(/\\/g, '/'); + return normalizedPath.split('/').pop() || ''; + } + + return fromManualMapping([], modelId, undefined, undefined, { + idPrefix: modelId, + label: getFileName(modelId) + .replace('.gguf', '') + .replace('.bin', ''), + // .replaceAll('-', ' '), + description: `Unknown LM Studio model. File: ${modelId}`, + contextWindow: null, // 'not provided' + interfaces: [LLM_IF_OAI_Chat], // assume.. 
+ chatPrice: { input: 'free', output: 'free' }, + }); +} diff --git a/src/modules/llms/server/openai/models/localai.models.ts b/src/modules/llms/server/openai/models/localai.models.ts new file mode 100644 index 000000000..32bc9d1d3 --- /dev/null +++ b/src/modules/llms/server/openai/models/localai.models.ts @@ -0,0 +1,65 @@ +import { LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, LLM_IF_OAI_Reasoning, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types'; +import { capitalizeFirstLetter } from '~/common/util/textUtils'; + +import type { ModelDescriptionSchema } from '../../llm.server.types'; +import { fromManualMapping, type ManualMappings } from './models.data'; + + +// [LocalAI] +const _knownLocalAIChatModels: ManualMappings = []; +const _knownLocalAIPrice = { input: 'free', output: 'free' } as const; +const _hideLocalAIModels = [ + 'jina-reranker-v1-base-en', // vector search + 'stablediffusion', // text-to-image + 'text-embedding-ada-002', // embedding generator + 'tts-1', // text-to-speech + 'whisper-1', // speech-to-text +]; + +export function localAIModelSortFn(a: ModelDescriptionSchema, b: ModelDescriptionSchema): number { + // hidden to the bottom + if (a.hidden && !b.hidden) return 1; + if (!a.hidden && b.hidden) return -1; + + // keep the order from the API + return 0; +} + + +export function localAIModelToModelDescription(modelId: string): ModelDescriptionSchema { + + // heuristics to extract a label from the model ID + const label = modelId + .replace('.gguf', '') + .replace('ggml-', '') + .replace('.bin', '') + .replaceAll('-', ' ') + .replace(' Q4_K_M', ' (Q4_K_M)') + .replace(' F16', ' (F16)') + .split(' ') + .map(capitalizeFirstLetter) + .join(' '); + + const description = `LocalAI model. 
File: ${modelId}`; + + // very dull heuristics + const interfaces = [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn]; + if (modelId.includes('vision') || modelId.includes('llava')) + interfaces.push(LLM_IF_OAI_Vision); + if (modelId.includes('r1')) + interfaces.push(LLM_IF_OAI_Reasoning); + + return fromManualMapping(_knownLocalAIChatModels, modelId, undefined, undefined, { + idPrefix: modelId, + label, + description, + contextWindow: null, // 'not provided' + interfaces, + // parameterSpecs + // maxCompletionTokens + // trainingDataCutoff + // benchmark + chatPrice: _knownLocalAIPrice, + hidden: _hideLocalAIModels.includes(modelId), + }); +} diff --git a/src/modules/llms/server/openai/models/models.data.ts b/src/modules/llms/server/openai/models/models.data.ts index 60e2c1608..f52bf9403 100644 --- a/src/modules/llms/server/openai/models/models.data.ts +++ b/src/modules/llms/server/openai/models/models.data.ts @@ -1,92 +1,7 @@ -import { LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, LLM_IF_OAI_Reasoning, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types'; -import { capitalizeFirstLetter } from '~/common/util/textUtils'; - import type { ModelDescriptionSchema } from '../../llm.server.types'; -// [LM Studio] -export function lmStudioModelToModelDescription(modelId: string): ModelDescriptionSchema { - - // LM Studio model ID's are the file names of the model files - function getFileName(filePath: string): string { - const normalizedPath = filePath.replace(/\\/g, '/'); - return normalizedPath.split('/').pop() || ''; - } - - return fromManualMapping([], modelId, undefined, undefined, { - idPrefix: modelId, - label: getFileName(modelId) - .replace('.gguf', '') - .replace('.bin', ''), - // .replaceAll('-', ' '), - description: `Unknown LM Studio model. File: ${modelId}`, - contextWindow: null, // 'not provided' - interfaces: [LLM_IF_OAI_Chat], // assume.. 
- chatPrice: { input: 'free', output: 'free' }, - }); -} - - -// [LocalAI] -const _knownLocalAIChatModels: ManualMappings = []; -const _knownLocalAIPrice = { input: 'free', output: 'free' } as const; -const _hideLocalAIModels = [ - 'jina-reranker-v1-base-en', // vector search - 'stablediffusion', // text-to-image - 'text-embedding-ada-002', // embedding generator - 'tts-1', // text-to-speech - 'whisper-1', // speech-to-text -]; - -export function localAIModelSortFn(a: ModelDescriptionSchema, b: ModelDescriptionSchema): number { - // hidden to the bottom - if (a.hidden && !b.hidden) return 1; - if (!a.hidden && b.hidden) return -1; - - // keep the order from the API - return 0; -} - - -export function localAIModelToModelDescription(modelId: string): ModelDescriptionSchema { - - // heurisics to extract a label from the model ID - const label = modelId - .replace('.gguf', '') - .replace('ggml-', '') - .replace('.bin', '') - .replaceAll('-', ' ') - .replace(' Q4_K_M', ' (Q4_K_M)') - .replace(' F16', ' (F16)') - .split(' ') - .map(capitalizeFirstLetter) - .join(' '); - - const description = `LocalAI model. 
File: ${modelId}`; - - // very dull heuristics - const interfaces = [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn]; - if (modelId.includes('vision') || modelId.includes('llava')) - interfaces.push(LLM_IF_OAI_Vision); - if (modelId.includes('r1')) - interfaces.push(LLM_IF_OAI_Reasoning); - - return fromManualMapping(_knownLocalAIChatModels, modelId, undefined, undefined, { - idPrefix: modelId, - label, - description, - contextWindow: null, // 'not provided' - interfaces, - // parameterSpecs - // maxCompletionTokens - // trainingDataCutoff - // benchmark - chatPrice: _knownLocalAIPrice, - hidden: _hideLocalAIModels.includes(modelId), - }); -} - -// Helpers +// -- Manual model mappings: types and helper -- export type ManualMappings = (KnownModel | KnownLink)[]; diff --git a/src/modules/llms/server/openai/openai.router.ts b/src/modules/llms/server/openai/openai.router.ts index 30c762034..68ea5936d 100644 --- a/src/modules/llms/server/openai/openai.router.ts +++ b/src/modules/llms/server/openai/openai.router.ts @@ -21,7 +21,8 @@ import { deepseekModelFilter, deepseekModelSort, deepseekModelToModelDescription import { fastAPIHeuristic, fastAPIModels } from './models/fastapi.models'; import { fireworksAIHeuristic, fireworksAIModelsToModelDescriptions } from './models/fireworksai.models'; import { groqModelFilter, groqModelSortFn, groqModelToModelDescription } from './models/groq.models'; -import { lmStudioModelToModelDescription, localAIModelSortFn, localAIModelToModelDescription } from './models/models.data'; +import { lmStudioModelToModelDescription } from './models/lmstudio.models'; +import { localAIModelSortFn, localAIModelToModelDescription } from './models/localai.models'; import { mistralModels } from './models/mistral.models'; import { moonshotModelFilter, moonshotModelSortFn, moonshotModelToModelDescription } from './models/moonshot.models'; import { openaiDevCheckForModelsOverlap_DEV, openAIInjectVariants, openAIModelFilter, openAIModelToModelDescription, openAISortModels } 
from './models/openai.models';