LLMs: extract LocalAI, LMStudio

This commit is contained in:
Enrico Ros
2025-11-17 01:42:26 -08:00
parent 66aa8ed177
commit ef0ff55f1f
4 changed files with 94 additions and 87 deletions
@@ -0,0 +1,26 @@
import { LLM_IF_OAI_Chat } from '~/common/stores/llms/llms.types';
import type { ModelDescriptionSchema } from '../../llm.server.types';
import { fromManualMapping } from './models.data';
/**
 * Builds a ModelDescriptionSchema for an LM Studio model.
 *
 * LM Studio reports model IDs as file paths of the model files, so the
 * label is derived from the trailing file name with known model-file
 * extensions stripped.
 */
export function lmStudioModelToModelDescription(modelId: string): ModelDescriptionSchema {

  // take the last path component, tolerating Windows-style backslashes
  const fileName = modelId.replace(/\\/g, '/').split('/').pop() || '';

  return fromManualMapping([], modelId, undefined, undefined, {
    idPrefix: modelId,
    label: fileName
      .replace('.gguf', '')
      .replace('.bin', ''),
    // .replaceAll('-', ' '),
    description: `Unknown LM Studio model. File: ${modelId}`,
    contextWindow: null, // 'not provided'
    interfaces: [LLM_IF_OAI_Chat], // assume..
    chatPrice: { input: 'free', output: 'free' },
  });
}
@@ -0,0 +1,65 @@
import { LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, LLM_IF_OAI_Reasoning, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
import { capitalizeFirstLetter } from '~/common/util/textUtils';
import type { ModelDescriptionSchema } from '../../llm.server.types';
import { fromManualMapping, type ManualMappings } from './models.data';
// [LocalAI]
// manually-mapped LocalAI models (currently none: every model is mapped heuristically below)
const _knownLocalAIChatModels: ManualMappings = [];
// LocalAI runs models on the user's machine, so both directions are free
const _knownLocalAIPrice = { input: 'free', output: 'free' } as const;
// non-chat model IDs, hidden from the chat model list
const _hideLocalAIModels = [
  'jina-reranker-v1-base-en', // vector search
  'stablediffusion', // text-to-image
  'text-embedding-ada-002', // embedding generator
  'tts-1', // text-to-speech
  'whisper-1', // speech-to-text
];
/**
 * Sort comparator for LocalAI models: visible models first, hidden models
 * last; models with equal visibility keep their relative (API) order under
 * a stable sort.
 */
export function localAIModelSortFn(a: ModelDescriptionSchema, b: ModelDescriptionSchema): number {
  const aHidden = !!a.hidden;
  const bHidden = !!b.hidden;
  // equal visibility: keep the order from the API
  if (aHidden === bHidden) return 0;
  // hidden to the bottom
  return aHidden ? 1 : -1;
}
/**
 * Maps a LocalAI model ID to a ModelDescriptionSchema.
 *
 * The human-readable label is derived heuristically from the file-name-like
 * ID, and the supported interfaces are guessed from substrings of the ID.
 */
export function localAIModelToModelDescription(modelId: string): ModelDescriptionSchema {

  // heuristics to extract a label from the model ID
  const labelWords = modelId
    .replace('.gguf', '')
    .replace('ggml-', '')
    .replace('.bin', '')
    .replaceAll('-', ' ')
    .replace(' Q4_K_M', ' (Q4_K_M)')
    .replace(' F16', ' (F16)')
    .split(' ');
  const label = labelWords.map(capitalizeFirstLetter).join(' ');

  const description = `LocalAI model. File: ${modelId}`;

  // very dull heuristics: guess extra capabilities from ID substrings
  const interfaces = [
    LLM_IF_OAI_Chat,
    LLM_IF_OAI_Fn,
    ...(modelId.includes('vision') || modelId.includes('llava') ? [LLM_IF_OAI_Vision] : []),
    ...(modelId.includes('r1') ? [LLM_IF_OAI_Reasoning] : []),
  ];

  return fromManualMapping(_knownLocalAIChatModels, modelId, undefined, undefined, {
    idPrefix: modelId,
    label,
    description,
    contextWindow: null, // 'not provided'
    interfaces,
    // parameterSpecs
    // maxCompletionTokens
    // trainingDataCutoff
    // benchmark
    chatPrice: _knownLocalAIPrice,
    hidden: _hideLocalAIModels.includes(modelId),
  });
}
@@ -1,92 +1,7 @@
import { LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, LLM_IF_OAI_Reasoning, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
import { capitalizeFirstLetter } from '~/common/util/textUtils';
import type { ModelDescriptionSchema } from '../../llm.server.types';
// [LM Studio]
/**
 * Maps an LM Studio model ID (a file path to the model file) to a
 * ModelDescriptionSchema, using the bare file name as the label.
 */
export function lmStudioModelToModelDescription(modelId: string): ModelDescriptionSchema {

  // LM Studio model ID's are the file names of the model files
  const getFileName = (filePath: string): string => {
    const parts = filePath.replace(/\\/g, '/').split('/');
    return parts[parts.length - 1] || '';
  };

  return fromManualMapping([], modelId, undefined, undefined, {
    idPrefix: modelId,
    label: getFileName(modelId)
      .replace('.gguf', '')
      .replace('.bin', ''),
    // .replaceAll('-', ' '),
    description: `Unknown LM Studio model. File: ${modelId}`,
    contextWindow: null, // 'not provided'
    interfaces: [LLM_IF_OAI_Chat], // assume..
    chatPrice: { input: 'free', output: 'free' },
  });
}
// [LocalAI]
// manually-mapped LocalAI models (currently none: every model is mapped heuristically below)
const _knownLocalAIChatModels: ManualMappings = [];
// LocalAI runs models on the user's machine, so both directions are free
const _knownLocalAIPrice = { input: 'free', output: 'free' } as const;
// non-chat model IDs, hidden from the chat model list
const _hideLocalAIModels = [
  'jina-reranker-v1-base-en', // vector search
  'stablediffusion', // text-to-image
  'text-embedding-ada-002', // embedding generator
  'tts-1', // text-to-speech
  'whisper-1', // speech-to-text
];
/**
 * Comparator for LocalAI model lists: sinks hidden models to the bottom;
 * ties keep the order from the API (relies on a stable sort).
 */
export function localAIModelSortFn(a: ModelDescriptionSchema, b: ModelDescriptionSchema): number {
  // 1 when only 'a' is hidden, -1 when only 'b' is hidden, 0 otherwise
  return Number(!!a.hidden) - Number(!!b.hidden);
}
/**
 * Maps a LocalAI model ID to a ModelDescriptionSchema, deriving the label
 * from the file-name-like ID and guessing capabilities from ID substrings.
 */
export function localAIModelToModelDescription(modelId: string): ModelDescriptionSchema {

  // heuristics to extract a label from the model ID
  const label = modelId
    .replace('.gguf', '')
    .replace('ggml-', '')
    .replace('.bin', '')
    .replaceAll('-', ' ')
    .replace(' Q4_K_M', ' (Q4_K_M)')
    .replace(' F16', ' (F16)')
    .split(' ')
    .map(capitalizeFirstLetter)
    .join(' ');

  // very dull heuristics
  const interfaces = [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn];
  if (['vision', 'llava'].some((hint) => modelId.includes(hint)))
    interfaces.push(LLM_IF_OAI_Vision);
  if (modelId.includes('r1'))
    interfaces.push(LLM_IF_OAI_Reasoning);

  return fromManualMapping(_knownLocalAIChatModels, modelId, undefined, undefined, {
    idPrefix: modelId,
    label,
    description: `LocalAI model. File: ${modelId}`,
    contextWindow: null, // 'not provided'
    interfaces,
    // parameterSpecs
    // maxCompletionTokens
    // trainingDataCutoff
    // benchmark
    chatPrice: _knownLocalAIPrice,
    hidden: _hideLocalAIModels.includes(modelId),
  });
}
// Helpers
// -- Manual model mappings: types and helper --
// A manual mapping list mixes two kinds of entries: KnownModel and KnownLink
// (both presumably declared below this view — TODO confirm their shapes).
export type ManualMappings = (KnownModel | KnownLink)[];
@@ -21,7 +21,8 @@ import { deepseekModelFilter, deepseekModelSort, deepseekModelToModelDescription
import { fastAPIHeuristic, fastAPIModels } from './models/fastapi.models';
import { fireworksAIHeuristic, fireworksAIModelsToModelDescriptions } from './models/fireworksai.models';
import { groqModelFilter, groqModelSortFn, groqModelToModelDescription } from './models/groq.models';
import { lmStudioModelToModelDescription, localAIModelSortFn, localAIModelToModelDescription } from './models/models.data';
import { lmStudioModelToModelDescription } from './models/lmstudio.models';
import { localAIModelSortFn, localAIModelToModelDescription } from './models/localai.models';
import { mistralModels } from './models/mistral.models';
import { moonshotModelFilter, moonshotModelSortFn, moonshotModelToModelDescription } from './models/moonshot.models';
import { openaiDevCheckForModelsOverlap_DEV, openAIInjectVariants, openAIModelFilter, openAIModelToModelDescription, openAISortModels } from './models/openai.models';