Gemini: acknowledge the new createCachedContent capability. Fixes #565

This commit is contained in:
Enrico Ros
2024-06-10 23:56:02 -07:00
parent 11c41e7381
commit b4d8e39d56
2 changed files with 8 additions and 0 deletions
@@ -3,6 +3,10 @@ import type { ModelDescriptionSchema } from '../llm.server.types';
import { LLM_IF_OAI_Chat, LLM_IF_OAI_Json, LLM_IF_OAI_Vision } from '../../store-llms';
// dev options
const DEV_DEBUG_GEMINI_MODELS = false;
// supported interfaces
const geminiChatInterfaces: GeminiModelSchema['supportedGenerationMethods'] = ['generateContent'];
@@ -175,6 +179,9 @@ export function geminiSortModels(a: ModelDescriptionSchema, b: ModelDescriptionS
export function geminiModelToModelDescription(geminiModel: GeminiModelSchema): ModelDescriptionSchema {
const { description, displayName, name: modelId, supportedGenerationMethods } = geminiModel;
if (DEV_DEBUG_GEMINI_MODELS)
console.log('geminiModelToModelDescription', geminiModel);
// find known manual mapping
const knownModel = _knownGeminiModels.find(m => m.id === modelId);
@@ -18,6 +18,7 @@ const geminiModelSchema = z.object({
inputTokenLimit: z.number().int().min(1),
outputTokenLimit: z.number().int().min(1),
supportedGenerationMethods: z.array(z.enum([
'createCachedContent', // appeared on 2024-06-10, see https://github.com/enricoros/big-AGI/issues/565
'countMessageTokens',
'countTextTokens',
'countTokens',