diff --git a/src/common/stores/llms/llms.types.ts b/src/common/stores/llms/llms.types.ts index e7a1fb0fb..addca7cd5 100644 --- a/src/common/stores/llms/llms.types.ts +++ b/src/common/stores/llms/llms.types.ts @@ -168,6 +168,7 @@ export type DModelInterfaceV1 = // FIXME: keep this in sync with the server side on modules/llms/server/llm.server.types.ts export const LLM_IF_OAI_Chat: DModelInterfaceV1 = 'oai-chat'; export const LLM_IF_OAI_Fn: DModelInterfaceV1 = 'oai-chat-fn'; +/** @deprecated we don't use this one anymore 2026-04-19; suspended until we have a reason or per-model continuous validation of this */ export const LLM_IF_OAI_Json: DModelInterfaceV1 = 'oai-chat-json'; // for Structured Outputs (or JSON mode at worst) export const LLM_IF_ANT_ToolsSearch: DModelInterfaceV1 = 'ant-tools-search'; // export const LLM_IF_OAI_JsonSchema: ... future? diff --git a/src/modules/llms/server/llm.server.types.ts b/src/modules/llms/server/llm.server.types.ts index ba61c45ce..dcf43b735 100644 --- a/src/modules/llms/server/llm.server.types.ts +++ b/src/modules/llms/server/llm.server.types.ts @@ -139,7 +139,7 @@ export const ModelDescription_schema = z.object({ updated: z.int().optional(), description: z.string(), contextWindow: z.int().nullable(), - interfaces: z.array(z.union([z.enum(LLMS_ALL_INTERFACES), z.string()])), // backward compatibility: to not Break client-side interface parsing on newer server + interfaces: z.array(z.enum(LLMS_ALL_INTERFACES).or(z.string())), // backward compatibility: to not break client-side interface parsing on newer server parameterSpecs: z.array(ModelParameterSpec_schema).optional(), maxCompletionTokens: z.int().optional(), // initial parameter value for 'llmResponseTokens' // rateLimits: rateLimitsSchema.optional(),