From 1f10905a03a427872a5c686a3524369b5bd02fd7 Mon Sep 17 00:00:00 2001 From: Enrico Ros Date: Tue, 7 May 2024 00:47:11 -0700 Subject: [PATCH] Fix model temperatures --- src/modules/llms/server/ollama/ollama.router.ts | 4 ++-- src/modules/llms/server/openai/openai.router.ts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/modules/llms/server/ollama/ollama.router.ts b/src/modules/llms/server/ollama/ollama.router.ts index 64958504f..c275b3ae9 100644 --- a/src/modules/llms/server/ollama/ollama.router.ts +++ b/src/modules/llms/server/ollama/ollama.router.ts @@ -44,7 +44,7 @@ export const ollamaChatCompletionPayload = (model: OpenAIModelSchema, history: O model: model.id, messages: history, options: { - ...(model.temperature && { temperature: model.temperature }), + ...(model.temperature !== undefined && { temperature: model.temperature }), }, // n: ... // functions: ... @@ -78,7 +78,7 @@ export function ollamaCompletionPayload(model: OpenAIModelSchema, history: OpenA model: model.id, prompt, options: { - ...(model.temperature && { temperature: model.temperature }), + ...(model.temperature !== undefined && { temperature: model.temperature }), }, ...(systemPrompt && { system: systemPrompt }), stream, diff --git a/src/modules/llms/server/openai/openai.router.ts b/src/modules/llms/server/openai/openai.router.ts index 83bcf9dd5..fedc124a3 100644 --- a/src/modules/llms/server/openai/openai.router.ts +++ b/src/modules/llms/server/openai/openai.router.ts @@ -616,7 +616,7 @@ export function openAIChatCompletionPayload(dialect: OpenAIDialects, model: Open model: model.id, messages: history, ...(functions && { functions: functions, function_call: forceFunctionName ? { name: forceFunctionName } : 'auto' }), - ...(model.temperature && { temperature: model.temperature }), + ...(model.temperature !== undefined && { temperature: model.temperature }), ...(model.maxTokens && { max_tokens: model.maxTokens }), ...(n > 1 && { n }), stream,