Fix model temperatures

This commit is contained in:
Enrico Ros
2024-05-07 00:47:11 -07:00
parent 88762db484
commit 1f10905a03
2 changed files with 3 additions and 3 deletions
@@ -44,7 +44,7 @@ export const ollamaChatCompletionPayload = (model: OpenAIModelSchema, history: O
model: model.id,
messages: history,
options: {
...(model.temperature && { temperature: model.temperature }),
...(model.temperature !== undefined && { temperature: model.temperature }),
},
// n: ...
// functions: ...
@@ -78,7 +78,7 @@ export function ollamaCompletionPayload(model: OpenAIModelSchema, history: OpenA
model: model.id,
prompt,
options: {
...(model.temperature && { temperature: model.temperature }),
...(model.temperature !== undefined && { temperature: model.temperature }),
},
...(systemPrompt && { system: systemPrompt }),
stream,
@@ -616,7 +616,7 @@ export function openAIChatCompletionPayload(dialect: OpenAIDialects, model: Open
model: model.id,
messages: history,
...(functions && { functions: functions, function_call: forceFunctionName ? { name: forceFunctionName } : 'auto' }),
...(model.temperature && { temperature: model.temperature }),
...(model.temperature !== undefined && { temperature: model.temperature }),
...(model.maxTokens && { max_tokens: model.maxTokens }),
...(n > 1 && { n }),
stream,