diff --git a/src/modules/aix/server/dispatch/chatGenerate/adapters/anthropic.messageCreate.ts b/src/modules/aix/server/dispatch/chatGenerate/adapters/anthropic.messageCreate.ts index a47ed4c54..3b35ab0d4 100644 --- a/src/modules/aix/server/dispatch/chatGenerate/adapters/anthropic.messageCreate.ts +++ b/src/modules/aix/server/dispatch/chatGenerate/adapters/anthropic.messageCreate.ts @@ -88,6 +88,12 @@ export function aixToAnthropicMessageCreate(model: AixAPI_Model, chatGenerate: A // top_p: undefined, }; + // Top-P instead of temperature + if (model.topP !== undefined) { + payload.top_p = model.topP; + delete payload.temperature; + } + // Preemptive error detection with server-side payload validation before sending it upstream const validated = AnthropicWire_API_Message_Create.Request_schema.safeParse(payload); if (!validated.success) { diff --git a/src/modules/aix/server/dispatch/chatGenerate/adapters/gemini.generateContent.ts b/src/modules/aix/server/dispatch/chatGenerate/adapters/gemini.generateContent.ts index a8b952ef5..d4740f422 100644 --- a/src/modules/aix/server/dispatch/chatGenerate/adapters/gemini.generateContent.ts +++ b/src/modules/aix/server/dispatch/chatGenerate/adapters/gemini.generateContent.ts @@ -51,6 +51,12 @@ export function aixToGeminiGenerateContent(model: AixAPI_Model, chatGenerate: Ai }, }; + // Top-P instead of temperature + if (model.topP !== undefined) { + delete payload.generationConfig!.temperature; + payload.generationConfig!.topP = model.topP; + } + // Preemptive error detection with server-side payload validation before sending it upstream const validated = GeminiWire_API_Generate_Content.Request_schema.safeParse(payload); if (!validated.success) { diff --git a/src/modules/aix/server/dispatch/chatGenerate/adapters/openai.chatCompletions.ts b/src/modules/aix/server/dispatch/chatGenerate/adapters/openai.chatCompletions.ts index f046d8fc7..8448e3207 100644 --- 
a/src/modules/aix/server/dispatch/chatGenerate/adapters/openai.chatCompletions.ts +++ b/src/modules/aix/server/dispatch/chatGenerate/adapters/openai.chatCompletions.ts @@ -79,6 +79,12 @@ export function aixToOpenAIChatCompletions(openAIDialect: OpenAIDialects, model: user: undefined, }; + // Top-P instead of temperature + if (model.topP !== undefined) { + delete payload.temperature; + payload.top_p = model.topP; + } + if (hotFixOpenAIo1Family) payload = _fixRequestForOpenAIO1_maxCompletionTokens(payload);