From 2ca2322bafe91df9daa916b44790b1dca6a8622f Mon Sep 17 00:00:00 2001
From: reanon <>
Date: Fri, 26 Sep 2025 09:40:13 +0200
Subject: [PATCH] gpt5 codex somewhat

---
 .../preprocessors/validate-context-size.ts |  2 ++
 src/proxy/openai.ts                        | 17 +++++++++++++++--
 src/shared/models.ts                       |  1 +
 3 files changed, 18 insertions(+), 2 deletions(-)

diff --git a/src/proxy/middleware/request/preprocessors/validate-context-size.ts b/src/proxy/middleware/request/preprocessors/validate-context-size.ts
index 9f73670..4c12938 100644
--- a/src/proxy/middleware/request/preprocessors/validate-context-size.ts
+++ b/src/proxy/middleware/request/preprocessors/validate-context-size.ts
@@ -75,6 +75,8 @@ export const validateContextSize: RequestPreprocessor = async (req) => {
     modelMax = 400000;
   } else if (model.match(/^gpt-5-chat-latest$/)) {
     modelMax = 400000;
+  } else if (model.match(/^gpt-5-codex(-latest|-\d{4}-\d{2}-\d{2})?$/)) {
+    modelMax = 400000;
   } else if (model.match(/^chatgpt-4o/)) {
     modelMax = 128000;
   } else if (model.match(/gpt-4-turbo(-\d{4}-\d{2}-\d{2})?$/)) {
diff --git a/src/proxy/openai.ts b/src/proxy/openai.ts
index 14168eb..7e8ae78 100644
--- a/src/proxy/openai.ts
+++ b/src/proxy/openai.ts
@@ -336,6 +336,18 @@ const setupChunkedTransfer: RequestHandler = (req, res, next) => {
     });
   }
 
+  // Check if user is trying to use streaming with gpt-5-codex models
+  if (req.body.model?.startsWith("gpt-5-codex") && req.body.stream === true) {
+    return res.status(400).json({
+      error: {
+        message: "The gpt-5-codex models do not support streaming. Please set 'stream: false' in your request.",
+        type: "invalid_request_error",
+        param: "stream",
+        code: "streaming_not_supported"
+      }
+    });
+  }
+
   // Only o1 doesn't support streaming
   if (req.body.model === "o1" || req.body.model === "o1-2024-12-17") {
     req.isChunkedTransfer = true;
@@ -358,8 +370,9 @@ const setupChunkedTransfer: RequestHandler = (req, res, next) => {
 // Functions to handle model-specific API routing
 function shouldUseResponsesApi(model: string): boolean {
   return model === "o1-pro" || model.startsWith("o1-pro-") ||
-    model === "o3-pro" || model.startsWith("o3-pro-") ||
-    model === "codex-mini-latest" || model.startsWith("codex-mini-");
+    model === "o3-pro" || model.startsWith("o3-pro-") ||
+    model === "codex-mini-latest" || model.startsWith("codex-mini") ||
+    model === "gpt-5-codex-latest" || model.startsWith("gpt-5-codex");
 }
 
 // Preprocessor to redirect requests to the responses API
diff --git a/src/shared/models.ts b/src/shared/models.ts
index 55d47d8..7e8c638 100644
--- a/src/shared/models.ts
+++ b/src/shared/models.ts
@@ -282,6 +282,7 @@ export const OPENAI_MODEL_FAMILY_MAP: { [regex: string]: OpenAIModelFamily } = {
   "^o3(-\\d{4}-\\d{2}-\\d{2})?$": "o3",
   "^o4-mini(-\\d{4}-\\d{2}-\\d{2})?$": "o4-mini",
   "^codex-mini(-latest|-\d{4}-\d{2}-\d{2})?$": "codex-mini",
+  "^gpt-5-codex(-latest|-\\d{4}-\\d{2}-\\d{2})?$": "gpt5",
 };
 
 export function getOpenAIModelFamily(