Add gpt-5-codex model support: context-size limit, streaming rejection, and Responses API routing

This commit is contained in:
reanon
2025-09-26 09:40:13 +02:00
parent de6034d2e8
commit 2ca2322baf
3 changed files with 18 additions and 2 deletions
@@ -75,6 +75,8 @@ export const validateContextSize: RequestPreprocessor = async (req) => {
modelMax = 400000;
} else if (model.match(/^gpt-5-chat-latest$/)) {
modelMax = 400000;
} else if (model.match(/^gpt-5-codex(-latest|-\d{4}-\d{2}-\d{2})?$/)) {
modelMax = 400000;
} else if (model.match(/^chatgpt-4o/)) {
modelMax = 128000;
} else if (model.match(/gpt-4-turbo(-\d{4}-\d{2}-\d{2})?$/)) {
+15 -2
View File
@@ -336,6 +336,18 @@ const setupChunkedTransfer: RequestHandler = (req, res, next) => {
});
}
// Check if user is trying to use streaming with gpt-5-codex models
if (req.body.model?.startsWith("gpt-5-codex") && req.body.stream === true) {
return res.status(400).json({
error: {
message: "The gpt-5-codex models do not support streaming. Please set 'stream: false' in your request.",
type: "invalid_request_error",
param: "stream",
code: "streaming_not_supported"
}
});
}
// o1 also doesn't support streaming; emulate it with chunked transfer
if (req.body.model === "o1" || req.body.model === "o1-2024-12-17") {
req.isChunkedTransfer = true;
@@ -358,8 +370,9 @@ const setupChunkedTransfer: RequestHandler = (req, res, next) => {
// Functions to handle model-specific API routing
/**
 * Decides whether a model must be routed to OpenAI's Responses API
 * instead of the Chat Completions API.
 *
 * Covers the o1-pro, o3-pro, codex-mini, and gpt-5-codex families,
 * including "-latest" aliases and dated snapshots (e.g.
 * "o1-pro-2025-03-19") via the prefix checks.
 *
 * @param model - Model identifier from the incoming request body.
 * @returns true when the request should be sent to the Responses API.
 */
function shouldUseResponsesApi(model: string): boolean {
  // NOTE: the garbled diff left the old return statement in place followed
  // by the new condition as dead expression statements; this is the intended
  // post-change condition only. The exact "-latest" equality checks are
  // redundant with the startsWith prefixes but kept for readability.
  return model === "o1-pro" || model.startsWith("o1-pro-") ||
    model === "o3-pro" || model.startsWith("o3-pro-") ||
    model === "codex-mini-latest" || model.startsWith("codex-mini") ||
    model === "gpt-5-codex-latest" || model.startsWith("gpt-5-codex");
}
// Preprocessor to redirect requests to the responses API
+1
View File
@@ -282,6 +282,7 @@ export const OPENAI_MODEL_FAMILY_MAP: { [regex: string]: OpenAIModelFamily } = {
"^o3(-\\d{4}-\\d{2}-\\d{2})?$": "o3",
"^o4-mini(-\\d{4}-\\d{2}-\\d{2})?$": "o4-mini",
"^codex-mini(-latest|-\d{4}-\d{2}-\d{2})?$": "codex-mini",
"^gpt-5-codex(-latest|-\\d{4}-\\d{2}-\\d{2})?$": "gpt5",
};
export function getOpenAIModelFamily(