From 82b021d75acc791e68c7afb35f6492f68cf02bec Mon Sep 17 00:00:00 2001
From: aberemia24
Date: Fri, 3 Oct 2025 13:59:55 +0300
Subject: [PATCH] fix: add GPT-5-Codex to Responses API routing and simplify
 comments

- Route GPT-5-Codex to /v1/responses endpoint (same as o3-pro)
- Simplify verbose comments per code review feedback

Addresses code review from @gemini-code-assist
---
 providers/openai_compatible.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/providers/openai_compatible.py b/providers/openai_compatible.py
index 2da361d..5c93a3c 100644
--- a/providers/openai_compatible.py
+++ b/providers/openai_compatible.py
@@ -571,9 +571,10 @@ class OpenAICompatibleProvider(ModelProvider):
                 continue  # Skip unsupported parameters for reasoning models
             completion_params[key] = value
 
-        # Check if this is o3-pro and needs the responses endpoint
-        if resolved_model == "o3-pro":
-            # This model requires the /v1/responses endpoint
+        # Check if this model needs the Responses API endpoint
+        # Both o3-pro and gpt-5-codex use the new Responses API
+        if resolved_model in ["o3-pro", "gpt-5-codex"]:
+            # These models require the /v1/responses endpoint for stateful context
             # If it fails, we should not fall back to chat/completions
             return self._generate_with_responses_endpoint(
                 model_name=resolved_model,