Fix o3-pro model resolution to use o3-pro consistently
- Use o3-pro throughout the codebase instead of o3-pro-2025-06-10
- Update test expectations to match o3-pro model name
- Update cassette to use o3-pro for consistency
- Ensure responses endpoint routing works correctly with o3-pro

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
@@ -541,7 +541,7 @@ class OpenAICompatibleProvider(ModelProvider):
                 completion_params[key] = value

         # Check if this is o3-pro and needs the responses endpoint
-        if resolved_model == "o3-pro-2025-06-10":
+        if resolved_model == "o3-pro":
             # This model requires the /v1/responses endpoint
             # If it fails, we should not fall back to chat/completions
             return self._generate_with_responses_endpoint(
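In isolation, the routing this hunk fixes can be read as a pure function of the resolved model name. The sketch below is an illustrative outline, not the provider's real method, and it assumes that the dated snapshot name `o3-pro-2025-06-10` is normalized to `o3-pro` before this check runs:

```python
# Illustrative sketch only (not the project's actual code): after model
# resolution, the canonical name "o3-pro" is what selects the endpoint.
def choose_endpoint(resolved_model: str) -> str:
    # o3-pro is only served by /v1/responses; a failure there should not
    # fall back to /v1/chat/completions.
    if resolved_model == "o3-pro":
        return "responses"
    # Every other resolved model keeps using chat/completions.
    return "chat/completions"


print(choose_endpoint("o3-pro"))  # -> "responses"
```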
@@ -23,7 +23,7 @@
         "role": "user"
       }
     ],
-    "model": "o3-pro-2025-06-10",
+    "model": "o3-pro",
     "reasoning": {
       "effort": "medium"
     },
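The cassette records the request body the provider sends to the Responses API. Issued live with the OpenAI Python SDK, an equivalent call would look roughly like this; the client construction and the exact content-item shape are assumptions, while the model name, reasoning effort, and prompt text come from the recording and the test assertions below:

```python
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

# Shape mirrors the recorded cassette: one user message, the canonical
# model name, and a reasoning-effort hint.
response = client.responses.create(
    model="o3-pro",
    input=[
        {
            "role": "user",
            "content": [{"type": "input_text", "text": "What is 2 + 2?"}],
        }
    ],
    reasoning={"effort": "medium"},
)

# The Responses API exposes the convenience field the tests rely on.
print(response.output_text)
```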
@@ -55,7 +55,7 @@ class TestO3ProOutputTextFix:
         ModelProviderRegistry.reset_for_testing()

     @pytest.mark.no_mock_provider  # Disable provider mocking for this test
-    @patch.dict(os.environ, {"OPENAI_ALLOWED_MODELS": "o3-pro,o3-pro-2025-06-10", "LOCALE": ""})
+    @patch.dict(os.environ, {"OPENAI_ALLOWED_MODELS": "o3-pro", "LOCALE": ""})
     async def test_o3_pro_uses_output_text_field(self, monkeypatch):
         """Test that o3-pro parsing uses the output_text convenience field via ChatTool."""
         cassette_path = cassette_dir / "o3_pro_basic_math.json"
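The environment patch narrows the allow-list to the canonical name only. Assuming `OPENAI_ALLOWED_MODELS` is parsed as a comma-separated allow-list (which the test's patch suggests; the real parsing lives elsewhere in the provider), the gate might look like this hypothetical helper:

```python
import os


def is_model_allowed(resolved_model: str) -> bool:
    # Hypothetical sketch: an empty or unset variable means "allow everything".
    raw = os.environ.get("OPENAI_ALLOWED_MODELS", "").strip()
    if not raw:
        return True
    allowed = {name.strip().lower() for name in raw.split(",") if name.strip()}
    return resolved_model.lower() in allowed
```

With the patched value above, only `o3-pro` passes such a check, so the test exercises the canonical name rather than the dated snapshot.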
@@ -280,7 +280,7 @@ class TestOpenAIProvider:
         mock_response = MagicMock()
         # New o3-pro format: direct output_text field
         mock_response.output_text = "4"
-        mock_response.model = "o3-pro-2025-06-10"
+        mock_response.model = "o3-pro"
         mock_response.id = "test-id"
         mock_response.created_at = 1234567890
         mock_response.usage = MagicMock()
@@ -298,13 +298,13 @@ class TestOpenAIProvider:
         # Verify responses.create was called
         mock_client.responses.create.assert_called_once()
         call_args = mock_client.responses.create.call_args[1]
-        assert call_args["model"] == "o3-pro-2025-06-10"
+        assert call_args["model"] == "o3-pro"
         assert call_args["input"][0]["role"] == "user"
         assert "What is 2 + 2?" in call_args["input"][0]["content"][0]["text"]

         # Verify the response
         assert result.content == "4"
-        assert result.model_name == "o3-pro-2025-06-10"
+        assert result.model_name == "o3-pro"
         assert result.metadata["endpoint"] == "responses"

     @patch("providers.openai_compatible.OpenAI")
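Taken together, the last two hunks pin down what the responses-endpoint path must return: content taken from `output_text`, the canonical model name, and metadata marking which endpoint served the request. A minimal sketch of a result object satisfying those assertions is shown below; the `ModelResponse` name and constructor are assumptions, while the field values mirror the asserts above:

```python
from dataclasses import dataclass, field


@dataclass
class ModelResponse:  # hypothetical stand-in for the provider's return type
    content: str
    model_name: str
    metadata: dict = field(default_factory=dict)


def parse_responses_result(response, resolved_model: str) -> ModelResponse:
    # output_text is the convenience field the o3-pro tests check.
    return ModelResponse(
        content=response.output_text,
        model_name=getattr(response, "model", resolved_model),
        metadata={"endpoint": "responses"},
    )
```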