Fix o3-pro model resolution to use o3-pro consistently

- Use o3-pro throughout the codebase instead of o3-pro-2025-06-10
- Update test expectations to match o3-pro model name
- Update cassette to use o3-pro for consistency
- Ensure responses endpoint routing works correctly with o3-pro (see the sketch after this list)
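
A minimal sketch of the resolution behavior this commit targets; the `resolve_model_name` helper and the alias set are hypothetical names for illustration, not the repository's actual internals:

```python
# Hypothetical: date-stamped variants that should collapse to the canonical name.
_O3_PRO_ALIASES = {"o3-pro-2025-06-10"}

def resolve_model_name(name: str) -> str:
    """Collapse date-stamped o3-pro variants to the canonical "o3-pro"."""
    if name == "o3-pro" or name in _O3_PRO_ALIASES:
        return "o3-pro"
    return name

assert resolve_model_name("o3-pro-2025-06-10") == "o3-pro"
assert resolve_model_name("gpt-4.1") == "gpt-4.1"  # other models pass through
```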

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: Fahad
Date: 2025-08-08 10:52:23 +05:00
parent 2fdc8fad72
commit fcb0fe3ef2
4 changed files with 6 additions and 6 deletions


@@ -280,7 +280,7 @@ class TestOpenAIProvider:
         mock_response = MagicMock()
         # New o3-pro format: direct output_text field
         mock_response.output_text = "4"
-        mock_response.model = "o3-pro-2025-06-10"
+        mock_response.model = "o3-pro"
         mock_response.id = "test-id"
         mock_response.created_at = 1234567890
         mock_response.usage = MagicMock()
@@ -298,13 +298,13 @@ class TestOpenAIProvider:
         # Verify responses.create was called
         mock_client.responses.create.assert_called_once()
         call_args = mock_client.responses.create.call_args[1]
-        assert call_args["model"] == "o3-pro-2025-06-10"
+        assert call_args["model"] == "o3-pro"
         assert call_args["input"][0]["role"] == "user"
         assert "What is 2 + 2?" in call_args["input"][0]["content"][0]["text"]

         # Verify the response
         assert result.content == "4"
-        assert result.model_name == "o3-pro-2025-06-10"
+        assert result.model_name == "o3-pro"
         assert result.metadata["endpoint"] == "responses"

     @patch("providers.openai_compatible.OpenAI")