Add o3-pro model support and extend test coverage
- Added o3-pro model configuration to custom_models.json with 200K context
- Updated OpenAI provider to support o3-pro with fixed temperature constraint
- Extended simulator tests to include o3-pro validation scenarios

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
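The fixed temperature constraint mentioned above means o3-pro ignores caller-supplied temperatures and always runs at 1.0. A minimal sketch of that behaviour, assuming a simple validate/correct interface (the FixedTemperatureConstraint below is an illustrative stand-in; the repository's real class may differ):

# Illustrative stand-in for the provider's fixed-temperature behaviour.
class FixedTemperatureConstraint:
    """Accepts exactly one temperature value (e.g. 1.0 for o3-pro)."""

    def __init__(self, value: float):
        self.value = value

    def validate(self, requested: float) -> bool:
        # Only the fixed value passes validation.
        return requested == self.value

    def get_corrected_value(self, requested: float) -> float:
        # Whatever the caller asked for, the model gets the fixed value.
        return self.value


constraint = FixedTemperatureConstraint(1.0)
print(constraint.validate(0.7))             # False
print(constraint.get_corrected_value(0.7))  # 1.0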
@@ -22,6 +22,10 @@ class OpenAIModelProvider(OpenAICompatibleProvider):
             "context_window": 200_000,  # 200K tokens
             "supports_extended_thinking": False,
         },
+        "o3-pro": {
+            "context_window": 200_000,  # 200K tokens
+            "supports_extended_thinking": False,
+        },
         "o4-mini": {
             "context_window": 200_000,  # 200K tokens
             "supports_extended_thinking": False,
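With the new entry in place, a capability lookup for "o3-pro" resolves like any other supported model. A small self-contained illustration (the mapping is reduced here for the example; the provider class holds the full dictionary):

# Reduced copy of the mapping, for illustration only.
SUPPORTED_MODELS = {
    "o3-pro": {
        "context_window": 200_000,  # 200K tokens
        "supports_extended_thinking": False,
    },
}

config = SUPPORTED_MODELS["o3-pro"]
assert config["context_window"] == 200_000
assert config["supports_extended_thinking"] is False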
@@ -54,8 +58,13 @@ class OpenAIModelProvider(OpenAICompatibleProvider):
         config = self.SUPPORTED_MODELS[resolved_name]

         # Define temperature constraints per model
+<<<<<<< HEAD
         if resolved_name in ["o3", "o3-mini", "o4-mini", "o4-mini-high"]:
             # O3 and O4 reasoning models only support temperature=1.0
+=======
+        if model_name in ["o3", "o3-mini", "o3-pro"]:
+            # O3 models only support temperature=1.0
+>>>>>>> 155c4ec (Add o3-pro model support and extend test coverage)
             temp_constraint = FixedTemperatureConstraint(1.0)
         else:
             # Other OpenAI models support 0.0-2.0 range
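The hunk above still contains unresolved merge conflict markers. One way to resolve them, assuming both reasoning-model families should share the fixed-temperature branch (an assumption, not the committed code), is sketched below; the constraint objects are simplified to plain tuples so the sketch runs on its own:

# Hedged sketch of a possible resolution of the conflicted branch selection.
REASONING_MODELS = ["o3", "o3-mini", "o3-pro", "o4-mini", "o4-mini-high"]

def pick_temperature_constraint(resolved_name: str):
    if resolved_name in REASONING_MODELS:
        # Reasoning models only support temperature=1.0
        return ("fixed", 1.0)
    # Other OpenAI models support the 0.0-2.0 range
    return ("range", 0.0, 2.0)

print(pick_temperature_constraint("o3-pro"))   # ('fixed', 1.0)
print(pick_temperature_constraint("gpt-4.1"))  # any non-reasoning name: ('range', 0.0, 2.0)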