Merge pull request #36 from lox/add-o3-pro-support

feat: Add o3-pro model support
Beehive Innovations, 2025-06-14 19:44:14 +04:00, committed by GitHub
5 changed files with 118 additions and 11 deletions


@@ -26,6 +26,10 @@ class OpenAIModelProvider(OpenAICompatibleProvider):
"context_window": 200_000, # 200K tokens
"supports_extended_thinking": False,
},
"o3-pro": {
"context_window": 200_000, # 200K tokens
"supports_extended_thinking": False,
},
"o4-mini": {
"context_window": 200_000, # 200K tokens
"supports_extended_thinking": False,
@@ -66,7 +70,7 @@ class OpenAIModelProvider(OpenAICompatibleProvider):
         config = self.SUPPORTED_MODELS[resolved_name]
         # Define temperature constraints per model
-        if resolved_name in ["o3", "o3-mini", "o4-mini", "o4-mini-high"]:
+        if resolved_name in ["o3", "o3-mini", "o3-pro", "o4-mini", "o4-mini-high"]:
             # O3 and O4 reasoning models only support temperature=1.0
             temp_constraint = FixedTemperatureConstraint(1.0)
         else:
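
For context, the pattern this diff extends is a per-model temperature constraint: reasoning models such as o3, o3-pro, and o4-mini are pinned to temperature=1.0, while other models keep a normal range. The sketch below is illustrative only. FixedTemperatureConstraint is named in the diff, but RangeTemperatureConstraint, the clamp method, the REASONING_MODELS set, and the default range are assumptions, not the repository's actual API.

from dataclasses import dataclass


@dataclass
class FixedTemperatureConstraint:
    """Model accepts exactly one temperature value (stand-in for the project's class)."""
    value: float

    def clamp(self, requested: float) -> float:
        # Whatever the caller asked for, the model only supports this value.
        return self.value


@dataclass
class RangeTemperatureConstraint:
    """Model accepts any temperature within [low, high] (assumed fallback constraint)."""
    low: float
    high: float

    def clamp(self, requested: float) -> float:
        return min(max(requested, self.low), self.high)


# Reasoning models that only support temperature=1.0, mirroring the list in the diff.
REASONING_MODELS = {"o3", "o3-mini", "o3-pro", "o4-mini", "o4-mini-high"}


def temperature_constraint_for(model_name: str):
    """Pick a constraint for a resolved model name (hypothetical helper)."""
    if model_name in REASONING_MODELS:
        return FixedTemperatureConstraint(1.0)
    return RangeTemperatureConstraint(0.0, 2.0)


if __name__ == "__main__":
    # A request for temperature 0.7 on o3-pro is overridden to 1.0.
    print(temperature_constraint_for("o3-pro").clamp(0.7))   # -> 1.0
    # A non-reasoning model keeps the requested value within range.
    print(temperature_constraint_for("gpt-4.1").clamp(0.7))  # -> 0.7

The point of the change is simply that "o3-pro" must be added in two places: the SUPPORTED_MODELS capability table and the list of models that take the fixed temperature=1.0 constraint.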