Native support for xAI Grok3

Fixes related to model shorthand mapping
Comprehensive tests for auto mode
Author: Fahad
Date: 2025-06-15 12:21:44 +04:00
parent 4becd70a82
commit 6304b7af6b
24 changed files with 2278 additions and 58 deletions

@@ -1,10 +1,12 @@
"""OpenAI model provider implementation."""
import logging
from typing import Optional
from .base import (
FixedTemperatureConstraint,
ModelCapabilities,
ModelResponse,
ProviderType,
RangeTemperatureConstraint,
)
@@ -111,6 +113,29 @@ class OpenAIModelProvider(OpenAICompatibleProvider):
        return True

    def generate_content(
        self,
        prompt: str,
        model_name: str,
        system_prompt: Optional[str] = None,
        temperature: float = 0.7,
        max_output_tokens: Optional[int] = None,
        **kwargs,
    ) -> ModelResponse:
        """Generate content using OpenAI API with proper model name resolution."""
        # Resolve model alias before making API call
        resolved_model_name = self._resolve_model_name(model_name)

        # Call parent implementation with resolved model name
        return super().generate_content(
            prompt=prompt,
            model_name=resolved_model_name,
            system_prompt=system_prompt,
            temperature=temperature,
            max_output_tokens=max_output_tokens,
            **kwargs,
        )
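
The _resolve_model_name helper itself is not part of this hunk; given the commit's note about shorthand mapping fixes, it presumably translates short aliases into canonical API model names before the request is sent. A minimal sketch of that idea, where the class name and the alias entries are illustrative assumptions rather than the repository's actual mapping:

# Hypothetical sketch; the class and alias table are assumptions, not the real code.
class AliasResolvingProvider:
    MODEL_ALIASES = {
        "mini": "o4-mini",    # assumed shorthand
        "o3mini": "o3-mini",  # assumed shorthand
    }

    def _resolve_model_name(self, model_name: str) -> str:
        """Return the canonical name for a known shorthand, otherwise pass it through."""
        return self.MODEL_ALIASES.get(model_name.lower(), model_name)

Resolving the alias before delegating to the parent keeps the shorthand logic in one place, so callers can pass either the short or the full model name.
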
    def supports_thinking_mode(self, model_name: str) -> bool:
        """Check if the model supports extended thinking mode."""
        # Currently no OpenAI models support extended thinking
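
The commit title also mentions native xAI Grok3 support, which does not appear in this file. As a rough, hypothetical sketch only (the class name, module path, and alias table are assumptions, not the repository's implementation), such a provider could reuse the same OpenAI-compatible base class and the same alias-resolution pattern:

# Hypothetical sketch of an xAI provider; names and module path are assumptions.
from .openai_compatible import OpenAICompatibleProvider  # assumed import location


class XAIModelProvider(OpenAICompatibleProvider):
    """Provider for xAI models served through an OpenAI-compatible endpoint."""

    MODEL_ALIASES = {
        "grok": "grok-3",   # assumed shorthand
        "grok3": "grok-3",  # assumed shorthand
    }

    def _resolve_model_name(self, model_name: str) -> str:
        return self.MODEL_ALIASES.get(model_name.lower(), model_name)

Because xAI exposes an OpenAI-compatible API, most request handling can stay in the shared base class, with the subclass contributing only model names, aliases, and capability metadata.
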