- OpenRouter model configuration registry
- Model definition file that users can edit to control model configuration (see the sketch after the commit metadata)
- Additional tests
- Update instructions
Author: Fahad
Date: 2025-06-13 06:33:12 +04:00
parent cd1105b741
commit 2cdb92460b
12 changed files with 417 additions and 381 deletions
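
As referenced in the commit summary, here is a minimal sketch of what a user-editable model definition registry could look like. The class name, file name (custom_models.json), and JSON schema below are assumptions for illustration only; the commit's actual format may differ.

import json
from pathlib import Path
from typing import Optional

# Hypothetical registry that loads user-editable model definitions from JSON.
# Example custom_models.json (assumed schema):
#   {"models": [{"model_name": "openai/o3-mini", "context_window": 200000}]}
class OpenRouterModelRegistry:
    def __init__(self, config_path: str = "custom_models.json"):
        data = json.loads(Path(config_path).read_text())
        # Index model entries by name so lookups are constant time.
        self._models = {entry["model_name"]: entry for entry in data.get("models", [])}

    def get(self, model_name: str) -> Optional[dict]:
        """Return the raw definition for a model, or None if it is not listed."""
        return self._models.get(model_name)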


@@ -1,12 +1,8 @@
"""OpenAI model provider implementation."""
import logging
from typing import Optional
from .base import (
FixedTemperatureConstraint,
ModelCapabilities,
ModelResponse,
ProviderType,
RangeTemperatureConstraint,
)
@@ -34,7 +30,6 @@ class OpenAIModelProvider(OpenAICompatibleProvider):
kwargs.setdefault("base_url", "https://api.openai.com/v1")
super().__init__(api_key, **kwargs)
def get_capabilities(self, model_name: str) -> ModelCapabilities:
"""Get capabilities for a specific OpenAI model."""
if model_name not in self.SUPPORTED_MODELS:
@@ -62,7 +57,6 @@ class OpenAIModelProvider(OpenAICompatibleProvider):
temperature_constraint=temp_constraint,
)
def get_provider_type(self) -> ProviderType:
"""Get the provider type."""
return ProviderType.OPENAI
@@ -76,4 +70,3 @@ class OpenAIModelProvider(OpenAICompatibleProvider):
# Currently no OpenAI models support extended thinking
# This may change with future O3 models
return False
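
To tie the hunks above together, a hedged usage sketch of the provider they describe. The API key and model name are placeholders, and the attribute names on the returned ModelCapabilities (as well as the supports_extended_thinking signature) are assumptions drawn from the diff context, not confirmed by the commit.

# Hypothetical usage of OpenAIModelProvider as shown in the diff above.
provider = OpenAIModelProvider(api_key="YOUR_OPENAI_API_KEY")
caps = provider.get_capabilities("o3-mini")         # illustrative model name
print(provider.get_provider_type())                 # ProviderType.OPENAI
print(caps.temperature_constraint)                  # e.g. a RangeTemperatureConstraint
# Returns False per the final hunk; the exact signature is not shown in the
# diff, so passing a model name here is an assumption.
print(provider.supports_extended_thinking("o3-mini"))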