refactor: move temperature method from base provider to model capabilities
docs: add AGENTS.md for onboarding Codex
This commit is contained in:
@@ -83,43 +83,6 @@ class ModelProvider(ABC):
|
||||
"""Validate if the model name is supported by this provider."""
|
||||
pass
|
||||
|
||||
def get_effective_temperature(self, model_name: str, requested_temperature: float) -> Optional[float]:
    """Resolve the temperature value that should actually be sent for *model_name*.

    Covers three situations:
    - the model takes no temperature parameter at all (returns ``None``),
    - the model pins temperature to a fixed value (returns that value),
    - the model constrains temperature to a min/max range (value is clamped).

    Args:
        model_name: Model whose capability constraints should be applied.
        requested_temperature: Temperature asked for by the user/tool.

    Returns:
        The provider-safe temperature, or ``None`` when the parameter
        should be omitted from the request entirely.
    """
    try:
        caps = self.get_capabilities(model_name)
        # A model that rejects the parameter signals "omit it" via None.
        if not caps.supports_temperature:
            return None
        adjusted = caps.temperature_constraint.get_corrected_value(requested_temperature)
        if adjusted != requested_temperature:
            logger.debug(f"Adjusting temperature from {requested_temperature} to {adjusted} for model {model_name}")
        return adjusted
    except Exception as e:
        # Capability lookup (or constraint evaluation) failed; best effort is
        # to pass the caller's value through unchanged.
        logger.debug(f"Could not determine effective temperature for {model_name}: {e}")
        return requested_temperature
|
||||
|
||||
def validate_parameters(self, model_name: str, temperature: float, **kwargs) -> None:
|
||||
"""Validate model parameters against capabilities.
|
||||
|
||||
|
||||
@@ -469,8 +469,22 @@ class OpenAICompatibleProvider(ModelProvider):
|
||||
if not self.validate_model_name(model_name):
|
||||
raise ValueError(f"Model '{model_name}' not in allowed models list. Allowed models: {self.allowed_models}")
|
||||
|
||||
# Get effective temperature for this model
|
||||
effective_temperature = self.get_effective_temperature(model_name, temperature)
|
||||
capabilities: Optional[ModelCapabilities]
|
||||
try:
|
||||
capabilities = self.get_capabilities(model_name)
|
||||
except Exception as exc:
|
||||
logging.debug(f"Falling back to generic capabilities for {model_name}: {exc}")
|
||||
capabilities = None
|
||||
|
||||
# Get effective temperature for this model from capabilities when available
|
||||
if capabilities:
|
||||
effective_temperature = capabilities.get_effective_temperature(temperature)
|
||||
if effective_temperature is not None and effective_temperature != temperature:
|
||||
logging.debug(
|
||||
f"Adjusting temperature from {temperature} to {effective_temperature} for model {model_name}"
|
||||
)
|
||||
else:
|
||||
effective_temperature = temperature
|
||||
|
||||
# Only validate if temperature is not None (meaning the model supports it)
|
||||
if effective_temperature is not None:
|
||||
@@ -482,13 +496,6 @@ class OpenAICompatibleProvider(ModelProvider):
|
||||
if system_prompt:
|
||||
messages.append({"role": "system", "content": system_prompt})
|
||||
|
||||
# Resolve capabilities once for vision/temperature checks
|
||||
try:
|
||||
capabilities = self.get_capabilities(model_name)
|
||||
except Exception as exc:
|
||||
logging.debug(f"Falling back to generic capabilities for {model_name}: {exc}")
|
||||
capabilities = None
|
||||
|
||||
# Prepare user message with text and potentially images
|
||||
user_content = []
|
||||
user_content.append({"type": "text", "text": prompt})
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Dataclass describing the feature set of a model exposed by a provider."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional
|
||||
|
||||
from .provider_type import ProviderType
|
||||
from .temperature import RangeTemperatureConstraint, TemperatureConstraint
|
||||
@@ -33,6 +34,19 @@ class ModelCapabilities:
|
||||
default_factory=lambda: RangeTemperatureConstraint(0.0, 2.0, 0.3)
|
||||
)
|
||||
|
||||
def get_effective_temperature(self, requested_temperature: float) -> Optional[float]:
    """Map a requested temperature onto what this model can accept.

    ``None`` means the model takes no temperature parameter and callers
    should drop it from the request entirely; otherwise the configured
    constraint clamps the requested value into a provider-safe range.
    """

    if self.supports_temperature:
        return self.temperature_constraint.get_corrected_value(requested_temperature)
    return None
|
||||
|
||||
@staticmethod
|
||||
def collect_aliases(model_configs: dict[str, "ModelCapabilities"]) -> dict[str, list[str]]:
|
||||
"""Build a mapping of model name to aliases from capability configs."""
|
||||
|
||||
Reference in New Issue
Block a user