refactor: rename SUPPORTED_MODELS to MODEL_CAPABILITIES to reflect the underlying type

docs: updated to reflect new modules
Fahad
2025-10-02 09:07:40 +04:00
parent 2b10adcaf2
commit 1dc25f6c3d
18 changed files with 129 additions and 131 deletions
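
In practice the rename is mechanical: each provider's model map already holds ModelCapabilities objects (one entry per canonical model, with its aliases attached), so the attribute name now matches the value type. A rough stand-in for that shape, inferred from the fields these tests touch; the real class lives in providers/shared and the concrete values below are illustrative:

    from dataclasses import dataclass, field

    # Stand-in for providers.shared.ModelCapabilities, inferred from the assertions in
    # this commit (friendly_name, context_window, supports_extended_thinking,
    # supports_temperature, aliases); the real definition carries more fields.
    @dataclass
    class ModelCapabilities:
        friendly_name: str
        context_window: int
        supports_extended_thinking: bool = False
        supports_temperature: bool = True
        aliases: list[str] = field(default_factory=list)

    class ExampleProvider:
        # Formerly SUPPORTED_MODELS; only the attribute name changes, the values keep this shape.
        MODEL_CAPABILITIES = {
            "grok-3": ModelCapabilities("X.AI (Grok 3)", 131_072, aliases=["grok3"]),
            "grok-3-fast": ModelCapabilities("X.AI (Grok 3 Fast)", 131_072, aliases=["grok3fast", "grokfast"]),
        }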

View File

@@ -165,7 +165,7 @@ class TestAliasTargetRestrictions:
         openai_all_known = openai_provider.list_all_known_models()
         # Verify that for each alias, its target is also included
-        for model_name, config in openai_provider.SUPPORTED_MODELS.items():
+        for model_name, config in openai_provider.MODEL_CAPABILITIES.items():
             assert model_name.lower() in openai_all_known
             if isinstance(config, str):  # This is an alias
                 # The target should also be in the known models
@@ -178,7 +178,7 @@ class TestAliasTargetRestrictions:
         gemini_all_known = gemini_provider.list_all_known_models()
         # Verify that for each alias, its target is also included
-        for model_name, config in gemini_provider.SUPPORTED_MODELS.items():
+        for model_name, config in gemini_provider.MODEL_CAPABILITIES.items():
             assert model_name.lower() in gemini_all_known
             if isinstance(config, str):  # This is an alias
                 # The target should also be in the known models
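
These assertions only hold if list_all_known_models() returns canonical names and aliases alike, lowercased. A provider-side sketch consistent with what the test expects (not the actual implementation; it also tolerates old-style string alias entries, mirroring the isinstance check above):

    def list_all_known_models(self) -> list[str]:
        """Return every canonical model name plus every alias, lowercased."""
        known: list[str] = []
        for name, caps in self.MODEL_CAPABILITIES.items():
            known.append(name.lower())
            if isinstance(caps, str):
                # Old-style alias entry: the value is the target model name.
                known.append(caps.lower())
            else:
                known.extend(alias.lower() for alias in getattr(caps, "aliases", []))
        return known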

View File

@@ -53,7 +53,7 @@ class TestAutoMode:
         for provider_type in enabled_provider_types:
             provider = ModelProviderRegistry.get_provider(provider_type)
             if provider:
-                for model_name, config in provider.SUPPORTED_MODELS.items():
+                for model_name, config in provider.MODEL_CAPABILITIES.items():
                     # Skip alias entries (string values)
                     if isinstance(config, str):
                         continue

View File

@@ -176,7 +176,7 @@ class TestBuggyBehaviorPrevention:
         # Create a mock provider that simulates the old behavior
         old_style_provider = MagicMock()
-        old_style_provider.SUPPORTED_MODELS = {
+        old_style_provider.MODEL_CAPABILITIES = {
             "mini": "o4-mini",
             "o3mini": "o3-mini",
             "o4-mini": {"context_window": 200000},

View File

@@ -137,7 +137,7 @@ class TestModelRestrictionService:
         # Create mock provider with known models
         mock_provider = MagicMock()
-        mock_provider.SUPPORTED_MODELS = {
+        mock_provider.MODEL_CAPABILITIES = {
             "o3": {"context_window": 200000},
             "o3-mini": {"context_window": 200000},
             "o4-mini": {"context_window": 200000},
@@ -441,7 +441,7 @@ class TestRegistryIntegration:
         # Mock providers
         mock_openai = MagicMock()
-        mock_openai.SUPPORTED_MODELS = {
+        mock_openai.MODEL_CAPABILITIES = {
             "o3": {"context_window": 200000},
             "o3-mini": {"context_window": 200000},
         }
@@ -452,7 +452,7 @@ class TestRegistryIntegration:
             restriction_service = get_restriction_service() if respect_restrictions else None
             models = []
-            for model_name, config in mock_openai.SUPPORTED_MODELS.items():
+            for model_name, config in mock_openai.MODEL_CAPABILITIES.items():
                 if isinstance(config, str):
                     target_model = config
                     if restriction_service and not restriction_service.is_allowed(ProviderType.OPENAI, target_model):
@@ -468,7 +468,7 @@ class TestRegistryIntegration:
         mock_openai.list_all_known_models.return_value = ["o3", "o3-mini"]
         mock_gemini = MagicMock()
-        mock_gemini.SUPPORTED_MODELS = {
+        mock_gemini.MODEL_CAPABILITIES = {
             "gemini-2.5-pro": {"context_window": 1048576},
             "gemini-2.5-flash": {"context_window": 1048576},
         }
@@ -479,7 +479,7 @@ class TestRegistryIntegration:
             restriction_service = get_restriction_service() if respect_restrictions else None
             models = []
-            for model_name, config in mock_gemini.SUPPORTED_MODELS.items():
+            for model_name, config in mock_gemini.MODEL_CAPABILITIES.items():
                 if isinstance(config, str):
                     target_model = config
                     if restriction_service and not restriction_service.is_allowed(ProviderType.GOOGLE, target_model):
@@ -608,7 +608,7 @@ class TestAutoModeWithRestrictions:
         # Mock providers
         mock_openai = MagicMock()
-        mock_openai.SUPPORTED_MODELS = {
+        mock_openai.MODEL_CAPABILITIES = {
             "o3": {"context_window": 200000},
             "o3-mini": {"context_window": 200000},
             "o4-mini": {"context_window": 200000},
@@ -620,7 +620,7 @@ class TestAutoModeWithRestrictions:
             restriction_service = get_restriction_service() if respect_restrictions else None
             models = []
-            for model_name, config in mock_openai.SUPPORTED_MODELS.items():
+            for model_name, config in mock_openai.MODEL_CAPABILITIES.items():
                 if isinstance(config, str):
                     target_model = config
                     if restriction_service and not restriction_service.is_allowed(ProviderType.OPENAI, target_model):
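
Each of these hunks rewrites the same mocked get_available_models pattern: walk the capability map, resolve string alias entries to their target, and drop anything the restriction service rejects. As a standalone sketch (the is_allowed signature is taken from the calls above; the function name and parameters are otherwise illustrative):

    def filter_available_models(capabilities, provider_type, restriction_service=None):
        """Collect usable model names from a capability map, honouring optional restrictions."""
        models = []
        for model_name, config in capabilities.items():
            # String values are alias entries pointing at a target model; restrictions apply to the target.
            target = config if isinstance(config, str) else model_name
            if restriction_service and not restriction_service.is_allowed(provider_type, target):
                continue
            models.append(model_name)
        return models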

View File

@@ -205,7 +205,7 @@ class TestO3TemperatureParameterFixSimple:
                 ), f"Model {model} capabilities should have supports_temperature field"
                 assert capabilities.supports_temperature is True, f"Model {model} should have supports_temperature=True"
             except ValueError:
-                # Skip if model not in SUPPORTED_MODELS (that's okay for this test)
+                # Skip if model not in MODEL_CAPABILITIES (that's okay for this test)
                 pass
     @patch("utils.model_restrictions.get_restriction_service")

View File

@@ -28,7 +28,7 @@ class TestOldBehaviorSimulation:
         """
         # Create a mock provider that simulates the OLD BROKEN BEHAVIOR
         old_broken_provider = MagicMock()
-        old_broken_provider.SUPPORTED_MODELS = {
+        old_broken_provider.MODEL_CAPABILITIES = {
             "mini": "o4-mini",  # alias -> target
             "o3mini": "o3-mini",  # alias -> target
             "o4-mini": {"context_window": 200000},
@@ -73,7 +73,7 @@ class TestOldBehaviorSimulation:
         """
         # Create mock provider with NEW FIXED BEHAVIOR
         new_fixed_provider = MagicMock()
-        new_fixed_provider.SUPPORTED_MODELS = {
+        new_fixed_provider.MODEL_CAPABILITIES = {
             "mini": "o4-mini",
             "o3mini": "o3-mini",
             "o4-mini": {"context_window": 200000},
@@ -203,14 +203,14 @@ class TestOldBehaviorSimulation:
         for provider in providers:
             all_known = provider.list_all_known_models()
-            # Check that for every alias in SUPPORTED_MODELS, its target is also included
-            for model_name, config in provider.SUPPORTED_MODELS.items():
-                # Model name itself should be in the list
+            # Check that every model and its aliases appear in the comprehensive list
+            for model_name, config in provider.MODEL_CAPABILITIES.items():
                 assert model_name.lower() in all_known, f"{provider.__class__.__name__}: Missing model {model_name}"
-                # If it's an alias (config is a string), target should also be in list
-                if isinstance(config, str):
-                    target_model = config
+                for alias in getattr(config, "aliases", []):
                     assert (
-                        target_model.lower() in all_known
-                    ), f"{provider.__class__.__name__}: Missing target {target_model} for alias {model_name}"
+                        alias.lower() in all_known
+                    ), f"{provider.__class__.__name__}: Missing alias {alias} for model {model_name}"
+                    assert (
+                        provider._resolve_model_name(alias) == model_name
+                    ), f"{provider.__class__.__name__}: Alias {alias} should resolve to {model_name}"

View File

@@ -15,7 +15,7 @@ class TestOpenAICompatibleTokenUsage(unittest.TestCase):
         # Create a concrete implementation for testing
         class TestProvider(OpenAICompatibleProvider):
             FRIENDLY_NAME = "Test"
-            SUPPORTED_MODELS = {"test-model": {"context_window": 4096}}
+            MODEL_CAPABILITIES = {"test-model": {"context_window": 4096}}
             def get_capabilities(self, model_name):
                 return Mock()

View File

@@ -1,4 +1,4 @@
-"""Test the SUPPORTED_MODELS aliases structure across all providers."""
+"""Test the MODEL_CAPABILITIES aliases structure across all providers."""
 from providers.dial import DIALModelProvider
 from providers.gemini import GeminiModelProvider
@@ -7,24 +7,24 @@ from providers.xai import XAIModelProvider
 class TestSupportedModelsAliases:
-    """Test that all providers have correctly structured SUPPORTED_MODELS with aliases."""
+    """Test that all providers have correctly structured MODEL_CAPABILITIES with aliases."""
     def test_gemini_provider_aliases(self):
         """Test Gemini provider's alias structure."""
         provider = GeminiModelProvider("test-key")
         # Check that all models have ModelCapabilities with aliases
-        for model_name, config in provider.SUPPORTED_MODELS.items():
+        for model_name, config in provider.MODEL_CAPABILITIES.items():
             assert hasattr(config, "aliases"), f"{model_name} must have aliases attribute"
             assert isinstance(config.aliases, list), f"{model_name} aliases must be a list"
         # Test specific aliases
-        assert "flash" in provider.SUPPORTED_MODELS["gemini-2.5-flash"].aliases
-        assert "pro" in provider.SUPPORTED_MODELS["gemini-2.5-pro"].aliases
-        assert "flash-2.0" in provider.SUPPORTED_MODELS["gemini-2.0-flash"].aliases
-        assert "flash2" in provider.SUPPORTED_MODELS["gemini-2.0-flash"].aliases
-        assert "flashlite" in provider.SUPPORTED_MODELS["gemini-2.0-flash-lite"].aliases
-        assert "flash-lite" in provider.SUPPORTED_MODELS["gemini-2.0-flash-lite"].aliases
+        assert "flash" in provider.MODEL_CAPABILITIES["gemini-2.5-flash"].aliases
+        assert "pro" in provider.MODEL_CAPABILITIES["gemini-2.5-pro"].aliases
+        assert "flash-2.0" in provider.MODEL_CAPABILITIES["gemini-2.0-flash"].aliases
+        assert "flash2" in provider.MODEL_CAPABILITIES["gemini-2.0-flash"].aliases
+        assert "flashlite" in provider.MODEL_CAPABILITIES["gemini-2.0-flash-lite"].aliases
+        assert "flash-lite" in provider.MODEL_CAPABILITIES["gemini-2.0-flash-lite"].aliases
         # Test alias resolution
         assert provider._resolve_model_name("flash") == "gemini-2.5-flash"
@@ -42,18 +42,18 @@ class TestSupportedModelsAliases:
         provider = OpenAIModelProvider("test-key")
         # Check that all models have ModelCapabilities with aliases
-        for model_name, config in provider.SUPPORTED_MODELS.items():
+        for model_name, config in provider.MODEL_CAPABILITIES.items():
             assert hasattr(config, "aliases"), f"{model_name} must have aliases attribute"
             assert isinstance(config.aliases, list), f"{model_name} aliases must be a list"
         # Test specific aliases
         # "mini" is now an alias for gpt-5-mini, not o4-mini
-        assert "mini" in provider.SUPPORTED_MODELS["gpt-5-mini"].aliases
-        assert "o4mini" in provider.SUPPORTED_MODELS["o4-mini"].aliases
+        assert "mini" in provider.MODEL_CAPABILITIES["gpt-5-mini"].aliases
+        assert "o4mini" in provider.MODEL_CAPABILITIES["o4-mini"].aliases
         # o4-mini is no longer in its own aliases (removed self-reference)
-        assert "o3mini" in provider.SUPPORTED_MODELS["o3-mini"].aliases
-        assert "o3pro" in provider.SUPPORTED_MODELS["o3-pro"].aliases
-        assert "gpt4.1" in provider.SUPPORTED_MODELS["gpt-4.1"].aliases
+        assert "o3mini" in provider.MODEL_CAPABILITIES["o3-mini"].aliases
+        assert "o3pro" in provider.MODEL_CAPABILITIES["o3-pro"].aliases
+        assert "gpt4.1" in provider.MODEL_CAPABILITIES["gpt-4.1"].aliases
         # Test alias resolution
         assert provider._resolve_model_name("mini") == "gpt-5-mini"  # mini -> gpt-5-mini now
@@ -71,16 +71,16 @@ class TestSupportedModelsAliases:
         provider = XAIModelProvider("test-key")
         # Check that all models have ModelCapabilities with aliases
-        for model_name, config in provider.SUPPORTED_MODELS.items():
+        for model_name, config in provider.MODEL_CAPABILITIES.items():
             assert hasattr(config, "aliases"), f"{model_name} must have aliases attribute"
             assert isinstance(config.aliases, list), f"{model_name} aliases must be a list"
         # Test specific aliases
-        assert "grok" in provider.SUPPORTED_MODELS["grok-4"].aliases
-        assert "grok4" in provider.SUPPORTED_MODELS["grok-4"].aliases
-        assert "grok3" in provider.SUPPORTED_MODELS["grok-3"].aliases
-        assert "grok3fast" in provider.SUPPORTED_MODELS["grok-3-fast"].aliases
-        assert "grokfast" in provider.SUPPORTED_MODELS["grok-3-fast"].aliases
+        assert "grok" in provider.MODEL_CAPABILITIES["grok-4"].aliases
+        assert "grok4" in provider.MODEL_CAPABILITIES["grok-4"].aliases
+        assert "grok3" in provider.MODEL_CAPABILITIES["grok-3"].aliases
+        assert "grok3fast" in provider.MODEL_CAPABILITIES["grok-3-fast"].aliases
+        assert "grokfast" in provider.MODEL_CAPABILITIES["grok-3-fast"].aliases
         # Test alias resolution
         assert provider._resolve_model_name("grok") == "grok-4"
@@ -98,16 +98,16 @@ class TestSupportedModelsAliases:
         provider = DIALModelProvider("test-key")
         # Check that all models have ModelCapabilities with aliases
-        for model_name, config in provider.SUPPORTED_MODELS.items():
+        for model_name, config in provider.MODEL_CAPABILITIES.items():
             assert hasattr(config, "aliases"), f"{model_name} must have aliases attribute"
             assert isinstance(config.aliases, list), f"{model_name} aliases must be a list"
         # Test specific aliases
-        assert "o3" in provider.SUPPORTED_MODELS["o3-2025-04-16"].aliases
-        assert "o4-mini" in provider.SUPPORTED_MODELS["o4-mini-2025-04-16"].aliases
-        assert "sonnet-4.1" in provider.SUPPORTED_MODELS["anthropic.claude-sonnet-4.1-20250805-v1:0"].aliases
-        assert "opus-4.1" in provider.SUPPORTED_MODELS["anthropic.claude-opus-4.1-20250805-v1:0"].aliases
-        assert "gemini-2.5-pro" in provider.SUPPORTED_MODELS["gemini-2.5-pro-preview-05-06"].aliases
+        assert "o3" in provider.MODEL_CAPABILITIES["o3-2025-04-16"].aliases
+        assert "o4-mini" in provider.MODEL_CAPABILITIES["o4-mini-2025-04-16"].aliases
+        assert "sonnet-4.1" in provider.MODEL_CAPABILITIES["anthropic.claude-sonnet-4.1-20250805-v1:0"].aliases
+        assert "opus-4.1" in provider.MODEL_CAPABILITIES["anthropic.claude-opus-4.1-20250805-v1:0"].aliases
+        assert "gemini-2.5-pro" in provider.MODEL_CAPABILITIES["gemini-2.5-pro-preview-05-06"].aliases
         # Test alias resolution
         assert provider._resolve_model_name("o3") == "o3-2025-04-16"
@@ -183,12 +183,12 @@ class TestSupportedModelsAliases:
         ]
         for provider in providers:
-            for model_name, config in provider.SUPPORTED_MODELS.items():
+            for model_name, config in provider.MODEL_CAPABILITIES.items():
                 # All values must be ModelCapabilities objects, not strings or dicts
                 from providers.shared import ModelCapabilities
                 assert isinstance(config, ModelCapabilities), (
-                    f"{provider.__class__.__name__}.SUPPORTED_MODELS['{model_name}'] "
+                    f"{provider.__class__.__name__}.MODEL_CAPABILITIES['{model_name}'] "
                     f"must be a ModelCapabilities object, not {type(config).__name__}"
                 )
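
The invariant asserted here, that every value in MODEL_CAPABILITIES is a ModelCapabilities instance rather than a bare string or dict, is also easy to spot-check outside the test suite; a throwaway snippet along these lines (provider imports taken from the top of this file):

    from providers.gemini import GeminiModelProvider
    from providers.shared import ModelCapabilities

    provider = GeminiModelProvider("test-key")
    offenders = {
        name: type(config).__name__
        for name, config in provider.MODEL_CAPABILITIES.items()
        if not isinstance(config, ModelCapabilities)
    }
    print(offenders or "all entries are ModelCapabilities")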

View File

@@ -256,18 +256,18 @@ class TestXAIProvider:
         assert capabilities.friendly_name == "X.AI (Grok 3)"
     def test_supported_models_structure(self):
-        """Test that SUPPORTED_MODELS has the correct structure."""
+        """Test that MODEL_CAPABILITIES has the correct structure."""
         provider = XAIModelProvider("test-key")
         # Check that all expected base models are present
-        assert "grok-4" in provider.SUPPORTED_MODELS
-        assert "grok-3" in provider.SUPPORTED_MODELS
-        assert "grok-3-fast" in provider.SUPPORTED_MODELS
+        assert "grok-4" in provider.MODEL_CAPABILITIES
+        assert "grok-3" in provider.MODEL_CAPABILITIES
+        assert "grok-3-fast" in provider.MODEL_CAPABILITIES
         # Check model configs have required fields
         from providers.shared import ModelCapabilities
-        grok4_config = provider.SUPPORTED_MODELS["grok-4"]
+        grok4_config = provider.MODEL_CAPABILITIES["grok-4"]
         assert isinstance(grok4_config, ModelCapabilities)
         assert hasattr(grok4_config, "context_window")
         assert hasattr(grok4_config, "supports_extended_thinking")
@@ -280,18 +280,18 @@ class TestXAIProvider:
         assert "grok-4" in grok4_config.aliases
         assert "grok4" in grok4_config.aliases
-        grok3_config = provider.SUPPORTED_MODELS["grok-3"]
+        grok3_config = provider.MODEL_CAPABILITIES["grok-3"]
         assert grok3_config.context_window == 131_072
         assert grok3_config.supports_extended_thinking is False
         # Check aliases are correctly structured
         assert "grok3" in grok3_config.aliases  # grok3 resolves to grok-3
         # Check grok-4 aliases
-        grok4_config = provider.SUPPORTED_MODELS["grok-4"]
+        grok4_config = provider.MODEL_CAPABILITIES["grok-4"]
         assert "grok" in grok4_config.aliases  # grok resolves to grok-4
         assert "grok4" in grok4_config.aliases
-        grok3fast_config = provider.SUPPORTED_MODELS["grok-3-fast"]
+        grok3fast_config = provider.MODEL_CAPABILITIES["grok-3-fast"]
         assert "grok3fast" in grok3fast_config.aliases
         assert "grokfast" in grok3fast_config.aliases