Files
my-pal-mcp-server/tests/test_openrouter_registry.py
2026-04-01 12:07:53 +02:00

399 lines
15 KiB
Python

"""Tests for OpenRouter model registry functionality."""
import json
import os
import tempfile
from pathlib import Path
from unittest.mock import patch
import pytest
from providers.registries.openrouter import OpenRouterModelRegistry
from providers.shared import ModelCapabilities, ProviderType
class TestOpenRouterModelRegistry:
"""Test cases for OpenRouter model registry."""
def test_registry_initialization(self):
    """Registry constructed with no arguments loads the bundled default config."""
    registry = OpenRouterModelRegistry()
    # The default manifest must expose at least one model and one alias.
    assert registry.list_models()
    assert registry.list_aliases()
def test_default_init_resolves_live_only_model(self):
    """A model shipped only via the live catalogue resolves with its live metadata."""
    registry = OpenRouterModelRegistry()
    caps = registry.resolve("x-ai/grok-4")
    assert caps is not None
    assert caps.model_name == "x-ai/grok-4"
    assert caps.context_window == 256000
    assert caps.supports_extended_thinking is True
def test_custom_config_path(self):
    """Test registry with custom config path."""
    manifest = {
        "models": [
            {
                "model_name": "test/model-1",
                "aliases": ["test1", "t1"],
                "context_window": 4096,
                "max_output_tokens": 2048,
            }
        ]
    }
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as handle:
        handle.write(json.dumps(manifest))
        config_file = handle.name
    try:
        registry = OpenRouterModelRegistry(config_path=config_file)
        # Entries from the custom manifest are present, aliases included.
        assert "test/model-1" in registry.list_models()
        for alias in ("test1", "t1"):
            assert alias in registry.list_aliases()
        # Live-catalogue models are still merged in alongside the custom file.
        assert registry.resolve("x-ai/grok-4") is not None
    finally:
        os.unlink(config_file)
def test_environment_variable_override(self, monkeypatch):
    """Test OPENROUTER_MODELS_CONFIG_PATH environment variable."""
    manifest = {
        "models": [
            {"model_name": "env/model", "aliases": ["envtest"], "context_window": 8192, "max_output_tokens": 4096}
        ]
    }
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as handle:
        handle.write(json.dumps(manifest))
        override_path = handle.name
    try:
        # Point the registry at the temp manifest purely via the environment,
        # then construct it with no explicit path argument.
        monkeypatch.setenv("OPENROUTER_MODELS_CONFIG_PATH", override_path)
        registry = OpenRouterModelRegistry()
        assert "env/model" in registry.list_models()
        assert "envtest" in registry.list_aliases()
    finally:
        os.unlink(override_path)
def test_alias_resolution(self):
    """Test alias resolution functionality."""
    registry = OpenRouterModelRegistry()
    # alias -> canonical model id expected from the default manifest
    expected = {
        "opus": "anthropic/claude-opus-4.5",  # opus now points to 4.5
        "OPUS": "anthropic/claude-opus-4.5",  # lookup is case-insensitive
        "claude-opus": "anthropic/claude-opus-4.5",
        "opus4.5": "anthropic/claude-opus-4.5",
        "opus4.1": "anthropic/claude-opus-4.1",  # 4.1 still accessible
        "sonnet": "anthropic/claude-sonnet-4.5",
        "o3": "openai/o3",
        "deepseek": "deepseek/deepseek-r1-0528",
        "mistral": "mistralai/mistral-large-2411",
    }
    for alias, model_name in expected.items():
        caps = registry.resolve(alias)
        assert caps is not None, f"Failed to resolve alias '{alias}'"
        assert caps.model_name == model_name
def test_direct_model_name_lookup(self):
    """Test looking up models by their full name."""
    registry = OpenRouterModelRegistry()
    # Full provider/model ids resolve to themselves, no alias needed.
    for full_name in ("anthropic/claude-opus-4.1", "openai/o3"):
        caps = registry.resolve(full_name)
        assert caps is not None
        assert caps.model_name == full_name
def test_unknown_model_resolution(self):
    """Test resolution of unknown models."""
    registry = OpenRouterModelRegistry()
    # Unrecognized names (including the empty string) resolve to None.
    for bad_name in ("unknown-alias", "", "non-existent"):
        assert registry.resolve(bad_name) is None
def test_model_capabilities_conversion(self):
    """Test that registry returns ModelCapabilities directly."""
    registry = OpenRouterModelRegistry()
    caps = registry.resolve("opus")
    assert caps is not None
    # resolve() hands back a ModelCapabilities object; the "opus" alias
    # now targets the 4.5 model.
    assert caps.provider == ProviderType.OPENROUTER
    assert caps.model_name == "anthropic/claude-opus-4.5"
    assert caps.friendly_name == "OpenRouter (anthropic/claude-opus-4.5)"
    assert caps.context_window == 200000
    assert not caps.supports_extended_thinking
def test_duplicate_alias_detection(self):
    """Test that duplicate aliases are detected."""
    manifest = {
        "models": [
            {"model_name": "test/model-1", "aliases": ["dupe"], "context_window": 4096, "max_output_tokens": 2048},
            {
                "model_name": "test/model-2",
                "aliases": ["DUPE"],  # same alias, different case
                "context_window": 8192,
                "max_output_tokens": 2048,
            },
        ]
    }
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as handle:
        handle.write(json.dumps(manifest))
        manifest_path = handle.name
    try:
        # The case-insensitive collision must be rejected at load time.
        with pytest.raises(ValueError, match="Duplicate alias"):
            OpenRouterModelRegistry(config_path=manifest_path)
    finally:
        os.unlink(manifest_path)
def test_backwards_compatibility_max_tokens(self):
    """Test that the legacy max_tokens field is rejected with a pointer to max_output_tokens.

    The old field name is no longer silently mapped to max_output_tokens;
    loading a manifest that still uses it must raise a ValueError so the
    config gets migrated explicitly. (Method name kept for test-history
    continuity.)
    """
    config_data = {
        "models": [
            {
                "model_name": "test/old-model",
                "aliases": ["old"],
                "max_tokens": 16384,  # legacy field name — must be rejected
                "supports_extended_thinking": False,
            }
        ]
    }
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        json.dump(config_data, f)
        temp_path = f.name
    try:
        # Clear the environment so no *_CONFIG_PATH override masks the temp file.
        with patch.dict("os.environ", {}, clear=True):
            # Error message must steer the user to the new field name.
            with pytest.raises(ValueError, match="max_output_tokens"):
                OpenRouterModelRegistry(config_path=temp_path)
    finally:
        os.unlink(temp_path)
def test_missing_config_file(self):
    """Test behavior with missing config file."""
    with patch.dict("os.environ", {}, clear=True):
        # A bogus curated path (with no env overrides) still yields a usable
        # registry: live models resolve, unknown names do not.
        registry = OpenRouterModelRegistry(config_path="/non/existent/path.json")
        assert registry.list_models()
        assert registry.resolve("x-ai/grok-4") is not None
        assert registry.resolve("anything") is None
def test_invalid_json_config(self):
    """Test handling of invalid JSON."""
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as handle:
        handle.write("{ invalid json }")
        broken_path = handle.name
    try:
        # A malformed curated manifest is tolerated; live models still load.
        registry = OpenRouterModelRegistry(config_path=broken_path)
        assert registry.list_models()
        assert registry.resolve("x-ai/grok-4") is not None
    finally:
        os.unlink(broken_path)
def test_live_catalogue_adds_unsynced_model_ids(self):
    """Models present only in the live catalogue are appended to the registry."""
    curated = {
        "models": [
            {
                "model_name": "openai/gpt-5.2",
                "aliases": ["gpt5.2"],
                "context_window": 400000,
                "max_output_tokens": 128000,
            }
        ]
    }
    live = {
        "models": [
            {
                "model_name": "openai/gpt-5.4",
                "aliases": [],
                "context_window": 400000,
                "max_output_tokens": 128000,
                "supports_extended_thinking": True,
                "supports_json_mode": True,
                "supports_function_calling": False,
                "supports_images": True,
                "max_image_size_mb": 20.0,
                "supports_temperature": True,
                "temperature_constraint": "range",
                "description": "Live-only GPT-5.4 entry",
            }
        ]
    }
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as curated_handle:
        curated_handle.write(json.dumps(curated))
        curated_path = curated_handle.name
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as live_handle:
        live_handle.write(json.dumps(live))
        live_path = live_handle.name
    try:
        registry = OpenRouterModelRegistry(config_path=curated_path, live_config_path=live_path)
        # The live-only id shows up and carries its live metadata.
        assert "openai/gpt-5.4" in registry.list_models()
        caps = registry.resolve("openai/gpt-5.4")
        assert caps is not None
        assert caps.description == "Live-only GPT-5.4 entry"
    finally:
        os.unlink(curated_path)
        os.unlink(live_path)
def test_curated_manifest_overrides_live_metadata(self):
    """Curated manifest fields win over conflicting live-catalogue metadata."""
    curated = {
        "models": [
            {
                "model_name": "openai/gpt-5.4",
                "aliases": ["gpt5.4"],
                "context_window": 400000,
                "max_output_tokens": 128000,
                "supports_extended_thinking": True,
                "supports_json_mode": True,
                "supports_function_calling": True,
                "supports_images": True,
                "max_image_size_mb": 20.0,
                "supports_temperature": False,
                "temperature_constraint": "fixed",
                "description": "Curated override",
                "intelligence_score": 18,
                "allow_code_generation": True,
                "use_openai_response_api": True,
            }
        ]
    }
    # Live entry deliberately disagrees on every field the curated one sets.
    live = {
        "models": [
            {
                "model_name": "openai/gpt-5.4",
                "aliases": [],
                "context_window": 1234,
                "max_output_tokens": 5678,
                "supports_extended_thinking": False,
                "supports_json_mode": False,
                "supports_function_calling": False,
                "supports_images": False,
                "max_image_size_mb": 0.0,
                "supports_temperature": True,
                "temperature_constraint": "range",
                "description": "Live baseline",
                "intelligence_score": 10,
                "allow_code_generation": False,
                "use_openai_response_api": False,
            }
        ]
    }
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as curated_handle:
        curated_handle.write(json.dumps(curated))
        curated_path = curated_handle.name
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as live_handle:
        live_handle.write(json.dumps(live))
        live_path = live_handle.name
    try:
        registry = OpenRouterModelRegistry(config_path=curated_path, live_config_path=live_path)
        caps = registry.resolve("gpt5.4")
        assert caps is not None
        assert caps.model_name == "openai/gpt-5.4"
        # Every curated value should shadow the live baseline.
        assert caps.description == "Curated override"
        assert caps.context_window == 400000
        assert caps.max_output_tokens == 128000
        assert caps.supports_function_calling is True
        assert caps.supports_temperature is False
        assert caps.allow_code_generation is True
        assert caps.use_openai_response_api is True
    finally:
        os.unlink(curated_path)
        os.unlink(live_path)
def test_missing_live_catalogue_keeps_curated_models_working(self, monkeypatch):
    """A dangling live-catalogue path must not break curated model lookup."""
    absent_path = Path(tempfile.gettempdir()) / "pal-missing-openrouter-live.json"
    # Guarantee the file really is absent before pointing the env var at it.
    absent_path.unlink(missing_ok=True)
    monkeypatch.setenv("OPENROUTER_LIVE_MODELS_CONFIG_PATH", str(absent_path))
    registry = OpenRouterModelRegistry()
    assert "openai/o3" in registry.list_models()
    assert registry.resolve("o3") is not None
def test_invalid_live_json_keeps_curated_models_working(self):
    """A corrupt live catalogue is ignored; the curated manifest still loads."""
    curated = {
        "models": [
            {
                "model_name": "openai/gpt-5.2",
                "aliases": ["gpt5.2"],
                "context_window": 400000,
                "max_output_tokens": 128000,
            }
        ]
    }
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as curated_handle:
        curated_handle.write(json.dumps(curated))
        curated_path = curated_handle.name
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as live_handle:
        live_handle.write("{ invalid json }")
        live_path = live_handle.name
    try:
        registry = OpenRouterModelRegistry(config_path=curated_path, live_config_path=live_path)
        assert "openai/gpt-5.2" in registry.list_models()
        assert registry.resolve("gpt5.2") is not None
    finally:
        os.unlink(curated_path)
        os.unlink(live_path)
def test_model_with_all_capabilities(self):
    """Test model with all capability flags."""
    from providers.shared import TemperatureConstraint

    # Construct a fully-featured capabilities record directly.
    caps = ModelCapabilities(
        provider=ProviderType.OPENROUTER,
        model_name="test/full-featured",
        friendly_name="OpenRouter (test/full-featured)",
        aliases=["full"],
        context_window=128000,
        max_output_tokens=8192,
        supports_extended_thinking=True,
        supports_system_prompts=True,
        supports_streaming=True,
        supports_function_calling=True,
        supports_json_mode=True,
        description="Fully featured test model",
        temperature_constraint=TemperatureConstraint.create("range"),
    )
    assert caps.context_window == 128000
    for flag in (
        caps.supports_extended_thinking,
        caps.supports_system_prompts,
        caps.supports_streaming,
        caps.supports_function_calling,
    ):
        assert flag
    # Note: supports_json_mode is not in ModelCapabilities yet — although it is
    # passed above, so this comment may be stale; confirm against the dataclass.