feat(zen): add live model sync

This commit is contained in:
Torbjørn Lindahl
2026-04-01 23:48:16 +02:00
parent 65567ec40e
commit 7ef476cfbd
7 changed files with 1300 additions and 9 deletions

View File

@@ -3,6 +3,7 @@
import json
import os
import tempfile
from pathlib import Path
from unittest.mock import patch
from providers.registries.zen import ZenModelRegistry
@@ -43,14 +44,19 @@ class TestZenModelRegistry:
json.dump(config_data, f)
temp_path = f.name
with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as live_file:
json.dump({"models": []}, live_file)
live_path = live_file.name
try:
registry = ZenModelRegistry(config_path=temp_path)
registry = ZenModelRegistry(config_path=temp_path, live_config_path=live_path)
assert len(registry.list_models()) == 1
assert "test/zen-model-1" in registry.list_models()
assert "zen-test1" in registry.list_aliases()
assert "zt1" in registry.list_aliases()
finally:
os.unlink(temp_path)
os.unlink(live_path)
def test_get_capabilities(self):
"""Test capability retrieval."""
@@ -158,9 +164,172 @@ class TestZenModelRegistry:
json.dump(empty_config, f)
temp_path = f.name
with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as live_file:
json.dump({"models": []}, live_file)
live_path = live_file.name
try:
registry = ZenModelRegistry(config_path=temp_path)
registry = ZenModelRegistry(config_path=temp_path, live_config_path=live_path)
assert len(registry.list_models()) == 0
assert len(registry.list_aliases()) == 0
finally:
os.unlink(temp_path)
os.unlink(live_path)
def test_live_catalogue_adds_unsynced_model_ids(self):
    """Model IDs present only in the live catalogue are added to the registry.

    The curated manifest knows ``gpt-5.1``; the live catalogue additionally
    advertises ``gpt-5.4``. The registry must expose the live-only model and
    surface its live metadata (description, response-API flag) unchanged.
    """
    curated_data = {
        "models": [
            {
                "model_name": "gpt-5.1",
                "aliases": ["zen-gpt5.1"],
                "context_window": 400000,
                "max_output_tokens": 64000,
                "intelligence_score": 16,
            }
        ]
    }
    live_data = {
        "models": [
            {
                "model_name": "gpt-5.4",
                "aliases": [],
                "context_window": 1050000,
                "max_output_tokens": 128000,
                "supports_extended_thinking": True,
                "supports_json_mode": True,
                "supports_function_calling": True,
                "supports_images": True,
                "max_image_size_mb": 20.0,
                "supports_temperature": True,
                "temperature_constraint": "range",
                "description": "Live-only GPT-5.4 entry",
                "use_openai_response_api": True,
            }
        ]
    }
    # Track created files so cleanup runs even if the second temp file
    # fails to be created (the original pattern leaked the first file then).
    temp_paths = []
    try:
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as curated_file:
            json.dump(curated_data, curated_file)
            curated_path = curated_file.name
        temp_paths.append(curated_path)
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as live_file:
            json.dump(live_data, live_file)
            live_path = live_file.name
        temp_paths.append(live_path)

        registry = ZenModelRegistry(config_path=curated_path, live_config_path=live_path)
        assert "gpt-5.4" in registry.list_models()
        caps = registry.resolve("gpt-5.4")
        assert caps is not None
        assert caps.description == "Live-only GPT-5.4 entry"
        assert caps.use_openai_response_api is True
    finally:
        # Remove every file we managed to create; a failed unlink of one
        # must not prevent removal of the others.
        for path in temp_paths:
            try:
                os.unlink(path)
            except FileNotFoundError:
                pass
def test_curated_manifest_overrides_live_metadata(self):
    """Curated manifest fields win over conflicting live-catalogue fields.

    Both sources define ``gpt-5.4`` with disagreeing metadata; resolving the
    curated alias must yield the curated values (description, window sizes,
    capability flags), not the live baseline.
    """
    curated_data = {
        "models": [
            {
                "model_name": "gpt-5.4",
                "aliases": ["zen-gpt5.4"],
                "context_window": 1050000,
                "max_output_tokens": 128000,
                "supports_extended_thinking": True,
                "supports_json_mode": True,
                "supports_function_calling": True,
                "supports_images": True,
                "max_image_size_mb": 20.0,
                "supports_temperature": False,
                "temperature_constraint": "fixed",
                "description": "Curated override",
                "intelligence_score": 19,
                "allow_code_generation": True,
                "use_openai_response_api": True,
            }
        ]
    }
    live_data = {
        "models": [
            {
                "model_name": "gpt-5.4",
                "aliases": [],
                "context_window": 400000,
                "max_output_tokens": 64000,
                "supports_extended_thinking": False,
                "supports_json_mode": True,
                "supports_function_calling": True,
                "supports_images": False,
                "max_image_size_mb": 0.0,
                "supports_temperature": True,
                "temperature_constraint": "range",
                "description": "Live baseline",
                "intelligence_score": 10,
                "allow_code_generation": False,
            }
        ]
    }
    # Track created files so cleanup runs even if the second temp file
    # fails to be created (the original pattern leaked the first file then).
    temp_paths = []
    try:
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as curated_file:
            json.dump(curated_data, curated_file)
            curated_path = curated_file.name
        temp_paths.append(curated_path)
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as live_file:
            json.dump(live_data, live_file)
            live_path = live_file.name
        temp_paths.append(live_path)

        registry = ZenModelRegistry(config_path=curated_path, live_config_path=live_path)
        caps = registry.resolve("zen-gpt5.4")
        assert caps is not None
        assert caps.model_name == "gpt-5.4"
        assert caps.description == "Curated override"
        assert caps.context_window == 1050000
        assert caps.max_output_tokens == 128000
        assert caps.supports_images is True
        assert caps.supports_temperature is False
        assert caps.allow_code_generation is True
        assert caps.use_openai_response_api is True
    finally:
        # Remove every file we managed to create; a failed unlink of one
        # must not prevent removal of the others.
        for path in temp_paths:
            try:
                os.unlink(path)
            except FileNotFoundError:
                pass
def test_missing_live_catalogue_keeps_curated_models_working(self, monkeypatch):
    """A nonexistent live-catalogue path must not disable curated models.

    Points the live-config env var at a path guaranteed not to exist and
    verifies the default registry still loads the curated catalogue.
    """
    absent_path = Path(tempfile.gettempdir()) / "pal-missing-zen-live.json"
    absent_path.unlink(missing_ok=True)  # ensure the path really is absent
    monkeypatch.setenv("ZEN_LIVE_MODELS_CONFIG_PATH", str(absent_path))

    registry = ZenModelRegistry()
    assert "gpt-5.1" in registry.list_models()
    assert registry.resolve("zen-gpt5.1") is not None
def test_invalid_live_json_keeps_curated_models_working(self):
    """Malformed live-catalogue JSON must not break curated model loading.

    Writes syntactically invalid JSON to the live path and verifies the
    registry still serves the curated ``gpt-5.1`` entry and its alias.
    """
    curated_data = {
        "models": [
            {
                "model_name": "gpt-5.1",
                "aliases": ["zen-gpt5.1"],
                "context_window": 400000,
                "max_output_tokens": 64000,
                "intelligence_score": 16,
            }
        ]
    }
    # Track created files so cleanup runs even if the second temp file
    # fails to be created (the original pattern leaked the first file then).
    temp_paths = []
    try:
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as curated_file:
            json.dump(curated_data, curated_file)
            curated_path = curated_file.name
        temp_paths.append(curated_path)
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as live_file:
            live_file.write("{ invalid json }")
            live_path = live_file.name
        temp_paths.append(live_path)

        registry = ZenModelRegistry(config_path=curated_path, live_config_path=live_path)
        assert "gpt-5.1" in registry.list_models()
        assert registry.resolve("zen-gpt5.1") is not None
    finally:
        # Remove every file we managed to create; a failed unlink of one
        # must not prevent removal of the others.
        for path in temp_paths:
            try:
                os.unlink(path)
            except FileNotFoundError:
                pass