# Files
# my-pal-mcp-server/tests/test_sync_openrouter_models.py
# 2026-04-01 11:41:53 +02:00
#
# 54 lines
# 2.0 KiB
# Python
import importlib.util
from pathlib import Path

# Path to the script under test: <repo-root>/scripts/sync_openrouter_models.py.
# The script lives outside the package, so it is loaded by file path rather
# than by a normal import.
SCRIPT_PATH = Path(__file__).resolve().parents[1] / "scripts" / "sync_openrouter_models.py"

SPEC = importlib.util.spec_from_file_location("sync_openrouter_models", SCRIPT_PATH)
# Fail loudly if the script is missing or unloadable. A plain `assert` here
# would be stripped under `python -O`, silently deferring the failure to the
# first attribute access on MODULE.
if SPEC is None or SPEC.loader is None:
    raise ImportError(f"cannot load sync script from {SCRIPT_PATH}")
MODULE = importlib.util.module_from_spec(SPEC)
SPEC.loader.exec_module(MODULE)
def test_convert_model_maps_openrouter_payload_conservatively():
    """A full-featured OpenRouter entry is mapped field-by-field, with
    capabilities the payload does not advertise left switched off."""
    payload = {
        "id": "openai/gpt-5.4",
        "description": "GPT-5.4 description",
        "context_length": 400000,
        "top_provider": {"max_completion_tokens": 128000},
        "architecture": {"input_modalities": ["text", "image"]},
        "supported_parameters": ["temperature", "reasoning", "response_format", "structured_outputs"],
    }

    result = MODULE.convert_model(payload)

    assert result is not None
    # Identity and size limits come straight from the payload.
    assert result["model_name"] == "openai/gpt-5.4"
    assert result["context_window"] == 400000
    assert result["max_output_tokens"] == 128000
    assert result["max_image_size_mb"] == 20.0
    # Capabilities advertised via supported_parameters / modalities.
    assert result["supports_extended_thinking"] is True
    assert result["supports_json_mode"] is True
    assert result["supports_images"] is True
    assert result["supports_temperature"] is True
    assert result["temperature_constraint"] == "range"
    # Conservative defaults: nothing in the payload claims these.
    assert result["supports_function_calling"] is False
    assert result["allow_code_generation"] is False
def test_convert_model_marks_reasoning_models_without_temperature_as_fixed():
    """A reasoning-only, text-only entry gets temperature disabled and the
    constraint pinned to "fixed"."""
    reasoning_only = {
        "id": "openai/o3",
        "description": "o3 description",
        "context_length": 200000,
        "top_provider": {},
        "architecture": {"input_modalities": ["text"]},
        "supported_parameters": ["reasoning"],
    }

    result = MODULE.convert_model(reasoning_only)

    assert result is not None
    # "temperature" absent from supported_parameters -> fixed, not tunable.
    assert result["supports_temperature"] is False
    assert result["temperature_constraint"] == "fixed"
    # Text-only input modalities -> no image support.
    assert result["supports_images"] is False