feat: Azure OpenAI / Azure AI Foundry support. Models should be defined in conf/azure_models.json (or a custom path). See .env.example for environment variables, or see the readme. https://github.com/BeehiveInnovations/zen-mcp-server/issues/265 feat: OpenRouter / Custom Models / Azure can now also each use a custom config path (see .env.example). refactor: Model registry class made abstract; OpenRouter / Custom Provider / Azure OpenAI now subclass it. refactor: breaking change: the `is_custom` property has been removed from model_capabilities.py (and thus from custom_models.json), given that each provider's models are now read from separate configuration files.
27 lines
1.1 KiB
Python
27 lines
1.1 KiB
Python
"""Registry for models exposed via custom (local) OpenAI-compatible endpoints."""
|
|
|
|
from __future__ import annotations
|
|
|
|
from .model_registry_base import CAPABILITY_FIELD_NAMES, CapabilityModelRegistry
|
|
from .shared import ModelCapabilities, ProviderType
|
|
|
|
|
|
class CustomEndpointModelRegistry(CapabilityModelRegistry):
    """Capability registry for models served from custom (local) OpenAI-compatible endpoints.

    Model definitions are loaded from ``custom_models.json`` — or from the
    path named by the ``CUSTOM_MODELS_CONFIG_PATH`` environment variable /
    explicit ``config_path`` argument — and exposed under the ``CUSTOM``
    provider type.
    """

    def __init__(self, config_path: str | None = None) -> None:
        """Configure the base registry for the custom provider and load entries.

        Args:
            config_path: Optional explicit path to the JSON configuration
                file. When ``None``, the base class falls back to the
                environment variable and then the default filename.
        """
        super().__init__(
            env_var_name="CUSTOM_MODELS_CONFIG_PATH",
            default_filename="custom_models.json",
            provider=ProviderType.CUSTOM,
            friendly_prefix="Custom ({model})",
            config_path=config_path,
        )
        # Populate the registry immediately so it is usable right after construction.
        self.reload()

    def _finalise_entry(self, entry: dict) -> tuple[ModelCapabilities, dict]:
        """Normalise one raw config entry into a ``ModelCapabilities`` object.

        The entry is stamped with the CUSTOM provider type and given a
        default friendly name (derived from its ``model_name``) before being
        narrowed down to the recognised capability fields.

        Args:
            entry: Mutable dict parsed from the JSON config; modified in place.

        Returns:
            A ``(capabilities, extras)`` pair where ``extras`` is always empty
            for this registry.
        """
        entry["provider"] = ProviderType.CUSTOM
        entry.setdefault("friendly_name", f"Custom ({entry['model_name']})")

        capability_kwargs = {key: value for key, value in entry.items() if key in CAPABILITY_FIELD_NAMES}
        # Defensive: ensure the provider survives the field filter above.
        capability_kwargs.setdefault("provider", ProviderType.CUSTOM)
        return ModelCapabilities(**capability_kwargs), {}
|