feat!: breaking change - OpenRouter models are now read from conf/openrouter_models.json while Custom / Self-hosted models are read from conf/custom_models.json

feat: Azure OpenAI / Azure AI Foundry support. Models should be defined in conf/azure_models.json (or a custom path). See .env.example or the README for the required environment variables. https://github.com/BeehiveInnovations/zen-mcp-server/issues/265
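
For context, a minimal sketch of how an Azure OpenAI deployment is typically called with the standard openai Python SDK, using the AZURE_OPENAI_API_KEY / AZURE_OPENAI_ENDPOINT variables referenced in the diff below; the deployment name and api_version here are illustrative assumptions, and the actual model/deployment mapping is read from conf/azure_models.json:

import os
from openai import AzureOpenAI

# Sketch only: the deployment name and api_version are placeholders,
# not values shipped with this commit.
client = AzureOpenAI(
    api_key=os.environ["AZURE_OPENAI_API_KEY"],
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    api_version="2024-06-01",
)

response = client.chat.completions.create(
    model="my-gpt4o-deployment",  # hypothetical Azure deployment name
    messages=[{"role": "user", "content": "ping"}],
)
print(response.choices[0].message.content)

The AzureOpenAIProvider added in this commit wraps a client of this kind; see .env.example for the full list of supported variables.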

feat: OpenRouter / Custom Models / Azure can now each use a custom config path (see .env.example)

refactor: the model registry class is now abstract; OpenRouter / Custom Provider / Azure OpenAI each subclass it

refactor: breaking change: the `is_custom` property has been removed from model_capabilities.py (and thus from custom_models.json), since each provider's models are now read from a separate configuration file
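
The registry refactor can be pictured roughly as follows; the class internals below are assumptions for illustration (the real implementations live under providers/), but the shape matches the diff: an abstract registry loads a JSON catalogue from a provider-specific default path, and an environment variable such as AZURE_MODELS_CONFIG_PATH may override that path.

import json
import os
from abc import ABC, abstractmethod
from pathlib import Path


class BaseModelRegistry(ABC):
    """Abstract catalogue loader; each provider supplies its own JSON file."""

    @property
    @abstractmethod
    def default_config_path(self) -> Path:
        """Default catalogue, e.g. conf/azure_models.json."""

    @property
    @abstractmethod
    def config_path_env_var(self) -> str:
        """Environment variable that overrides the default path."""

    def list_models(self) -> list[dict]:
        # Prefer the environment override, fall back to the bundled default.
        path = Path(os.getenv(self.config_path_env_var, str(self.default_config_path)))
        if not path.exists():
            return []
        with path.open(encoding="utf-8") as fh:
            data = json.load(fh)
        # Assumes a top-level "models" list in the JSON catalogue.
        return data.get("models", [])


class AzureModelRegistry(BaseModelRegistry):
    default_config_path = Path("conf/azure_models.json")
    config_path_env_var = "AZURE_MODELS_CONFIG_PATH"


class OpenRouterModelRegistry(BaseModelRegistry):
    default_config_path = Path("conf/openrouter_models.json")
    config_path_env_var = "OPENROUTER_MODELS_CONFIG_PATH"  # assumed variable name

In the diff below, azure_registry.list_models() is the call that decides whether the Azure provider gets registered at all.
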
Fahad
2025-10-04 21:10:56 +04:00
parent e91ed2a924
commit ff9a07a37a
40 changed files with 1651 additions and 852 deletions


@@ -377,6 +377,7 @@ def configure_providers():
         value = get_env(key)
         logger.debug(f" {key}: {'[PRESENT]' if value else '[MISSING]'}")
     from providers import ModelProviderRegistry
+    from providers.azure_openai import AzureOpenAIProvider
     from providers.custom import CustomProvider
     from providers.dial import DIALModelProvider
     from providers.gemini import GeminiModelProvider
@@ -411,6 +412,27 @@ def configure_providers():
     else:
         logger.debug("OpenAI API key is placeholder value")
+    # Check for Azure OpenAI configuration
+    azure_key = get_env("AZURE_OPENAI_API_KEY")
+    azure_endpoint = get_env("AZURE_OPENAI_ENDPOINT")
+    azure_models_available = False
+    if azure_key and azure_key != "your_azure_openai_key_here" and azure_endpoint:
+        try:
+            from providers.azure_registry import AzureModelRegistry
+            azure_registry = AzureModelRegistry()
+            if azure_registry.list_models():
+                valid_providers.append("Azure OpenAI")
+                has_native_apis = True
+                azure_models_available = True
+                logger.info("Azure OpenAI configuration detected")
+            else:
+                logger.warning(
+                    "Azure OpenAI models configuration is empty. Populate conf/azure_models.json or set AZURE_MODELS_CONFIG_PATH."
+                )
+        except Exception as exc:
+            logger.warning(f"Failed to load Azure OpenAI models: {exc}")
     # Check for X.AI API key
     xai_key = get_env("XAI_API_KEY")
     if xai_key and xai_key != "your_xai_api_key_here":
@@ -468,6 +490,10 @@ def configure_providers():
             ModelProviderRegistry.register_provider(ProviderType.OPENAI, OpenAIModelProvider)
             registered_providers.append(ProviderType.OPENAI.value)
             logger.debug(f"Registered provider: {ProviderType.OPENAI.value}")
+        if azure_models_available:
+            ModelProviderRegistry.register_provider(ProviderType.AZURE, AzureOpenAIProvider)
+            registered_providers.append(ProviderType.AZURE.value)
+            logger.debug(f"Registered provider: {ProviderType.AZURE.value}")
         if xai_key and xai_key != "your_xai_api_key_here":
             ModelProviderRegistry.register_provider(ProviderType.XAI, XAIModelProvider)
             registered_providers.append(ProviderType.XAI.value)