Final cleanup

Fahad
2025-06-13 07:12:29 +04:00
parent 5df3c97fd3
commit 0e36fcbc69
6 changed files with 153 additions and 133 deletions

View File

@@ -46,8 +46,14 @@ class O3ModelSelectionTest(BaseSimulatorTest):
         self.logger.info(" Test: O3 model selection and usage validation")
 
         # Check which API keys are configured
-        check_cmd = ["docker", "exec", self.container_name, "python", "-c",
-                     "import os; print(f'OPENAI_KEY:{bool(os.environ.get(\"OPENAI_API_KEY\"))}|OPENROUTER_KEY:{bool(os.environ.get(\"OPENROUTER_API_KEY\"))}')"]
+        check_cmd = [
+            "docker",
+            "exec",
+            self.container_name,
+            "python",
+            "-c",
+            'import os; print(f\'OPENAI_KEY:{bool(os.environ.get("OPENAI_API_KEY"))}|OPENROUTER_KEY:{bool(os.environ.get("OPENROUTER_API_KEY"))}\')',
+        ]
 
         result = subprocess.run(check_cmd, capture_output=True, text=True)
         has_openai = False
@@ -289,13 +295,21 @@ def multiply(x, y):
             logs = self.get_recent_server_logs()
 
             # Check for OpenRouter API calls
-            openrouter_api_logs = [line for line in logs.split("\n") if "openrouter" in line.lower() and ("API" in line or "request" in line)]
+            openrouter_api_logs = [
+                line
+                for line in logs.split("\n")
+                if "openrouter" in line.lower() and ("API" in line or "request" in line)
+            ]
 
             # Check for model resolution through OpenRouter
-            openrouter_model_logs = [line for line in logs.split("\n") if "openrouter" in line.lower() and ("o3" in line or "model" in line)]
+            openrouter_model_logs = [
+                line for line in logs.split("\n") if "openrouter" in line.lower() and ("o3" in line or "model" in line)
+            ]
 
             # Check for successful responses
-            openrouter_response_logs = [line for line in logs.split("\n") if "openrouter" in line.lower() and "response" in line]
+            openrouter_response_logs = [
+                line for line in logs.split("\n") if "openrouter" in line.lower() and "response" in line
+            ]
 
             self.logger.info(f" OpenRouter API logs: {len(openrouter_api_logs)}")
             self.logger.info(f" OpenRouter model logs: {len(openrouter_model_logs)}")
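For reference, the reformatted check_cmd above amounts to the following standalone probe. This is only a sketch: the container name is a placeholder here, whereas the test reads it from self.container_name.

    import subprocess

    # Hypothetical container name; the simulator tests take it from self.container_name.
    CONTAINER = "zen-mcp-server"

    check_cmd = [
        "docker",
        "exec",
        CONTAINER,
        "python",
        "-c",
        'import os; print(f\'OPENAI_KEY:{bool(os.environ.get("OPENAI_API_KEY"))}|OPENROUTER_KEY:{bool(os.environ.get("OPENROUTER_API_KEY"))}\')',
    ]

    result = subprocess.run(check_cmd, capture_output=True, text=True)
    if result.returncode == 0:
        # Output looks like: OPENAI_KEY:True|OPENROUTER_KEY:False
        flags = dict(part.split(":") for part in result.stdout.strip().split("|"))
        has_openai = flags.get("OPENAI_KEY") == "True"
        has_openrouter = flags.get("OPENROUTER_KEY") == "True"
        print(f"OpenAI configured: {has_openai}, OpenRouter configured: {has_openrouter}")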

View File

@@ -8,7 +8,6 @@ Tests that verify the system correctly falls back to OpenRouter when:
 - Auto mode correctly selects OpenRouter models
 """
 
-import json
 import subprocess
 
 from .base_test import BaseSimulatorTest
@@ -45,6 +44,22 @@ class OpenRouterFallbackTest(BaseSimulatorTest):
         try:
             self.logger.info("Test: OpenRouter fallback behavior when only provider available")
 
+            # Check if OpenRouter API key is configured
+            check_cmd = [
+                "docker",
+                "exec",
+                self.container_name,
+                "python",
+                "-c",
+                'import os; print("OPENROUTER_KEY:" + str(bool(os.environ.get("OPENROUTER_API_KEY"))))',
+            ]
+            result = subprocess.run(check_cmd, capture_output=True, text=True)
+
+            if result.returncode == 0 and "OPENROUTER_KEY:False" in result.stdout:
+                self.logger.info(" ⚠️ OpenRouter API key not configured - skipping test")
+                self.logger.info(" This test requires OPENROUTER_API_KEY to be set in .env")
+                return True  # Return True to indicate test is skipped, not failed
+
             # Setup test files
             self.setup_test_files()
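The same key check is inlined again in the next test file below. If it were ever factored into a shared helper, it might look roughly like this; purely an illustration, no such helper exists in this diff.

    import subprocess


    def openrouter_key_configured(container_name: str) -> bool:
        """Return True if OPENROUTER_API_KEY is set inside the container (illustrative helper only)."""
        check_cmd = [
            "docker",
            "exec",
            container_name,
            "python",
            "-c",
            'import os; print("OPENROUTER_KEY:" + str(bool(os.environ.get("OPENROUTER_API_KEY"))))',
        ]
        result = subprocess.run(check_cmd, capture_output=True, text=True)
        return result.returncode == 0 and "OPENROUTER_KEY:True" in result.stdout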
@@ -137,28 +152,29 @@ class OpenRouterFallbackTest(BaseSimulatorTest):
             # Check for provider fallback logs
             fallback_logs = [
-                line for line in logs.split("\n")
-                if "No Gemini API key found" in line or
-                   "No OpenAI API key found" in line or
-                   "Only OpenRouter available" in line or
-                   "Using OpenRouter" in line
+                line
+                for line in logs.split("\n")
+                if "No Gemini API key found" in line
+                or "No OpenAI API key found" in line
+                or "Only OpenRouter available" in line
+                or "Using OpenRouter" in line
             ]
 
             # Check for OpenRouter provider initialization
             provider_logs = [
-                line for line in logs.split("\n")
-                if "OpenRouter provider" in line or
-                   "OpenRouterProvider" in line or
-                   "openrouter.ai/api/v1" in line
+                line
+                for line in logs.split("\n")
+                if "OpenRouter provider" in line or "OpenRouterProvider" in line or "openrouter.ai/api/v1" in line
             ]
 
             # Check for model resolution through OpenRouter
             model_resolution_logs = [
-                line for line in logs.split("\n")
-                if ("Resolved model" in line and "via OpenRouter" in line) or
-                   ("Model alias" in line and "resolved to" in line) or
-                   ("flash" in line and "gemini-flash" in line) or
-                   ("pro" in line and "gemini-pro" in line)
+                line
+                for line in logs.split("\n")
+                if ("Resolved model" in line and "via OpenRouter" in line)
+                or ("Model alias" in line and "resolved to" in line)
+                or ("flash" in line and "gemini-flash" in line)
+                or ("pro" in line and "gemini-pro" in line)
             ]
 
             # Log findings
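All three reformatted comprehensions above follow the same filter-the-server-logs pattern. A generic version could look like the sketch below; this helper is hypothetical and does not appear in the changed files.

    from collections.abc import Callable


    def filter_log_lines(logs: str, predicate: Callable[[str], bool]) -> list[str]:
        """Return only the log lines matching the predicate (hypothetical helper)."""
        return [line for line in logs.split("\n") if predicate(line)]


    # Example, mirroring the fallback check in the hunk above:
    # fallback_logs = filter_log_lines(
    #     logs,
    #     lambda line: "No Gemini API key found" in line
    #     or "No OpenAI API key found" in line
    #     or "Only OpenRouter available" in line
    #     or "Using OpenRouter" in line,
    # )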

View File

@@ -9,7 +9,6 @@ Tests that verify OpenRouter functionality including:
 - Error handling when models are not available
 """
 
-import json
 import subprocess
 
 from .base_test import BaseSimulatorTest
@@ -47,6 +46,22 @@ class OpenRouterModelsTest(BaseSimulatorTest):
         try:
             self.logger.info("Test: OpenRouter model functionality and alias mapping")
 
+            # Check if OpenRouter API key is configured
+            check_cmd = [
+                "docker",
+                "exec",
+                self.container_name,
+                "python",
+                "-c",
+                'import os; print("OPENROUTER_KEY:" + str(bool(os.environ.get("OPENROUTER_API_KEY"))))',
+            ]
+            result = subprocess.run(check_cmd, capture_output=True, text=True)
+
+            if result.returncode == 0 and "OPENROUTER_KEY:False" in result.stdout:
+                self.logger.info(" ⚠️ OpenRouter API key not configured - skipping test")
+                self.logger.info(" This test requires OPENROUTER_API_KEY to be set in .env")
+                return True  # Return True to indicate test is skipped, not failed
+
             # Setup test files for later use
             self.setup_test_files()
@@ -189,15 +204,17 @@ class OpenRouterModelsTest(BaseSimulatorTest):
             # Check for specific model mappings
             flash_mapping_logs = [
-                line for line in logs.split("\n")
-                if ("flash" in line and "google/gemini-flash" in line) or
-                   ("Resolved model" in line and "google/gemini-flash" in line)
+                line
+                for line in logs.split("\n")
+                if ("flash" in line and "google/gemini-flash" in line)
+                or ("Resolved model" in line and "google/gemini-flash" in line)
             ]
 
             pro_mapping_logs = [
-                line for line in logs.split("\n")
-                if ("pro" in line and "google/gemini-pro" in line) or
-                   ("Resolved model" in line and "google/gemini-pro" in line)
+                line
+                for line in logs.split("\n")
+                if ("pro" in line and "google/gemini-pro" in line)
+                or ("Resolved model" in line and "google/gemini-pro" in line)
             ]
 
             # Log findings

View File

@@ -4,29 +4,21 @@ Simple test script to demonstrate model mapping through the MCP server.
 Tests how model aliases (flash, pro, o3) are mapped to OpenRouter models.
 """
 
-import subprocess
 import json
+import subprocess
 import sys
-from typing import Dict, Any
+from typing import Any
 
 
-def call_mcp_server(model: str, message: str = "Hello, which model are you?") -> Dict[str, Any]:
+def call_mcp_server(model: str, message: str = "Hello, which model are you?") -> dict[str, Any]:
     """Call the MCP server with a specific model and return the response."""
 
     # Prepare the request
     request = {
         "jsonrpc": "2.0",
         "method": "completion",
-        "params": {
-            "model": model,
-            "messages": [
-                {
-                    "role": "user",
-                    "content": message
-                }
-            ],
-            "max_tokens": 100
-        },
-        "id": 1
+        "params": {"model": model, "messages": [{"role": "user", "content": message}], "max_tokens": 100},
+        "id": 1,
     }
# Call the server # Call the server
@@ -35,55 +27,36 @@ def call_mcp_server(model: str, message: str = "Hello, which model are you?") ->
     try:
         # Send request to stdin and capture output
         process = subprocess.Popen(
-            cmd,
-            stdin=subprocess.PIPE,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            text=True
+            cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
         )
 
         stdout, stderr = process.communicate(input=json.dumps(request))
 
         if process.returncode != 0:
-            return {
-                "error": f"Server returned non-zero exit code: {process.returncode}",
-                "stderr": stderr
-            }
+            return {"error": f"Server returned non-zero exit code: {process.returncode}", "stderr": stderr}
 
         # Parse the response
         try:
             response = json.loads(stdout)
             return response
         except json.JSONDecodeError:
-            return {
-                "error": "Failed to parse JSON response",
-                "stdout": stdout,
-                "stderr": stderr
-            }
+            return {"error": "Failed to parse JSON response", "stdout": stdout, "stderr": stderr}
 
     except Exception as e:
-        return {
-            "error": f"Failed to call server: {str(e)}"
-        }
+        return {"error": f"Failed to call server: {str(e)}"}
 
 
-def extract_model_info(response: Dict[str, Any]) -> Dict[str, str]:
+def extract_model_info(response: dict[str, Any]) -> dict[str, str]:
     """Extract model information from the response."""
 
     if "error" in response:
-        return {
-            "status": "error",
-            "message": response.get("error", "Unknown error")
-        }
+        return {"status": "error", "message": response.get("error", "Unknown error")}
 
     # Look for result in the response
     result = response.get("result", {})
 
     # Extract relevant information
-    info = {
-        "status": "success",
-        "provider": "unknown",
-        "model": "unknown"
-    }
+    info = {"status": "success", "provider": "unknown", "model": "unknown"}
 
     # Try to find provider and model info in the response
     # This might be in metadata or debug info depending on server implementation
@@ -101,6 +74,7 @@ def extract_model_info(response: Dict[str, Any]) -> Dict[str, str]:
     return info
 
 
 def main():
     """Test model mapping for different aliases."""
@@ -134,5 +108,6 @@ def main():
     print("\nNote: This test assumes the MCP server is configured with OpenRouter.")
     print("The actual model mappings depend on the server configuration.")
 
 
 if __name__ == "__main__":
     main()
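For context, call_mcp_server and extract_model_info are typically driven by a loop like the one below; this is a sketch, since the body of main() is not part of the hunks shown.

    # Hypothetical driver loop; the real main() in the script may differ.
    for alias in ("flash", "pro", "o3"):
        response = call_mcp_server(alias)
        info = extract_model_info(response)
        if info["status"] == "success":
            print(f"{alias}: provider={info['provider']} model={info['model']}")
        else:
            print(f"{alias}: error - {info['message']}")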

View File

@@ -193,5 +193,5 @@ class TestOpenRouterFunctionality:
         provider = OpenRouterProvider(api_key="test-key")
 
         # Registry should be initialized
-        assert hasattr(provider, '_registry')
+        assert hasattr(provider, "_registry")
         assert provider._registry is not None

View File

@@ -195,9 +195,7 @@ class BaseTool(ABC):
                 # Show all aliases so Claude knows every option available
                 all_aliases = sorted(aliases)
                 alias_list = ", ".join(f"'{a}'" for a in all_aliases)
-                model_desc_parts.append(
-                    f"\nOpenRouter models available via aliases: {alias_list}"
-                )
+                model_desc_parts.append(f"\nOpenRouter models available via aliases: {alias_list}")
             else:
                 model_desc_parts.append(
                     "\nOpenRouter models: If configured, you can also use ANY model available on OpenRouter."