feat: added `intelligence_score` to the model capabilities schema; a 1-20 number that can be specified to influence the sort order of models presented to the CLI in auto-selection mode
fix: re-introduced the model definition into the schema, but more intelligently: only a summary is generated per tool; required to ensure the CLI calls and uses the correct model
fix: removed the `model` param from tools where it wasn't needed
fix: enforced adherence to `*_ALLOWED_MODELS` by advertising only the allowed models to the CLI
fix: removed duplicates across providers when passing canonical names back to the CLI; the first enabled provider wins
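To make the behaviour described above concrete, here is a minimal sketch of how the advertised model list could be assembled: filter to the allowed models, drop duplicate canonical names with the first enabled provider winning, and order by intelligence_score. The names below (ModelCapabilities, Provider, build_model_list) are illustrative assumptions rather than code from this repository, and "higher score sorts first" is likewise an assumption about the auto-selection ordering.

from dataclasses import dataclass


@dataclass
class ModelCapabilities:
    # Hypothetical stand-in for a capabilities schema entry; only the fields
    # needed for this sketch are modelled.
    canonical_name: str
    intelligence_score: int  # 1-20; assumed to sort higher scores earlier in auto mode


@dataclass
class Provider:
    # Hypothetical provider record, kept in registration order.
    name: str
    enabled: bool
    models: list


def build_model_list(providers: list, allowed_names: set) -> list:
    """Assemble the model list advertised to the CLI (illustrative only)."""
    chosen = {}
    for provider in providers:
        if not provider.enabled:
            continue
        for model in provider.models:
            # Advertise only models permitted by *_ALLOWED_MODELS.
            if model.canonical_name not in allowed_names:
                continue
            # Deduplicate across providers: the first enabled provider wins.
            chosen.setdefault(model.canonical_name, model)
    # Assumed ordering: higher intelligence_score listed first for auto selection.
    return sorted(chosen.values(), key=lambda m: m.intelligence_score, reverse=True)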
@@ -26,6 +26,7 @@ from utils.conversation_memory import (
     get_conversation_image_list,
     get_thread,
 )
+from utils.model_context import ModelContext


 @pytest.mark.no_mock_provider
@@ -180,17 +181,18 @@ class TestImageSupportIntegration:

         try:
             # Test with an invalid model name that doesn't exist in any provider
-            result = tool._validate_image_limits(small_images, "non-existent-model-12345")
+            # Use model_context parameter name (not positional)
+            result = tool._validate_image_limits(small_images, model_context=ModelContext("non-existent-model-12345"))
             # Should return error because model not available or doesn't support images
             assert result is not None
             assert result["status"] == "error"
             assert "is not available" in result["content"] or "does not support image processing" in result["content"]

             # Test that empty/None images always pass regardless of model
-            result = tool._validate_image_limits([], "any-model")
+            result = tool._validate_image_limits([], model_context=ModelContext("gemini-2.5-pro"))
             assert result is None

-            result = tool._validate_image_limits(None, "any-model")
+            result = tool._validate_image_limits(None, model_context=ModelContext("gemini-2.5-pro"))
             assert result is None

         finally:
@@ -215,7 +217,7 @@ class TestImageSupportIntegration:
             small_image_path = temp_file.name

         # Test with the default model from test environment (gemini-2.5-flash)
-        result = tool._validate_image_limits([small_image_path], "gemini-2.5-flash")
+        result = tool._validate_image_limits([small_image_path], ModelContext("gemini-2.5-flash"))
         assert result is None  # Should pass for Gemini models

         # Create 150MB image (over typical limits)
@@ -223,7 +225,7 @@ class TestImageSupportIntegration:
             temp_file.write(b"\x00" * (150 * 1024 * 1024))  # 150MB
             large_image_path = temp_file.name

-        result = tool._validate_image_limits([large_image_path], "gemini-2.5-flash")
+        result = tool._validate_image_limits([large_image_path], ModelContext("gemini-2.5-flash"))
         # Large images should fail validation
         assert result is not None
         assert result["status"] == "error"
@@ -429,14 +431,14 @@ class TestImageSupportIntegration:
         images = [data_url]

         # Test with a dummy model that doesn't exist in any provider
-        result = tool._validate_image_limits(images, "test-dummy-model-name")
+        result = tool._validate_image_limits(images, ModelContext("test-dummy-model-name"))
         # Should return error because model not available or doesn't support images
         assert result is not None
         assert result["status"] == "error"
         assert "is not available" in result["content"] or "does not support image processing" in result["content"]

         # Test with another non-existent model to check error handling
-        result = tool._validate_image_limits(images, "another-dummy-model")
+        result = tool._validate_image_limits(images, ModelContext("another-dummy-model"))
         # Should return error because model not available
         assert result is not None
         assert result["status"] == "error"
@@ -446,11 +448,11 @@ class TestImageSupportIntegration:
         tool = ChatTool()

         # Empty list should not fail validation (no need for provider setup)
-        result = tool._validate_image_limits([], "test_model")
+        result = tool._validate_image_limits([], ModelContext("gemini-2.5-pro"))
         assert result is None

         # None should not fail validation (no need for provider setup)
-        result = tool._validate_image_limits(None, "test_model")
+        result = tool._validate_image_limits(None, ModelContext("gemini-2.5-pro"))
         assert result is None

     @patch("utils.conversation_memory.get_storage")
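For orientation, the migrated call pattern from the hunks above, condensed into a standalone snippet. The tools.chat import path for ChatTool and the example image path are assumptions; only utils.model_context.ModelContext appears in this diff.

# Condensed from the test changes above; the ChatTool import path is assumed.
from tools.chat import ChatTool
from utils.model_context import ModelContext

tool = ChatTool()

# Image limits are now validated against a ModelContext rather than a bare model-name string.
result = tool._validate_image_limits(["/tmp/example.png"], model_context=ModelContext("gemini-2.5-flash"))
if result is not None:
    # A dict with status/content signals that the model is unavailable,
    # does not support images, or that the images exceed its size limits.
    assert result["status"] == "error"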