diff --git a/providers/openai_provider.py b/providers/openai_provider.py index dc0159a..2d3c0cd 100644 --- a/providers/openai_provider.py +++ b/providers/openai_provider.py @@ -53,11 +53,29 @@ class OpenAIModelProvider(OpenAICompatibleProvider): supports_json_mode=True, supports_images=True, # GPT-5-mini supports vision max_image_size_mb=20.0, # 20MB per OpenAI docs - supports_temperature=True, # Regular models accept temperature parameter + supports_temperature=True, temperature_constraint=create_temperature_constraint("fixed"), description="GPT-5-mini (400K context, 128K output) - Efficient variant with reasoning support", aliases=["gpt5-mini", "gpt5mini", "mini"], ), + "gpt-5-nano": ModelCapabilities( + provider=ProviderType.OPENAI, + model_name="gpt-5-nano", + friendly_name="OpenAI (GPT-5 nano)", + context_window=400_000, + max_output_tokens=128_000, + supports_extended_thinking=True, + supports_system_prompts=True, + supports_streaming=True, + supports_function_calling=True, + supports_json_mode=True, + supports_images=True, + max_image_size_mb=20.0, + supports_temperature=True, + temperature_constraint=create_temperature_constraint("fixed"), + description="GPT-5 nano (400K context, 128K output) - Fastest, cheapest version of GPT-5 for summarization and classification tasks", + aliases=["gpt5nano", "gpt5-nano", "nano"], + ), "o3": ModelCapabilities( provider=ProviderType.OPENAI, model_name="o3",