{ "_README": { "description": "Generated baseline OpenCode Zen catalogue for PAL MCP Server.", "source": "https://opencode.ai/zen/v1/models", "usage": "Generated by scripts/sync_zen_models.py. Curated overrides belong in conf/zen_models.json.", "field_notes": "Entries are conservative discovery data. Curated manifest values override these at runtime." }, "models": [ { "model_name": "big-pickle", "aliases": [], "context_window": 200000, "max_output_tokens": 32000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Big Pickle via OpenCode Zen - Stealth model for coding tasks", "intelligence_score": 13, "allow_code_generation": true }, { "model_name": "claude-3-5-haiku", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model claude-3-5-haiku.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "claude-haiku-4-5", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 5.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Claude Haiku 4.5 via OpenCode Zen - Fast and efficient for coding tasks", "intelligence_score": 16, "allow_code_generation": true }, { "model_name": "claude-opus-4-1", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, 
"max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model claude-opus-4-1.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "claude-opus-4-5", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 5.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Claude Opus 4.5 via OpenCode Zen - Anthropic's frontier reasoning model for complex software engineering", "intelligence_score": 18, "allow_code_generation": true }, { "model_name": "claude-opus-4-6", "aliases": [], "context_window": 1000000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model claude-opus-4-6.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "claude-sonnet-4", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model claude-sonnet-4.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "claude-sonnet-4-5", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 5.0, "supports_temperature": true, "temperature_constraint": "range", 
"description": "Claude Sonnet 4.5 via OpenCode Zen - Balanced performance for coding and general tasks", "intelligence_score": 17, "allow_code_generation": true }, { "model_name": "claude-sonnet-4-6", "aliases": [], "context_window": 1000000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model claude-sonnet-4-6.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "gemini-3-flash", "aliases": [], "context_window": 1048576, "max_output_tokens": 65536, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gemini-3-flash.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "gemini-3-pro", "aliases": [], "context_window": 1000000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 10.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Gemini 3 Pro via OpenCode Zen - Google's multimodal model with large context", "intelligence_score": 16, "allow_code_generation": true }, { "model_name": "gemini-3.1-pro", "aliases": [], "context_window": 1048576, "max_output_tokens": 65536, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gemini-3.1-pro.", 
"intelligence_score": 10, "allow_code_generation": false }, { "model_name": "glm-4.6", "aliases": [], "context_window": 205000, "max_output_tokens": 32000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "GLM 4.6 via OpenCode Zen - High-performance model for coding and reasoning", "intelligence_score": 15, "allow_code_generation": true }, { "model_name": "glm-4.7", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model glm-4.7.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "glm-5", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model glm-5.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "gpt-5", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5-codex", "aliases": [], "context_window": 400000, "max_output_tokens": 
128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5-codex.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5-nano", "aliases": [], "context_window": 400000, "max_output_tokens": 32000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "GPT 5 Nano via OpenCode Zen - Lightweight GPT model", "intelligence_score": 12, "allow_code_generation": true }, { "model_name": "gpt-5.1", "aliases": [], "context_window": 400000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "GPT 5.1 via OpenCode Zen - Latest GPT model for general AI tasks", "intelligence_score": 16, "allow_code_generation": true, "use_openai_response_api": true }, { "model_name": "gpt-5.1-codex", "aliases": [], "context_window": 400000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "GPT 5.1 Codex via OpenCode Zen - Specialized for code generation and understanding", "intelligence_score": 17, "allow_code_generation": true, "use_openai_response_api": true }, { "model_name": "gpt-5.1-codex-max", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, 
"supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.1-codex-max.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.1-codex-mini", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.1-codex-mini.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.2", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.2.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.2-codex", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.2-codex.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.3-codex", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, 
"supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.3-codex.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.3-codex-spark", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.3-codex-spark.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.4", "aliases": [], "context_window": 1050000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.4.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.4-mini", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.4-mini.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.4-nano", "aliases": [], "context_window": 400000, "max_output_tokens": 128000, "supports_extended_thinking": true, 
"supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.4-nano.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "gpt-5.4-pro", "aliases": [], "context_window": 1050000, "max_output_tokens": 128000, "supports_extended_thinking": true, "supports_json_mode": true, "supports_function_calling": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model gpt-5.4-pro.", "intelligence_score": 10, "allow_code_generation": false, "use_openai_response_api": true }, { "model_name": "kimi-k2", "aliases": [], "context_window": 400000, "max_output_tokens": 32000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Kimi K2 via OpenCode Zen - Advanced reasoning model", "intelligence_score": 15, "allow_code_generation": true }, { "model_name": "kimi-k2-thinking", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model kimi-k2-thinking.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "kimi-k2.5", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, 
"max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model kimi-k2.5.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "mimo-v2-flash-free", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model mimo-v2-flash-free.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "mimo-v2-omni-free", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model mimo-v2-omni-free.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "mimo-v2-pro-free", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model mimo-v2-pro-free.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "minimax-m2.1", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline 
metadata for OpenCode Zen model minimax-m2.1.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "minimax-m2.5", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model minimax-m2.5.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "minimax-m2.5-free", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model minimax-m2.5-free.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "nemotron-3-super-free", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model nemotron-3-super-free.", "intelligence_score": 10, "allow_code_generation": false }, { "model_name": "qwen3.6-plus-free", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model qwen3.6-plus-free.", "intelligence_score": 10, "allow_code_generation": false }, { 
"model_name": "trinity-large-preview-free", "aliases": [], "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_json_mode": true, "supports_function_calling": true, "supports_images": false, "max_image_size_mb": 0.0, "supports_temperature": true, "temperature_constraint": "range", "description": "Generated baseline metadata for OpenCode Zen model trinity-large-preview-free.", "intelligence_score": 10, "allow_code_generation": false } ] }