feat!: Full code can now be generated by an external model and shared with the AI tool (Claude Code / Codex etc)!

Model definitions now support a new `allow_code_generation` flag, to be used only with higher-reasoning models such as GPT-5-Pro and Gemini 2.5 Pro.

When `true`, the `chat` tool can request that the external model generate a full implementation, an update, or instructions, and then share the result with the calling agent.

This effectively allows us to utilize more powerful models, such as GPT-5-Pro (available via the API or as part of the $200 Pro plan within the ChatGPT app), to generate code or entire implementations for us.
This commit is contained in:
Fahad
2025-10-07 18:49:13 +04:00
parent 04f7ce5b03
commit ece8a5ebed
29 changed files with 1008 additions and 122 deletions

View File

@@ -52,6 +52,9 @@ from tools.simple.base import SimpleTool
class ChatRequest(ToolRequest):
    """Request payload accepted by the chat tool."""

    # The user's question or idea for the external model.
    prompt: str = Field(..., description="Your question or idea.")
    # Optional file paths to include as context; defaults to an empty list.
    files: list[str] | None = Field(default_factory=list)
    # Required (`...`): destination directory for any generated code.
    working_directory: str = Field(
        ..., description="Absolute full directory path where the assistant AI can save generated code for implementation."
    )
class ChatTool(SimpleTool):
def get_name(self) -> str: # required by BaseTool
@@ -67,10 +70,17 @@ class ChatTool(SimpleTool):
return ChatRequest
def get_tool_fields(self) -> dict[str, dict[str, object]]:
    """Return the schema field definitions exposed by this tool.

    Bug fixed: a stale pre-`working_directory` ``return`` preceded this
    one, making the full schema below unreachable and silently dropping
    the `working_directory` field that `get_required_fields` declares
    as required. The dead return has been removed.
    """
    return {
        "prompt": {"type": "string", "description": "Your question."},
        # Shared files-field schema defined on the base tool class.
        "files": SimpleTool.FILES_FIELD,
        "working_directory": {
            "type": "string",
            "description": "Absolute full directory path where the assistant AI can save generated code for implementation.",
        },
    }
def get_required_fields(self) -> list[str]:
    """Return the field names callers must always supply.

    Bug fixed: a stale ``return ["prompt"]`` shadowed the correct list,
    so `working_directory` — declared required (``...``) on
    `ChatRequest` — was never reported as required. The dead return has
    been removed so the schema and the request model agree.
    """
    return ["prompt", "working_directory"]
async def prepare_prompt(self, request: ChatRequest) -> str:
    # Delegate entirely to the shared chat-style prompt builder — presumably
    # inherited from SimpleTool (not visible here; confirm against the base
    # class). This tool adds no prompt assembly of its own.
    return self.prepare_chat_style_prompt(request)