From 82f6d290367f0ebda62012bc369165f87ba60e65 Mon Sep 17 00:00:00 2001
From: Fahad
Date: Mon, 9 Jun 2025 06:48:28 +0400
Subject: [PATCH] feat: add system prompt to chat tool for better collaborative thinking
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Add CHAT_PROMPT to establish Gemini as senior developer partner
- Update handle_chat to use system prompt for consistent context
- Emphasize edge case exploration and deep thinking in prompts
- Enable thoughtful brainstorming and validation capabilities

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude
---
 prompts/__init__.py     |  3 ++-
 prompts/tool_prompts.py | 22 ++++++++++++++++++++++
 server.py               | 10 +++++++---
 3 files changed, 31 insertions(+), 4 deletions(-)

diff --git a/prompts/__init__.py b/prompts/__init__.py
index ae950c7..7818681 100644
--- a/prompts/__init__.py
+++ b/prompts/__init__.py
@@ -2,7 +2,7 @@
 System prompts for Gemini tools
 """
 
-from .tool_prompts import (ANALYZE_PROMPT, DEBUG_ISSUE_PROMPT,
+from .tool_prompts import (ANALYZE_PROMPT, CHAT_PROMPT, DEBUG_ISSUE_PROMPT,
                            REVIEW_CODE_PROMPT, THINK_DEEPER_PROMPT)
 
 __all__ = [
@@ -10,4 +10,5 @@ __all__ = [
     "REVIEW_CODE_PROMPT",
     "DEBUG_ISSUE_PROMPT",
     "ANALYZE_PROMPT",
+    "CHAT_PROMPT",
 ]
diff --git a/prompts/tool_prompts.py b/prompts/tool_prompts.py
index 137dc78..5fa0e67 100644
--- a/prompts/tool_prompts.py
+++ b/prompts/tool_prompts.py
@@ -111,3 +111,25 @@ Focus on:
 
 Be thorough but concise. Prioritize the most important findings and always provide
 concrete examples and suggestions for improvement."""
+
+CHAT_PROMPT = """You are a senior development partner and collaborative thinking companion to Claude Code.
+You excel at brainstorming, validating ideas, and providing thoughtful second opinions on technical decisions.
+
+Your collaborative approach:
+1. Engage deeply with shared ideas - build upon, extend, and explore alternatives
+2. Think through edge cases, failure modes, and unintended consequences
+3. Provide balanced perspectives considering trade-offs and implications
+4. Challenge assumptions constructively while respecting the existing approach
+5. Offer concrete examples and actionable insights
+
+When brainstorming or discussing:
+- Consider multiple angles and approaches
+- Identify potential pitfalls early
+- Suggest creative solutions and alternatives
+- Think about scalability, maintainability, and real-world usage
+- Draw from industry best practices and patterns
+
+Always approach discussions as a peer - be direct, technical, and thorough. Your goal is to be
+the ideal thinking partner who helps explore ideas deeply, validates approaches, and uncovers
+insights that might be missed in solo analysis. Think step by step through complex problems
+and don't hesitate to explore tangential but relevant considerations."""
diff --git a/server.py b/server.py
index 22b61ac..4fa75f2 100644
--- a/server.py
+++ b/server.py
@@ -146,17 +146,21 @@ async def handle_call_tool(
 async def handle_chat(arguments: Dict[str, Any]) -> List[TextContent]:
     """Handle general chat requests"""
     from config import TEMPERATURE_BALANCED
+    from prompts import CHAT_PROMPT
     from utils import read_files
 
     prompt = arguments.get("prompt", "")
     context_files = arguments.get("context_files", [])
     temperature = arguments.get("temperature", TEMPERATURE_BALANCED)
 
-    # Build context if files provided
-    full_prompt = prompt
+    # Build the full prompt with system context
+    user_content = prompt
    if context_files:
         file_content, _ = read_files(context_files)
-        full_prompt = f"{prompt}\n\n=== CONTEXT FILES ===\n{file_content}\n=== END CONTEXT ==="
+        user_content = f"{prompt}\n\n=== CONTEXT FILES ===\n{file_content}\n=== END CONTEXT ==="
+
+    # Combine system prompt with user content
+    full_prompt = f"{CHAT_PROMPT}\n\n=== USER REQUEST ===\n{user_content}\n=== END REQUEST ===\n\nPlease provide a thoughtful, comprehensive response:"
 
     try:
         model = genai.GenerativeModel(
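
Note: the final hunk above is truncated at the genai.GenerativeModel( call. As a rough illustration of how the combined prompt could then be sent to Gemini, here is a minimal, hypothetical sketch using the google-generativeai SDK. The standalone send_chat helper, the model name, and the default temperature are assumptions for illustration only, not code from this patch or repository.

# Illustrative sketch only; not the repository's actual code.
# Assumes the google-generativeai SDK and an already-configured API key.
import google.generativeai as genai

CHAT_PROMPT = "You are a senior development partner..."  # abbreviated for the sketch

def send_chat(prompt: str, temperature: float = 0.5) -> str:
    # Mirror handle_chat: wrap the user request with the system prompt.
    full_prompt = (
        f"{CHAT_PROMPT}\n\n=== USER REQUEST ===\n{prompt}\n=== END REQUEST ===\n\n"
        "Please provide a thoughtful, comprehensive response:"
    )
    model = genai.GenerativeModel(
        model_name="gemini-1.5-pro",  # assumed model; the server may configure this elsewhere
        generation_config=genai.types.GenerationConfig(temperature=temperature),
    )
    return model.generate_content(full_prompt).text

# Usage (after genai.configure(api_key=...)):
#   print(send_chat("Sanity-check my plan for sharding the user table"))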