diff --git a/README.md b/README.md
index a9b4f93..529c659 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,8 @@
 
 The ultimate development partner for Claude - a Model Context Protocol server that gives Claude access to Google's Gemini 2.5 Pro for extended thinking, code analysis, and problem-solving. **Automatically reads files and directories, passing their contents to Gemini for analysis within its 1M token context.**
 
+**Think of it as Claude Code _for_ Claude Code.**
+
 ## Quick Navigation
 
 - **Getting Started**
@@ -159,7 +161,6 @@ The setup script shows you the exact configuration. It looks like this:
 
 - **Redis** automatically handles conversation memory between requests
 - **AI-to-AI conversations** persist across multiple exchanges
 - **File access** through mounted workspace directory
-- **Fast communication** via `docker exec` to running container
 
 **That's it!** The Docker setup handles all dependencies, Redis configuration, and service management automatically.
diff --git a/config.py b/config.py
index 43500cb..4ab08a3 100644
--- a/config.py
+++ b/config.py
@@ -12,7 +12,9 @@ import os
 
 # Version and metadata
 # These values are used in server responses and for tracking releases
-__version__ = "3.0.0"  # Semantic versioning: MAJOR.MINOR.PATCH
+# IMPORTANT: This is the single source of truth for version and author info
+# setup.py imports these values to avoid duplication
+__version__ = "3.2.0"  # Semantic versioning: MAJOR.MINOR.PATCH
 __updated__ = "2025-06-10"  # Last update date in ISO format
 __author__ = "Fahad Gilani"  # Primary maintainer
diff --git a/docker-compose.yml b/docker-compose.yml
index c3d37eb..f63938b 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -35,12 +35,29 @@ services:
       # and Claude Code could be running from multiple locations at the same time
       - WORKSPACE_ROOT=${WORKSPACE_ROOT:-${HOME}}
       - LOG_LEVEL=${LOG_LEVEL:-INFO}
+      - PYTHONUNBUFFERED=1
     volumes:
       - ${HOME:-/tmp}:/workspace:ro
+      - mcp_logs:/tmp  # Shared volume for logs
     stdin_open: true
     tty: true
     entrypoint: ["python"]
     command: ["server.py"]
 
+  log-monitor:
+    build: .
+    image: gemini-mcp-server:latest
+    container_name: gemini-mcp-log-monitor
+    restart: unless-stopped
+    depends_on:
+      - gemini-mcp
+    environment:
+      - PYTHONUNBUFFERED=1
+    volumes:
+      - mcp_logs:/tmp  # Shared volume for logs
+    entrypoint: ["python"]
+    command: ["log_monitor.py"]
+
 volumes:
-  redis_data:
\ No newline at end of file
+  redis_data:
+  mcp_logs:
\ No newline at end of file
diff --git a/log_monitor.py b/log_monitor.py
new file mode 100644
index 0000000..a1972fb
--- /dev/null
+++ b/log_monitor.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+"""
+Log monitor for MCP server - monitors and displays tool activity
+"""
+
+import os
+import time
+from datetime import datetime
+from pathlib import Path
+
+
+def monitor_mcp_activity():
+    """Monitor MCP server activity by watching the log file"""
+    log_file = "/tmp/mcp_server.log"
+    activity_file = "/tmp/mcp_activity.log"
+    debug_file = "/tmp/gemini_debug.log"
+
+    print(f"[{datetime.now().strftime('%H:%M:%S')}] MCP Log Monitor started")
+    print(f"[{datetime.now().strftime('%H:%M:%S')}] Monitoring: {log_file}")
+    print(f"[{datetime.now().strftime('%H:%M:%S')}] Activity file: {activity_file}")
+    print(f"[{datetime.now().strftime('%H:%M:%S')}] Debug file: {debug_file}")
+    print("-" * 60)
+
+    # Track file positions
+    log_pos = 0
+    activity_pos = 0
+    debug_pos = 0
+
+    # Ensure files exist
+    Path(log_file).touch()
+    Path(activity_file).touch()
+    Path(debug_file).touch()
+
+    while True:
+        try:
+            # Check activity file (most important for tool calls)
+            if os.path.exists(activity_file):
+                with open(activity_file) as f:
+                    f.seek(activity_pos)
+                    new_lines = f.readlines()
+                    activity_pos = f.tell()
+
+                for line in new_lines:
+                    line = line.strip()
+                    if line:
+                        if "TOOL_CALL:" in line:
+                            tool_info = line.split("TOOL_CALL:")[-1].strip()
+                            print(f"[{datetime.now().strftime('%H:%M:%S')}] Tool called: {tool_info}")
+                        elif "TOOL_COMPLETED:" in line:
+                            tool_name = line.split("TOOL_COMPLETED:")[-1].strip()
+                            print(f"[{datetime.now().strftime('%H:%M:%S')}] ✓ Tool completed: {tool_name}")
+                        elif "CONVERSATION_RESUME:" in line:
+                            resume_info = line.split("CONVERSATION_RESUME:")[-1].strip()
+                            print(f"[{datetime.now().strftime('%H:%M:%S')}] Resume: {resume_info}")
+                        elif "CONVERSATION_CONTEXT:" in line:
+                            context_info = line.split("CONVERSATION_CONTEXT:")[-1].strip()
+                            print(f"[{datetime.now().strftime('%H:%M:%S')}] Context: {context_info}")
+                        elif "CONVERSATION_ERROR:" in line:
+                            error_info = line.split("CONVERSATION_ERROR:")[-1].strip()
+                            print(f"[{datetime.now().strftime('%H:%M:%S')}] ❌ Conversation error: {error_info}")
+
+            # Check main log file for errors and warnings
+            if os.path.exists(log_file):
+                with open(log_file) as f:
+                    f.seek(log_pos)
+                    new_lines = f.readlines()
+                    log_pos = f.tell()
+
+                for line in new_lines:
+                    line = line.strip()
+                    if line:
+                        if "ERROR" in line:
+                            print(f"[{datetime.now().strftime('%H:%M:%S')}] ❌ {line}")
+                        elif "WARNING" in line:
+                            print(f"[{datetime.now().strftime('%H:%M:%S')}] ⚠️ {line}")
+                        elif "Gemini API" in line and ("Sending" in line or "Received" in line):
+                            print(f"[{datetime.now().strftime('%H:%M:%S')}] API: {line}")
+
+            # Check debug file
+            if os.path.exists(debug_file):
+                with open(debug_file) as f:
+                    f.seek(debug_pos)
+                    new_lines = f.readlines()
+                    debug_pos = f.tell()
+
+                for line in new_lines:
+                    line = line.strip()
+                    if line:
+                        print(f"[{datetime.now().strftime('%H:%M:%S')}] DEBUG: {line}")
+
+            time.sleep(0.5)  # Check every 500ms
+
+        except KeyboardInterrupt:
+            print(f"\n[{datetime.now().strftime('%H:%M:%S')}] Log monitor stopped")
+            break
+        except Exception as e:
+            print(f"[{datetime.now().strftime('%H:%M:%S')}] Monitor error: {e}")
+            time.sleep(1)
+
+
+if __name__ == "__main__":
+    monitor_mcp_activity()
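The log monitor above and the server changes below communicate through a small string-based contract: `server.py` writes marker-prefixed lines (`TOOL_CALL:`, `TOOL_COMPLETED:`, `CONVERSATION_RESUME:`, `CONVERSATION_CONTEXT:`, `CONVERSATION_ERROR:`) to `/tmp/mcp_activity.log` through a dedicated `mcp_activity` logger, and `log_monitor.py` matches those prefixes by substring. A minimal, illustrative sketch of the producer side (not part of the patch; the example tool name and argument count are made up):

```python
import logging

# Stand-in for the "mcp_activity" logger that server.py configures below.
mcp_logger = logging.getLogger("mcp_activity")
handler = logging.FileHandler("/tmp/mcp_activity.log")
handler.setFormatter(logging.Formatter("%(asctime)s - %(message)s"))
mcp_logger.addHandler(handler)
mcp_logger.setLevel(logging.INFO)

# Writes a line such as "2025-06-10 12:00:00,123 - TOOL_CALL: chat with 3 arguments",
# which the monitor loop renders as "[12:00:00] Tool called: chat with 3 arguments".
mcp_logger.info("TOOL_CALL: chat with 3 arguments")
```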
stopped") + break + except Exception as e: + print(f"[{datetime.now().strftime('%H:%M:%S')}] Monitor error: {e}") + time.sleep(1) + + +if __name__ == "__main__": + monitor_mcp_activity() diff --git a/server.py b/server.py index eebdc18..342b547 100644 --- a/server.py +++ b/server.py @@ -45,6 +45,7 @@ from tools import ( Precommit, ThinkDeepTool, ) +from tools.models import ToolOutput # Configure logging for server operations # Can be controlled via LOG_LEVEL environment variable (DEBUG, INFO, WARNING, ERROR) @@ -52,7 +53,12 @@ log_level = os.getenv("LOG_LEVEL", "INFO").upper() # Configure both console and file logging log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" -logging.basicConfig(level=getattr(logging, log_level, logging.INFO), format=log_format) +logging.basicConfig( + level=getattr(logging, log_level, logging.INFO), + format=log_format, + force=True, # Force reconfiguration if already configured + stream=sys.stderr, # Use stderr to avoid interfering with MCP stdin/stdout protocol +) # Add file handler for Docker log monitoring try: @@ -60,8 +66,17 @@ try: file_handler.setLevel(getattr(logging, log_level, logging.INFO)) file_handler.setFormatter(logging.Formatter(log_format)) logging.getLogger().addHandler(file_handler) + + # Create a special logger for MCP activity tracking + mcp_logger = logging.getLogger("mcp_activity") + mcp_file_handler = logging.FileHandler("/tmp/mcp_activity.log") + mcp_file_handler.setLevel(logging.INFO) + mcp_file_handler.setFormatter(logging.Formatter("%(asctime)s - %(message)s")) + mcp_logger.addHandler(mcp_file_handler) + mcp_logger.setLevel(logging.INFO) + except Exception as e: - print(f"Warning: Could not set up file logging: {e}") + print(f"Warning: Could not set up file logging: {e}", file=sys.stderr) logger = logging.getLogger(__name__) @@ -115,6 +130,7 @@ async def handle_list_tools() -> list[Tool]: Returns: List of Tool objects representing all available tools """ + logger.debug("MCP client requested tool list") tools = [] # Add all registered AI-powered tools from the TOOLS registry @@ -142,6 +158,7 @@ async def handle_list_tools() -> list[Tool]: ] ) + logger.debug(f"Returning {len(tools)} tools to MCP client") return tools @@ -165,19 +182,51 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextCon Returns: List of TextContent objects containing the tool's response """ + logger.info(f"MCP tool call: {name}") + logger.debug(f"MCP tool arguments: {list(arguments.keys())}") + + # Log to activity file for monitoring + try: + mcp_activity_logger = logging.getLogger("mcp_activity") + mcp_activity_logger.info(f"TOOL_CALL: {name} with {len(arguments)} arguments") + except Exception: + pass # Handle thread context reconstruction if continuation_id is present if "continuation_id" in arguments and arguments["continuation_id"]: + continuation_id = arguments["continuation_id"] + logger.debug(f"Resuming conversation thread: {continuation_id}") + + # Log to activity file for monitoring + try: + mcp_activity_logger = logging.getLogger("mcp_activity") + mcp_activity_logger.info(f"CONVERSATION_RESUME: {name} resuming thread {continuation_id}") + except Exception: + pass + arguments = await reconstruct_thread_context(arguments) # Route to AI-powered tools that require Gemini API calls if name in TOOLS: + logger.info(f"Executing tool '{name}' with {len(arguments)} parameter(s)") tool = TOOLS[name] - return await tool.execute(arguments) + result = await tool.execute(arguments) + logger.info(f"Tool '{name}' execution 
completed") + + # Log completion to activity file + try: + mcp_activity_logger = logging.getLogger("mcp_activity") + mcp_activity_logger.info(f"TOOL_COMPLETED: {name}") + except Exception: + pass + return result # Route to utility tools that provide server information elif name == "get_version": - return await handle_get_version() + logger.info(f"Executing utility tool '{name}'") + result = await handle_get_version() + logger.info(f"Utility tool '{name}' execution completed") + return result # Handle unknown tool requests gracefully else: @@ -254,6 +303,14 @@ async def reconstruct_thread_context(arguments: dict[str, Any]) -> dict[str, Any context = get_thread(continuation_id) if not context: logger.warning(f"Thread not found: {continuation_id}") + + # Log to activity file for monitoring + try: + mcp_activity_logger = logging.getLogger("mcp_activity") + mcp_activity_logger.info(f"CONVERSATION_ERROR: Thread {continuation_id} not found or expired") + except Exception: + pass + # Return error asking Claude to restart conversation with full context raise ValueError( f"Conversation thread '{continuation_id}' was not found or has expired. " @@ -297,6 +354,16 @@ async def reconstruct_thread_context(arguments: dict[str, Any]) -> dict[str, Any enhanced_arguments[key] = value logger.info(f"Reconstructed context for thread {continuation_id} (turn {len(context.turns)})") + + # Log to activity file for monitoring + try: + mcp_activity_logger = logging.getLogger("mcp_activity") + mcp_activity_logger.info( + f"CONVERSATION_CONTEXT: Thread {continuation_id} turn {len(context.turns)} - {len(context.turns)} previous turns loaded" + ) + except Exception: + pass + return enhanced_arguments @@ -339,7 +406,10 @@ Available Tools: For updates, visit: https://github.com/BeehiveInnovations/gemini-mcp-server""" - return [TextContent(type="text", text=text)] + # Create standardized tool output + tool_output = ToolOutput(status="success", content=text, content_type="text", metadata={"tool_name": "get_version"}) + + return [TextContent(type="text", text=tool_output.model_dump_json())] async def main(): @@ -356,6 +426,13 @@ async def main(): # Validate that Gemini API key is available before starting configure_gemini() + # Log startup message for Docker log monitoring + logger.info("Gemini MCP Server starting up...") + logger.info(f"Log level: {log_level}") + logger.info(f"Using model: {GEMINI_MODEL}") + logger.info(f"Available tools: {list(TOOLS.keys())}") + logger.info("Server ready - waiting for tool requests...") + # Run the server using stdio transport (standard input/output) # This allows the server to be launched by MCP clients as a subprocess async with stdio_server() as (read_stream, write_stream): diff --git a/setup-docker.sh b/setup-docker.sh index ce7aa3c..1c78d2c 100755 --- a/setup-docker.sh +++ b/setup-docker.sh @@ -125,7 +125,7 @@ else exit 1 fi -echo " - Starting Redis and MCP services..." +echo " - Starting Redis and MCP services... please wait" if $COMPOSE_CMD up -d >/dev/null 2>&1; then echo "✅ Services started successfully!" 
diff --git a/setup.py b/setup.py
index fac6678..87b681a 100644
--- a/setup.py
+++ b/setup.py
@@ -6,6 +6,9 @@ from pathlib import Path
 
 from setuptools import setup
 
+# Import version and author from config to maintain single source of truth
+from config import __author__, __version__
+
 # Read README for long description
 readme_path = Path(__file__).parent / "README.md"
 long_description = ""
@@ -14,11 +17,11 @@ if readme_path.exists():
 
 setup(
     name="gemini-mcp-server",
-    version="3.1.0",
+    version=__version__,
     description="Model Context Protocol server for Google Gemini",
     long_description=long_description,
     long_description_content_type="text/markdown",
-    author="Fahad Gilani",
+    author=__author__,
     python_requires=">=3.10",
     py_modules=["gemini_server"],
     install_requires=[
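Because `setup.py` now imports `__version__` and `__author__` from `config.py`, the config module has to stay importable at build time; it only defines module-level constants, so that appears to hold. A small sanity check one could add alongside this change, sketched here rather than included in the patch:

```python
from config import __version__


def test_version_is_semver() -> None:
    """Guard the single source of truth: setup.py publishes whatever config.py declares."""
    parts = __version__.split(".")
    assert len(parts) == 3 and all(part.isdigit() for part in parts)
```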