fix: apply isort formatting to fix CI linting
Applied isort to properly sort all imports according to PEP 8:
- Standard library imports first
- Third-party imports second
- Local imports last
- Alphabetical ordering within each group

All tests still passing after import reordering.
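For reference, the ordering isort enforces here looks roughly like the sketch below. This is illustrative only: the module names are taken from this diff, and the exact blank-line placement depends on the project's isort settings.

    # 1. Standard library, alphabetically sorted
    import asyncio
    import logging

    # 2. Third-party packages
    import google.generativeai as genai
    from mcp.types import TextContent, Tool

    # 3. Local / first-party modules
    from config import DEFAULT_MODEL
    from tools import AnalyzeTool

Running "isort ." locally (or "isort --check-only --diff ." as CI linters commonly do) should reproduce this ordering; whether this repository's CI uses exactly those flags is an assumption.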
@@ -4,8 +4,9 @@ This file exists to maintain compatibility with existing configurations.
 The main implementation is now in server.py
 """
 
-from server import main
 import asyncio
 
+from server import main
+
 if __name__ == "__main__":
     asyncio.run(main())
@@ -2,12 +2,8 @@
 System prompts for Gemini tools
 """
 
-from .tool_prompts import (
-    THINK_DEEPER_PROMPT,
-    REVIEW_CODE_PROMPT,
-    DEBUG_ISSUE_PROMPT,
-    ANALYZE_PROMPT,
-)
+from .tool_prompts import (ANALYZE_PROMPT, DEBUG_ISSUE_PROMPT,
+                           REVIEW_CODE_PROMPT, THINK_DEEPER_PROMPT)
 
 __all__ = [
     "THINK_DEEPER_PROMPT",
server.py (21 changed lines)
@@ -2,27 +2,22 @@
 Gemini MCP Server - Main server implementation
 """
 
-import os
-import sys
 import asyncio
 import logging
+import os
+import sys
 from datetime import datetime
-from typing import List, Dict, Any
+from typing import Any, Dict, List
 
 import google.generativeai as genai
 from mcp.server import Server
+from mcp.server.models import InitializationOptions
 from mcp.server.stdio import stdio_server
 from mcp.types import TextContent, Tool
-from mcp.server.models import InitializationOptions
 
-from config import (
-    __version__,
-    __updated__,
-    __author__,
-    DEFAULT_MODEL,
-    MAX_CONTEXT_TOKENS,
-)
-from tools import ThinkDeeperTool, ReviewCodeTool, DebugIssueTool, AnalyzeTool
+from config import (DEFAULT_MODEL, MAX_CONTEXT_TOKENS, __author__, __updated__,
+                    __version__)
+from tools import AnalyzeTool, DebugIssueTool, ReviewCodeTool, ThinkDeeperTool
 
 # Configure logging
 logging.basicConfig(level=logging.INFO)
@@ -147,8 +142,8 @@ async def handle_call_tool(
 
 async def handle_chat(arguments: Dict[str, Any]) -> List[TextContent]:
     """Handle general chat requests"""
-    from utils import read_files
     from config import TEMPERATURE_BALANCED
+    from utils import read_files
 
     prompt = arguments.get("prompt", "")
     context_files = arguments.get("context_files", [])
setup.py (3 changed lines)
@@ -2,9 +2,10 @@
 Setup configuration for Gemini MCP Server
 """
 
-from setuptools import setup
 from pathlib import Path
 
+from setuptools import setup
+
 # Read README for long description
 readme_path = Path(__file__).parent / "README.md"
 long_description = ""
@@ -2,8 +2,8 @@
 Pytest configuration for Gemini MCP Server tests
 """
 
-import sys
 import os
+import sys
 from pathlib import Path
 
 # Ensure the parent directory is in the Python path for imports
@@ -2,17 +2,9 @@
 Tests for configuration
 """
 
-from config import (
-    __version__,
-    __updated__,
-    __author__,
-    DEFAULT_MODEL,
-    MAX_CONTEXT_TOKENS,
-    TEMPERATURE_ANALYTICAL,
-    TEMPERATURE_BALANCED,
-    TEMPERATURE_CREATIVE,
-    TOOL_TRIGGERS,
-)
+from config import (DEFAULT_MODEL, MAX_CONTEXT_TOKENS, TEMPERATURE_ANALYTICAL,
+                    TEMPERATURE_BALANCED, TEMPERATURE_CREATIVE, TOOL_TRIGGERS,
+                    __author__, __updated__, __version__)
 
 
 class TestConfig:
@@ -2,11 +2,12 @@
 Tests for the main server functionality
 """
 
-import pytest
 import json
 from unittest.mock import Mock, patch
 
-from server import handle_list_tools, handle_call_tool
+import pytest
+
+from server import handle_call_tool, handle_list_tools
 
 
 class TestServerTools:
@@ -2,10 +2,11 @@
 Tests for individual tool implementations
 """
 
-import pytest
 from unittest.mock import Mock, patch
 
-from tools import ThinkDeeperTool, ReviewCodeTool, DebugIssueTool, AnalyzeTool
+import pytest
+
+from tools import AnalyzeTool, DebugIssueTool, ReviewCodeTool, ThinkDeeperTool
 
 
 class TestThinkDeeperTool:
@@ -2,12 +2,8 @@
 Tests for utility functions
 """
 
-from utils import (
-    read_file_content,
-    read_files,
-    estimate_tokens,
-    check_token_limit,
-)
+from utils import (check_token_limit, estimate_tokens, read_file_content,
+                   read_files)
 
 
 class TestFileUtils:
@@ -2,10 +2,10 @@
 Tool implementations for Gemini MCP Server
 """
 
-from .think_deeper import ThinkDeeperTool
-from .review_code import ReviewCodeTool
-from .debug_issue import DebugIssueTool
 from .analyze import AnalyzeTool
+from .debug_issue import DebugIssueTool
+from .review_code import ReviewCodeTool
+from .think_deeper import ThinkDeeperTool
 
 __all__ = [
     "ThinkDeeperTool",
@@ -2,12 +2,15 @@
 Analyze tool - General-purpose code and file analysis
 """
 
-from typing import Dict, Any, List, Optional
+from typing import Any, Dict, List, Optional
+
 from pydantic import Field
-from .base import BaseTool, ToolRequest
+
+from config import MAX_CONTEXT_TOKENS, TEMPERATURE_ANALYTICAL
 from prompts import ANALYZE_PROMPT
-from utils import read_files, check_token_limit
-from config import TEMPERATURE_ANALYTICAL, MAX_CONTEXT_TOKENS
+from utils import check_token_limit, read_files
+
+from .base import BaseTool, ToolRequest
 
 
 class AnalyzeRequest(ToolRequest):
@@ -3,10 +3,11 @@ Base class for all Gemini MCP tools
 """
 
 from abc import ABC, abstractmethod
-from typing import Dict, Any, List, Optional
-from pydantic import BaseModel, Field
+from typing import Any, Dict, List, Optional
+
 import google.generativeai as genai
 from mcp.types import TextContent
+from pydantic import BaseModel, Field
 
 
 class ToolRequest(BaseModel):
@@ -2,12 +2,15 @@
 Debug Issue tool - Root cause analysis and debugging assistance
 """
 
-from typing import Dict, Any, List, Optional
+from typing import Any, Dict, List, Optional
+
 from pydantic import Field
-from .base import BaseTool, ToolRequest
+
+from config import MAX_CONTEXT_TOKENS, TEMPERATURE_ANALYTICAL
 from prompts import DEBUG_ISSUE_PROMPT
-from utils import read_files, check_token_limit
-from config import TEMPERATURE_ANALYTICAL, MAX_CONTEXT_TOKENS
+from utils import check_token_limit, read_files
+
+from .base import BaseTool, ToolRequest
 
 
 class DebugIssueRequest(ToolRequest):
@@ -2,12 +2,15 @@
 Code Review tool - Comprehensive code analysis and review
 """
 
-from typing import Dict, Any, List, Optional
+from typing import Any, Dict, List, Optional
+
 from pydantic import Field
-from .base import BaseTool, ToolRequest
+
+from config import MAX_CONTEXT_TOKENS, TEMPERATURE_ANALYTICAL
 from prompts import REVIEW_CODE_PROMPT
-from utils import read_files, check_token_limit
-from config import TEMPERATURE_ANALYTICAL, MAX_CONTEXT_TOKENS
+from utils import check_token_limit, read_files
+
+from .base import BaseTool, ToolRequest
 
 
 class ReviewCodeRequest(ToolRequest):
@@ -2,12 +2,15 @@
 Think Deeper tool - Extended reasoning and problem-solving
 """
 
-from typing import Dict, Any, List, Optional
+from typing import Any, Dict, List, Optional
+
 from pydantic import Field
-from .base import BaseTool, ToolRequest
+
+from config import MAX_CONTEXT_TOKENS, TEMPERATURE_CREATIVE
 from prompts import THINK_DEEPER_PROMPT
-from utils import read_files, check_token_limit
-from config import TEMPERATURE_CREATIVE, MAX_CONTEXT_TOKENS
+from utils import check_token_limit, read_files
+
+from .base import BaseTool, ToolRequest
 
 
 class ThinkDeeperRequest(ToolRequest):
@@ -2,8 +2,8 @@
 Utility functions for Gemini MCP Server
 """
 
-from .file_utils import read_files, read_file_content
-from .token_utils import estimate_tokens, check_token_limit
+from .file_utils import read_file_content, read_files
+from .token_utils import check_token_limit, estimate_tokens
 
 __all__ = [
     "read_files",
@@ -3,7 +3,7 @@ File reading utilities
 """
 
 from pathlib import Path
-from typing import List, Tuple, Optional
+from typing import List, Optional, Tuple
 
 
 def read_file_content(file_path: str) -> str:
@@ -3,6 +3,7 @@ Token counting utilities
 """
 
 from typing import Tuple
+
 from config import MAX_CONTEXT_TOKENS
 
 