fix: handle 429 response https://github.com/BeehiveInnovations/zen-mcp-server/issues/273
This commit is contained in:
@@ -23,8 +23,7 @@ class GeminiJSONParser(BaseParser):
|
|||||||
raise ParserError(f"Failed to decode Gemini CLI JSON output: {exc}") from exc
|
raise ParserError(f"Failed to decode Gemini CLI JSON output: {exc}") from exc
|
||||||
|
|
||||||
response = payload.get("response")
|
response = payload.get("response")
|
||||||
if not isinstance(response, str) or not response.strip():
|
response_text = response.strip() if isinstance(response, str) else ""
|
||||||
raise ParserError("Gemini CLI response is missing a textual 'response' field")
|
|
||||||
|
|
||||||
metadata: dict[str, Any] = {"raw": payload}
|
metadata: dict[str, Any] = {"raw": payload}
|
||||||
|
|
||||||
@@ -43,7 +42,57 @@ class GeminiJSONParser(BaseParser):
|
|||||||
if isinstance(api_stats, dict):
|
if isinstance(api_stats, dict):
|
||||||
metadata["latency_ms"] = api_stats.get("totalLatencyMs")
|
metadata["latency_ms"] = api_stats.get("totalLatencyMs")
|
||||||
|
|
||||||
if stderr and stderr.strip():
|
if response_text:
|
||||||
metadata["stderr"] = stderr.strip()
|
if stderr and stderr.strip():
|
||||||
|
metadata["stderr"] = stderr.strip()
|
||||||
|
return ParsedCLIResponse(content=response_text, metadata=metadata)
|
||||||
|
|
||||||
return ParsedCLIResponse(content=response.strip(), metadata=metadata)
|
fallback_message, extra_metadata = self._build_fallback_message(payload, stderr)
|
||||||
|
if fallback_message:
|
||||||
|
metadata.update(extra_metadata)
|
||||||
|
if stderr and stderr.strip():
|
||||||
|
metadata["stderr"] = stderr.strip()
|
||||||
|
return ParsedCLIResponse(content=fallback_message, metadata=metadata)
|
||||||
|
|
||||||
|
raise ParserError("Gemini CLI response is missing a textual 'response' field")
|
||||||
|
|
||||||
|
def _build_fallback_message(self, payload: dict[str, Any], stderr: str) -> tuple[str | None, dict[str, Any]]:
|
||||||
|
"""Derive a human friendly message when Gemini returns empty content."""
|
||||||
|
|
||||||
|
stderr_text = stderr.strip() if stderr else ""
|
||||||
|
stderr_lower = stderr_text.lower()
|
||||||
|
extra_metadata: dict[str, Any] = {"empty_response": True}
|
||||||
|
|
||||||
|
if "429" in stderr_lower or "rate limit" in stderr_lower:
|
||||||
|
extra_metadata["rate_limit_status"] = 429
|
||||||
|
message = (
|
||||||
|
"Gemini request returned no content because the API reported a 429 rate limit. "
|
||||||
|
"Retry after reducing the request size or waiting for quota to replenish."
|
||||||
|
)
|
||||||
|
return message, extra_metadata
|
||||||
|
|
||||||
|
stats = payload.get("stats")
|
||||||
|
if isinstance(stats, dict):
|
||||||
|
models = stats.get("models")
|
||||||
|
if isinstance(models, dict) and models:
|
||||||
|
first_model = next(iter(models.values()))
|
||||||
|
if isinstance(first_model, dict):
|
||||||
|
api_stats = first_model.get("api")
|
||||||
|
if isinstance(api_stats, dict):
|
||||||
|
total_errors = api_stats.get("totalErrors")
|
||||||
|
total_requests = api_stats.get("totalRequests")
|
||||||
|
if isinstance(total_errors, int) and total_errors > 0:
|
||||||
|
extra_metadata["api_total_errors"] = total_errors
|
||||||
|
if isinstance(total_requests, int):
|
||||||
|
extra_metadata["api_total_requests"] = total_requests
|
||||||
|
message = (
|
||||||
|
"Gemini CLI returned no textual output. The API reported "
|
||||||
|
f"{total_errors} error(s); see stderr for details."
|
||||||
|
)
|
||||||
|
return message, extra_metadata
|
||||||
|
|
||||||
|
if stderr_text:
|
||||||
|
message = "Gemini CLI returned no textual output. Raw stderr was preserved for troubleshooting."
|
||||||
|
return message, extra_metadata
|
||||||
|
|
||||||
|
return None, extra_metadata
|
||||||
|
|||||||
48
tests/test_clink_gemini_parser.py
Normal file
48
tests/test_clink_gemini_parser.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
"""Tests for the Gemini CLI JSON parser."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from clink.parsers.gemini import GeminiJSONParser, ParserError
|
||||||
|
|
||||||
|
|
||||||
|
def _build_rate_limit_stdout() -> str:
|
||||||
|
return (
|
||||||
|
"{\n"
|
||||||
|
' "response": "",\n'
|
||||||
|
' "stats": {\n'
|
||||||
|
' "models": {\n'
|
||||||
|
' "gemini-2.5-pro": {\n'
|
||||||
|
' "api": {\n'
|
||||||
|
' "totalRequests": 5,\n'
|
||||||
|
' "totalErrors": 5,\n'
|
||||||
|
' "totalLatencyMs": 13319\n'
|
||||||
|
" },\n"
|
||||||
|
' "tokens": {"prompt": 0, "candidates": 0, "total": 0, "cached": 0, "thoughts": 0, "tool": 0}\n'
|
||||||
|
" }\n"
|
||||||
|
" },\n"
|
||||||
|
' "tools": {"totalCalls": 0},\n'
|
||||||
|
' "files": {"totalLinesAdded": 0, "totalLinesRemoved": 0}\n'
|
||||||
|
" }\n"
|
||||||
|
"}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_gemini_parser_handles_rate_limit_empty_response():
    """An empty response plus a 429 in stderr yields a friendly fallback message."""
    parser = GeminiJSONParser()
    stderr = "Attempt 1 failed with status 429. Retrying with backoff... ApiError: quota exceeded"

    parsed = parser.parse(_build_rate_limit_stdout(), stderr)

    # The fallback message should mention the rate limit and the metadata
    # should flag both the 429 and the empty response, preserving stderr.
    assert "429" in parsed.content
    assert parsed.metadata.get("rate_limit_status") == 429
    assert parsed.metadata.get("empty_response") is True
    assert "Attempt 1 failed" in parsed.metadata.get("stderr", "")
|
||||||
|
|
||||||
|
|
||||||
|
def test_gemini_parser_still_errors_when_no_fallback_available():
    """Without stderr or error stats there is no fallback, so parsing must raise."""
    parser = GeminiJSONParser()

    with pytest.raises(ParserError):
        parser.parse('{"response": "", "stats": {}}', stderr="")
|
||||||
Reference in New Issue
Block a user