refactor: include file modification dates too

This commit is contained in:
Fahad
2025-10-05 08:19:46 +04:00
parent bd666227c8
commit 47973e945e
2 changed files with 21 additions and 4 deletions

View File

@@ -18,9 +18,14 @@ Authoritative documentation and samples live in `docs/`, and runtime diagnostics
- `python communication_simulator_test.py --quick` smoke-test orchestration across tools and providers.
- `./run_integration_tests.sh [--with-simulator]` exercise provider-dependent flows against remote or Ollama models.
Run code quality checks:
```bash
.zen_venv/bin/activate && ./code_quality_checks.sh
```
For example, this is how we run an individual / all tests:
```bash
.zen_venv/bin/activate && pytest tests/test_auto_mode_model_listing.py -q
.zen_venv/bin/activate && pytest -q
```

View File

@@ -40,6 +40,7 @@ multi-turn file handling:
import json
import logging
import os
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional
@@ -463,11 +464,17 @@ def read_file_content(
return content, estimate_tokens(content)
# Check file size to prevent memory exhaustion
stat_result = path.stat()
file_size = stat_result.st_size
logger.debug(f"[FILES] File size for {file_path}: {file_size:,} bytes")
if file_size > max_size:
logger.debug(f"[FILES] File too large: {file_path} ({file_size:,} > {max_size:,} bytes)")
modified_at = datetime.fromtimestamp(stat_result.st_mtime, tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z")
content = (
f"\n--- FILE TOO LARGE: {file_path} (Last modified: {modified_at}) ---\n"
f"File size: {file_size:,} bytes (max: {max_size:,})\n"
"--- END FILE ---\n"
)
return content, estimate_tokens(content)
# Determine if we should add line numbers
@@ -495,7 +502,12 @@ def read_file_content(
# NOTE: These markers ("--- BEGIN FILE: ... ---") are distinct from git diff markers
# ("--- BEGIN DIFF: ... ---") to allow AI to distinguish between complete file content
# vs. partial diff content when files appear in both sections
modified_at = datetime.fromtimestamp(stat_result.st_mtime, tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z")
formatted = (
f"\n--- BEGIN FILE: {file_path} (Last modified: {modified_at}) ---\n"
f"{file_content}\n"
f"--- END FILE: {file_path} ---\n"
)
tokens = estimate_tokens(formatted)
logger.debug(f"[FILES] Formatted content for {file_path}: {len(formatted)} chars, {tokens} tokens")
return formatted, tokens