More tests
@@ -251,7 +251,7 @@ def add_turn(
- File references are preserved for cross-tool access
"""
logger.debug(f"[FLOW] Adding {role} turn to {thread_id} ({tool_name})")


context = get_thread(thread_id)
if not context:
logger.debug(f"[FLOW] Thread {thread_id} not found for turn addition")
@@ -301,13 +301,13 @@ def get_conversation_file_list(context: ThreadContext) -> list[str]:
list[str]: Deduplicated list of file paths referenced in the conversation
"""
if not context.turns:
- logger.debug(f"[FILES] No turns found, returning empty file list")
+ logger.debug("[FILES] No turns found, returning empty file list")
return []

# Collect all unique files from all turns, preserving order of first appearance
seen_files = set()
unique_files = []


logger.debug(f"[FILES] Collecting files from {len(context.turns)} turns")

for i, turn in enumerate(context.turns):
@@ -322,7 +322,7 @@ def get_conversation_file_list(context: ThreadContext) -> list[str]:
logger.debug(f"[FILES] Duplicate file skipped: {file_path}")
else:
logger.debug(f"[FILES] Turn {i+1} has no files")


logger.debug(f"[FILES] Final unique file list ({len(unique_files)}): {unique_files}")
return unique_files

@@ -409,13 +409,17 @@ def build_conversation_history(context: ThreadContext, read_files_func=None) ->
logger.debug(
f"📄 File embedded in conversation history: {file_path} ({content_tokens:,} tokens)"
)
- logger.debug(f"[FILES] Successfully embedded {file_path} - {content_tokens:,} tokens (total: {total_tokens:,})")
+ logger.debug(
+ f"[FILES] Successfully embedded {file_path} - {content_tokens:,} tokens (total: {total_tokens:,})"
+ )
else:
files_truncated += 1
logger.debug(
f"📄 File truncated due to token limit: {file_path} ({content_tokens:,} tokens, would exceed {MAX_CONTENT_TOKENS:,} limit)"
)
- logger.debug(f"[FILES] File {file_path} would exceed token limit - skipping (would be {total_tokens + content_tokens:,} tokens)")
+ logger.debug(
+ f"[FILES] File {file_path} would exceed token limit - skipping (would be {total_tokens + content_tokens:,} tokens)"
+ )
# Stop processing more files
break
else:
@@ -439,7 +443,9 @@ def build_conversation_history(context: ThreadContext, read_files_func=None) ->
logger.debug(
f"📄 Conversation history file embedding complete: {files_included} files embedded, {files_truncated} truncated, {total_tokens:,} total tokens"
)
- logger.debug(f"[FILES] File embedding summary - {files_included} embedded, {files_truncated} truncated, {total_tokens:,} tokens total")
+ logger.debug(
+ f"[FILES] File embedding summary - {files_included} embedded, {files_truncated} truncated, {total_tokens:,} tokens total"
+ )
else:
history_parts.append("(No accessible files found)")
logger.debug(
@@ -505,11 +511,13 @@ def build_conversation_history(context: ThreadContext, read_files_func=None) ->
from utils.token_utils import estimate_tokens

total_conversation_tokens = estimate_tokens(complete_history)


# Summary log of what was built
user_turns = len([t for t in context.turns if t.role == "user"])
assistant_turns = len([t for t in context.turns if t.role == "assistant"])
- logger.debug(f"[FLOW] Built conversation history: {user_turns} user + {assistant_turns} assistant turns, {len(all_files)} files, {total_conversation_tokens:,} tokens")
+ logger.debug(
+ f"[FLOW] Built conversation history: {user_turns} user + {assistant_turns} assistant turns, {len(all_files)} files, {total_conversation_tokens:,} tokens"
+ )

return complete_history, total_conversation_tokens

@@ -470,7 +470,7 @@ def read_file_content(file_path: str, max_size: int = 1_000_000) -> tuple[str, i
logger.debug(f"[FILES] Reading file content for {file_path}")
with open(path, encoding="utf-8", errors="replace") as f:
file_content = f.read()


logger.debug(f"[FILES] Successfully read {len(file_content)} characters from {file_path}")

# Format with clear delimiters that help the AI understand file boundaries
@@ -518,7 +518,9 @@ def read_files(
max_tokens = MAX_CONTEXT_TOKENS

logger.debug(f"[FILES] read_files called with {len(file_paths)} paths")
- logger.debug(f"[FILES] Token budget: max={max_tokens:,}, reserve={reserve_tokens:,}, available={max_tokens - reserve_tokens:,}")
+ logger.debug(
+ f"[FILES] Token budget: max={max_tokens:,}, reserve={reserve_tokens:,}, available={max_tokens - reserve_tokens:,}"
+ )

content_parts = []
total_tokens = 0
@@ -546,7 +548,7 @@ def read_files(

if not all_files and file_paths:
# No files found but paths were provided
- logger.debug(f"[FILES] No files found from provided paths")
+ logger.debug("[FILES] No files found from provided paths")
content_parts.append(f"\n--- NO FILES FOUND ---\nProvided paths: {', '.join(file_paths)}\n--- END ---\n")
else:
# Read files sequentially until token limit is reached
@@ -567,7 +569,9 @@ def read_files(
logger.debug(f"[FILES] Added file {file_path}, total tokens: {total_tokens:,}")
else:
# File too large for remaining budget
- logger.debug(f"[FILES] File {file_path} too large for remaining budget ({file_tokens:,} tokens, {available_tokens - total_tokens:,} remaining)")
+ logger.debug(
+ f"[FILES] File {file_path} too large for remaining budget ({file_tokens:,} tokens, {available_tokens - total_tokens:,} remaining)"
+ )
files_skipped.append(file_path)

# Add informative note about skipped files to help users understand