# Source: lovdata-chat/session-manager/logging_config.py (318 lines, 10 KiB, Python)
"""
Structured Logging Configuration
Provides comprehensive logging infrastructure with structured logging,
request tracking, log formatting, and aggregation capabilities.
"""
import json
import logging
import logging.handlers
import os
import sys
import threading
import uuid
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, Optional
class StructuredFormatter(logging.Formatter):
    """Structured JSON formatter for production logging.

    Emits one JSON object per record containing a fixed set of core fields
    (timestamp, level, logger, message, module, function, line), the
    formatted exception when present, and every caller-supplied ``extra``
    attribute attached to the record (request_id, session_id, user_id,
    operation, duration_ms, status_code, ...).
    """

    # LogRecord attributes that belong to the logging machinery and must not
    # be copied into the payload as "extra" fields. ``taskName`` was added to
    # LogRecord in Python 3.12 and would otherwise leak into every entry.
    _RESERVED_ATTRS = frozenset({
        "name", "msg", "args", "levelname", "levelno", "pathname",
        "filename", "module", "exc_info", "exc_text", "stack_info",
        "lineno", "funcName", "created", "msecs", "relativeCreated",
        "thread", "threadName", "processName", "process", "message",
        "taskName",
    })

    def format(self, record: logging.LogRecord) -> str:
        """Render *record* as a single-line JSON string."""
        # datetime.utcnow() is deprecated since Python 3.12; build an aware
        # UTC timestamp and normalize the "+00:00" offset to the "Z" suffix
        # so the output format matches the previous behavior.
        timestamp = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
        log_entry: Dict[str, Any] = {
            "timestamp": timestamp,
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "module": record.module,
            "function": record.funcName,
            "line": record.lineno,
        }
        if record.exc_info:
            log_entry["exception"] = self.formatException(record.exc_info)
        # One pass over the record's __dict__ picks up every extra field;
        # the previous per-field hasattr checks were redundant with this loop.
        for key, value in record.__dict__.items():
            if key not in self._RESERVED_ATTRS:
                log_entry[key] = value
        # default=str keeps non-JSON-serializable extras from crashing logging.
        return json.dumps(log_entry, default=str)
class HumanReadableFormatter(logging.Formatter):
    """Human-readable formatter for development.

    Produces classic single-line output and, when a record carries a
    ``request_id`` attribute, inserts it in brackets before the message.
    """

    _BASE_FMT = "%(asctime)s [%(levelname)8s] %(name)s:%(funcName)s:%(lineno)d"

    def __init__(self):
        super().__init__(
            fmt=self._BASE_FMT + " - %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        )

    def format(self, record: logging.LogRecord) -> str:
        """Format *record*, including the request ID when present.

        Bug fix: the original assigned ``self._fmt``, which has had no
        effect since Python 3.2 — formatting is driven by the style object
        (``self._style._fmt``) — so the request ID was silently dropped.
        """
        if hasattr(record, "request_id"):
            self._style._fmt = self._BASE_FMT + " [%(request_id)s] - %(message)s"
        else:
            self._style._fmt = self._BASE_FMT + " - %(message)s"
        return super().format(record)
class RequestContext:
    """Context manager that scopes a request ID to the current thread.

    Entering installs the ID in thread-local storage so logging on this
    thread can pick it up; exiting restores whatever ID (if any) was
    active before, which makes nesting work.
    """

    _local = threading.local()

    def __init__(self, request_id: Optional[str] = None):
        # Fall back to a short random ID when the caller supplies none.
        self.request_id = request_id or str(uuid.uuid4())[:8]
        # Snapshot the currently active ID so __exit__ can restore it.
        self._old_request_id = getattr(self._local, "request_id", None)

    def __enter__(self):
        self._local.request_id = self.request_id
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        previous = self._old_request_id
        if previous is None:
            # No enclosing context: remove the attribute entirely.
            delattr(self._local, "request_id")
        else:
            self._local.request_id = previous

    @classmethod
    def get_current_request_id(cls) -> Optional[str]:
        """Return the request ID bound to this thread, or None if unset."""
        return getattr(cls._local, "request_id", None)
class RequestAdapter(logging.LoggerAdapter):
    """Logger adapter that injects the thread-local request ID into records."""

    def __init__(self, logger: logging.Logger):
        super().__init__(logger, {})

    def process(self, msg: str, kwargs: Any) -> tuple:
        """Attach the current request ID (when one is set) to ``extra``."""
        current = RequestContext.get_current_request_id()
        if not current:
            # No active request context — pass the call through untouched.
            return msg, kwargs
        kwargs.setdefault("extra", {})["request_id"] = current
        return msg, kwargs
def setup_logging(
    level: str = "INFO",
    format_type: str = "auto",  # "json", "human", or "auto"
    log_file: Optional[str] = None,
    max_file_size: int = 10 * 1024 * 1024,  # 10MB
    backup_count: int = 5,
    enable_console: bool = True,
    enable_file: bool = True,
) -> "RequestAdapter":
    """
    Set up comprehensive logging configuration.

    Args:
        level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL);
            unknown names fall back to INFO.
        format_type: Log format - "json", "human", or "auto" (detects from
            the ENVIRONMENT variable)
        log_file: Path to log file (optional)
        max_file_size: Maximum log file size in bytes
        backup_count: Number of backup files to keep
        enable_console: Enable console logging
        enable_file: Enable file logging

    Returns:
        A RequestAdapter wrapping the configured root logger. (The previous
        annotation claimed ``logging.Logger``, but an adapter is what is
        actually constructed and returned.)
    """
    if format_type == "auto":
        # JSON in production, human-readable everywhere else.
        format_type = "json" if os.getenv("ENVIRONMENT") == "production" else "human"

    # Reset the root logger so repeated calls do not stack duplicate handlers.
    root_logger = logging.getLogger()
    root_logger.handlers.clear()

    numeric_level = getattr(logging, level.upper(), logging.INFO)
    root_logger.setLevel(numeric_level)

    if format_type == "json":
        formatter: logging.Formatter = StructuredFormatter()
    else:
        formatter = HumanReadableFormatter()

    if enable_console:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(numeric_level)
        console_handler.setFormatter(formatter)
        root_logger.addHandler(console_handler)

    if enable_file and log_file:
        # Ensure the log directory exists before the handler opens the file.
        log_path = Path(log_file)
        log_path.parent.mkdir(parents=True, exist_ok=True)
        file_handler = logging.handlers.RotatingFileHandler(
            log_file, maxBytes=max_file_size, backupCount=backup_count, encoding="utf-8"
        )
        file_handler.setLevel(numeric_level)
        file_handler.setFormatter(StructuredFormatter())  # Always use JSON for files
        root_logger.addHandler(file_handler)

    # Wrap the root logger so request IDs are attached automatically.
    adapter = RequestAdapter(root_logger)

    # Quiet noisy third-party loggers.
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("docker").setLevel(logging.WARNING)
    # Bug fix: the original configured "aiodeocker" (typo), which matched no
    # real library, leaving aiodocker's DEBUG/INFO output unsuppressed.
    logging.getLogger("aiodocker").setLevel(logging.WARNING)
    logging.getLogger("asyncio").setLevel(logging.WARNING)

    return adapter
def get_logger(name: str) -> RequestAdapter:
    """Return the named logger wrapped with request-context support."""
    return RequestAdapter(logging.getLogger(name))
def log_performance(operation: str, duration_ms: float, **kwargs) -> None:
    """Emit a structured performance-metric log entry."""
    fields = {"operation": operation, "duration_ms": duration_ms}
    fields.update(kwargs)  # caller-supplied fields may override the defaults
    get_logger(__name__).info(
        f"Performance: {operation} completed in {duration_ms:.2f}ms",
        extra=fields,
    )
def log_request(
    method: str, path: str, status_code: int, duration_ms: float, **kwargs
) -> None:
    """Emit one structured log entry for a completed HTTP request."""
    fields = {
        "operation": "http_request",
        "method": method,
        "path": path,
        "status_code": status_code,
        "duration_ms": duration_ms,
    }
    fields.update(kwargs)  # caller-supplied fields may override the defaults
    # 4xx/5xx responses are logged at WARNING, everything else at INFO.
    severity = logging.WARNING if status_code >= 400 else logging.INFO
    get_logger(__name__).log(
        severity,
        f"HTTP {method} {path} -> {status_code} ({duration_ms:.2f}ms)",
        extra=fields,
    )
def log_session_operation(session_id: str, operation: str, **kwargs) -> None:
    """Emit a structured log entry for a session-related operation."""
    fields = {"session_id": session_id, "operation": operation}
    fields.update(kwargs)  # caller-supplied fields may override the defaults
    get_logger(__name__).info(f"Session {operation}: {session_id}", extra=fields)
def log_security_event(event: str, severity: str = "info", **kwargs) -> None:
    """Emit a security-event log entry at the level named by *severity*."""
    fields = {"security_event": event, "severity": severity}
    fields.update(kwargs)  # caller-supplied fields may override the defaults
    # Unknown severity names fall back to INFO rather than raising.
    level = getattr(logging, severity.upper(), logging.INFO)
    get_logger(__name__).log(level, f"Security: {event}", extra=fields)
# Module-level logger shared by the helpers above and usable by importers
# via `from logging_config import logger`.
logger = get_logger(__name__)
# One-shot guard: init_logging() checks this so handler setup runs only once.
_setup_complete = False
def init_logging():
    """Initialize the logging system from environment variables.

    Idempotent: only the first call performs configuration. Reads
    LOG_LEVEL, LOG_FORMAT, LOG_FILE, LOG_MAX_SIZE_MB, LOG_BACKUP_COUNT,
    LOG_CONSOLE and LOG_FILE_ENABLED, then delegates to setup_logging().
    """
    global _setup_complete
    if _setup_complete:
        return

    def _int_env(name: str, default: int) -> int:
        # This module calls init_logging() at import time, so a malformed
        # numeric env var must not crash the importing process with a
        # ValueError — fall back to the documented default instead.
        raw = os.getenv(name)
        if raw is None:
            return default
        try:
            return int(raw)
        except ValueError:
            return default

    level = os.getenv("LOG_LEVEL", "INFO")
    format_type = os.getenv("LOG_FORMAT", "auto")  # json, human, auto
    log_file = os.getenv("LOG_FILE")
    max_file_size = _int_env("LOG_MAX_SIZE_MB", 10) * 1024 * 1024
    backup_count = _int_env("LOG_BACKUP_COUNT", 5)
    enable_console = os.getenv("LOG_CONSOLE", "true").lower() == "true"
    # File logging requires both the feature flag and an actual path.
    enable_file = (
        os.getenv("LOG_FILE_ENABLED", "true").lower() == "true" and log_file is not None
    )

    setup_logging(
        level=level,
        format_type=format_type,
        log_file=log_file,
        max_file_size=max_file_size,
        backup_count=backup_count,
        enable_console=enable_console,
        enable_file=enable_file,
    )

    logger.info(
        "Structured logging initialized",
        extra={
            "level": level,
            "format": format_type,
            "log_file": log_file,
            "max_file_size_mb": max_file_size // (1024 * 1024),
            "backup_count": backup_count,
        },
    )
    _setup_complete = True
# NOTE: configuring logging as an import side effect is deliberate — importers
# get structured logging without an explicit setup call; init_logging() is
# guarded so repeated imports do not reconfigure handlers.
init_logging()