"""Structured logging utilities: JSON-formatted conversion log, plain-text failure log, and a helper for logging events with context fields."""
import json
import logging
from datetime import datetime, timezone
from logging.handlers import RotatingFileHandler
from pathlib import Path
|
|
|
|
class JsonFormatter(logging.Formatter):
    """
    Custom JSON log formatter for structured logging.

    Outputs rich JSON objects with context for programmatic parsing and analysis.
    Context fields passed via ``logger.info("msg", extra={...})`` are included
    as top-level keys in the emitted JSON object.
    """

    # Attribute names present on every freshly-constructed LogRecord, plus the
    # two that Formatter may add later ("message", "asctime"). Anything on the
    # record outside this set was injected by the caller via ``extra={...}``.
    # Built dynamically so it stays correct across Python versions (e.g. the
    # "taskName" attribute added in 3.12).
    _RESERVED_ATTRS = frozenset(
        logging.LogRecord("", 0, "", 0, "", (), None).__dict__
    ) | {"message", "asctime"}

    def format(self, record: logging.LogRecord) -> str:
        """Serialize *record* as a single-line JSON string.

        Args:
            record: The log record to format.

        Returns:
            A JSON object string with timestamp (UTC, ISO-8601 "Z" suffix),
            level, message, source location, any caller-supplied ``extra``
            fields, and the formatted exception if one is attached.
        """
        log_object = {
            # utcfromtimestamp() is deprecated since 3.12; use an aware UTC time.
            "timestamp": datetime.fromtimestamp(record.created, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
            "level": record.levelname,
            "message": record.getMessage(),
            "module": record.module,
            "function": record.funcName,
            "line": record.lineno,
        }

        # Include any extra fields added via logger.info("msg", extra={...}).
        # BUG FIX: logging merges ``extra`` keys directly into the record's
        # __dict__ — there is no ``record.extra`` attribute — so the previous
        # ``hasattr(record, "extra")`` check silently dropped all context.
        # Pick up every non-standard attribute instead.
        for key, value in record.__dict__.items():
            if key not in self._RESERVED_ATTRS and key not in log_object:
                log_object[key] = value

        # Include exception info if present (for error tracking)
        if record.exc_info:
            log_object["exception"] = self.formatException(record.exc_info)

        return json.dumps(log_object, ensure_ascii=False)
|
|
|
|
def setup_logger(log_folder: Path, log_file_name: str = "conversion.log", level=logging.INFO) -> logging.Logger:
    """
    Setup logger with structured JSON file output and disabled console output.

    Output:
        - File (logs/conversion.log): JSON format with full context for programmatic parsing
        - Console: Disabled (all user output handled via print() for clean terminal UI)

    Args:
        log_folder: Directory for the log file; created if missing.
        log_file_name: Name of the JSON log file inside ``log_folder``.
        level: Minimum log level for both the logger and the file handler.

    Returns:
        The shared "conversion_logger" instance. Repeat calls return the same
        logger (its level is refreshed) without attaching duplicate handlers.

    Usage:
        logger.info("Processing complete", extra={
            "file": "video.mkv",
            "size_mb": 1024,
            "duration_sec": 3600
        })
    """
    log_folder.mkdir(parents=True, exist_ok=True)
    log_file = log_folder / log_file_name

    logger = logging.getLogger("conversion_logger")
    logger.setLevel(level)
    logger.propagate = False  # Prevent double logging

    # BUG FIX: return the cached logger BEFORE constructing handlers.
    # RotatingFileHandler opens the log file in its constructor, so building
    # handlers on every call (as before) leaked an open file handle per call
    # whenever the handlers were then discarded by the "add only once" guard.
    if logger.handlers:
        return logger

    # Console handler (disabled - use print() for user-facing output).
    # This prevents duplicate/ugly output mixing with terminal UI.
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter(
        "%(asctime)s [%(levelname)s] %(message)s (%(module)s:%(lineno)d)",
        datefmt="%Y-%m-%d %H:%M:%S"
    ))
    console_handler.setLevel(logging.CRITICAL + 1)  # Effectively disable (above CRITICAL)

    # File handler (JSON logs), rotated at 5 MiB with 3 backups.
    file_handler = RotatingFileHandler(log_file, maxBytes=5 * 1024 * 1024, backupCount=3, encoding="utf-8")
    file_handler.setFormatter(JsonFormatter())
    file_handler.setLevel(level)

    logger.addHandler(console_handler)
    logger.addHandler(file_handler)

    return logger
|
|
|
|
|
|
def setup_failure_logger(log_folder: Path) -> logging.Logger:
    """
    Create (or fetch) the dedicated logger for encoding/processing failures.

    Output:
        - File (logs/failure.log): Simple text format with timestamp and failure message
        - Use this for tracking files that failed processing for later analysis

    Args:
        log_folder: Directory for ``failure.log``; created if it does not exist.

    Returns:
        The shared WARNING-level "failure_logger". Repeat calls return the
        already-configured instance without adding duplicate handlers.

    Usage:
        failure_logger.warning(f"{file.name} | CQ mode failed: size threshold not met (95%)")
    """
    log_folder.mkdir(parents=True, exist_ok=True)

    failure_logger = logging.getLogger("failure_logger")
    failure_logger.setLevel(logging.WARNING)

    # Already configured by an earlier call — hand back the cached instance.
    if failure_logger.handlers:
        return failure_logger

    # Single rotating file handler: 5 MiB per file, 3 backups, plain text.
    handler = RotatingFileHandler(
        log_folder / "failure.log",
        maxBytes=5 * 1024 * 1024,
        backupCount=3,
        encoding="utf-8",
    )
    handler.setLevel(logging.WARNING)
    handler.setFormatter(
        logging.Formatter("%(asctime)s | %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
    )

    failure_logger.addHandler(handler)
    failure_logger.propagate = False  # keep failures out of the root logger

    return failure_logger
|
|
|
|
|
|
def log_event(logger: logging.Logger, level: str, message: str, **context):
    """
    Log a structured event with context fields.

    Args:
        logger: Logger instance
        level: Log level ("debug", "info", "warning", "error")
        message: Main message text
        **context: Additional context fields (file, size, duration, etc)

    Example:
        log_event(logger, "info", "Encoding complete",
                  file="video.mkv", size_mb=1024, method="CQ", reduction_pct=45)
    """
    # Resolve the level name to the matching logger method; unknown names
    # fall back to .info rather than raising.
    emit = getattr(logger, level.lower(), None)
    if emit is None:
        emit = logger.info
    emit(message, extra=context)
|