| |
| """ |
| Logging Management Module |
| ======================== |
| |
| Professional logging system that integrates with existing LOGS/ folder |
| and provides structured, comprehensive logging for BackgroundFX Pro. |
| |
| Features: |
| - Integration with existing LOGS/ folder |
| - Structured logging with different levels |
| - Performance tracking and metrics |
| - Error tracking and debugging |
| - Rotating log files |
| - Console and file output |
| |
| Author: BackgroundFX Pro Team |
| License: MIT |
| """ |
|
|
| import os |
| import sys |
| import logging |
| import logging.handlers |
| from typing import Optional, Dict, Any |
| from datetime import datetime |
| from pathlib import Path |
| import json |
| import traceback |
| from functools import wraps |
| import time |
| from typing import Dict, List |
|
|
|
|
class ColoredFormatter(logging.Formatter):
    """Formatter that wraps the level name in ANSI color codes for console output."""

    # ANSI escape sequence per level name, plus the reset code.
    COLORS = {
        'DEBUG': '\033[36m',     # cyan
        'INFO': '\033[32m',      # green
        'WARNING': '\033[33m',   # yellow
        'ERROR': '\033[31m',     # red
        'CRITICAL': '\033[35m',  # magenta
        'RESET': '\033[0m'
    }

    def format(self, record):
        """Format *record*, colorizing the level name for this call only.

        The previous implementation mutated ``record.levelname`` permanently;
        the same LogRecord instance is passed to every handler, so ANSI codes
        leaked into the plain file handlers' output. The original level name
        is now restored after formatting.
        """
        original_levelname = record.levelname
        try:
            if original_levelname in self.COLORS:
                record.levelname = (
                    f"{self.COLORS[original_levelname]}"
                    f"{original_levelname}{self.COLORS['RESET']}"
                )
            return super().format(record)
        finally:
            record.levelname = original_levelname
|
|
|
|
class BackgroundFXLogger:
    """Main logger class for BackgroundFX Pro.

    Wraps a stdlib ``logging.Logger`` and adds:
      * colored console output plus rotating file logs under ``logs_dir``
      * a dedicated rotating error-only log
      * performance / quality metric tracking persisted to ``performance.json``
      * ``start_timer`` / ``end_timer`` helpers for ad-hoc timing

    Attributes and methods not defined here are delegated to the wrapped
    ``logging.Logger`` (see ``__getattr__``), so instances can be passed to
    code that expects a plain logger.
    """

    def __init__(self,
                 name: str = "BackgroundFX",
                 logs_dir: str = "LOGS",
                 level: int = logging.INFO,
                 console_output: bool = True,
                 file_output: bool = True):
        """
        Args:
            name: Name registered with the ``logging`` module.
            logs_dir: Directory for log files; created if missing.
            level: Minimum level for the console and main file handlers.
            console_output: Attach a colored stdout handler.
            file_output: Attach rotating file handlers.
        """
        self.name = name
        self.logs_dir = Path(logs_dir)
        self.level = level
        self.console_output = console_output
        self.file_output = file_output

        # parents=True so a nested path such as "run/LOGS" also works.
        self.logs_dir.mkdir(parents=True, exist_ok=True)

        self.logger = logging.getLogger(name)
        self.logger.setLevel(level)
        # Handlers are attached directly below; stop propagation so records
        # are not emitted a second time through a configured root logger.
        self.logger.propagate = False

        # Re-constructing with the same name must not stack duplicate handlers.
        self.logger.handlers.clear()

        self._setup_handlers()

        # In-memory metric store, flushed to performance.json on each metric.
        self.performance_data: Dict[str, Any] = {}
        # operation name -> monotonic start timestamp (see start_timer)
        self.start_times: Dict[str, float] = {}

    def __getattr__(self, name):
        """
        Delegate unknown attributes/methods to the underlying stdlib logger.
        This makes BackgroundFXLogger behave like logging.Logger where needed.
        """
        return getattr(self.logger, name)

    def _setup_handlers(self):
        """Setup logging handlers for console and file output."""
        if self.console_output:
            console_handler = logging.StreamHandler(sys.stdout)
            console_handler.setLevel(self.level)
            console_formatter = ColoredFormatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            )
            console_handler.setFormatter(console_formatter)
            self.logger.addHandler(console_handler)

        if self.file_output:
            # Main rotating log: 10 MB per file, 5 backups.
            main_log_file = self.logs_dir / "backgroundfx.log"
            file_handler = logging.handlers.RotatingFileHandler(
                main_log_file,
                maxBytes=10 * 1024 * 1024,
                backupCount=5,
                encoding="utf-8"
            )
            file_handler.setLevel(self.level)
            file_formatter = logging.Formatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            )
            file_handler.setFormatter(file_formatter)
            self.logger.addHandler(file_handler)

            # Separate error-only rotating log: 5 MB per file, 3 backups.
            error_log_file = self.logs_dir / "errors.log"
            error_handler = logging.handlers.RotatingFileHandler(
                error_log_file,
                maxBytes=5 * 1024 * 1024,
                backupCount=3,
                encoding="utf-8"
            )
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(file_formatter)
            self.logger.addHandler(error_handler)

        # Defined unconditionally so _save_performance_data() and
        # get_log_files() keep working when file_output is False.
        self.performance_log_file = self.logs_dir / "performance.json"

    # NOTE(review): kwargs on the methods below become LogRecord attributes
    # via `extra=`; keys that collide with built-in record attributes
    # (e.g. "message", "name", "asctime") make logging raise — callers
    # must avoid them.

    def debug(self, message: str, **kwargs):
        """Log a debug message; kwargs ride along as extra record fields."""
        self.logger.debug(message, extra=kwargs)

    def info(self, message: str, **kwargs):
        """Log an info message; kwargs ride along as extra record fields."""
        self.logger.info(message, extra=kwargs)

    def warning(self, message: str, **kwargs):
        """Log a warning message; kwargs ride along as extra record fields."""
        self.logger.warning(message, extra=kwargs)

    def _format_exception(self, exception: Exception) -> str:
        """Render *exception*'s own traceback as a string.

        The previous code used traceback.format_exc(), which reads
        sys.exc_info() and prints "NoneType: None" whenever the logging
        call happens outside an ``except`` block.
        """
        return "".join(traceback.format_exception(
            type(exception), exception, exception.__traceback__))

    def error(self, message: str, exception: Optional[Exception] = None, **kwargs):
        """Log an error, appending the exception and its traceback when given."""
        if exception is not None:
            message = f"{message} | Exception: {str(exception)}"
            self.logger.error(
                f"{message}\n{self._format_exception(exception)}", extra=kwargs)
        else:
            self.logger.error(message, extra=kwargs)

    def critical(self, message: str, exception: Optional[Exception] = None, **kwargs):
        """Log a critical message, appending exception details when given."""
        if exception is not None:
            message = f"{message} | Exception: {str(exception)}"
            self.logger.critical(
                f"{message}\n{self._format_exception(exception)}", extra=kwargs)
        else:
            self.logger.critical(message, extra=kwargs)

    def log_processing_step(self, step_name: str, details: Dict[str, Any] = None):
        """Log a pipeline step start; *details* ride along as extra fields."""
        details = details or {}
        self.info(f"🔄 Processing: {step_name}", **details)

    def log_performance_metric(self, metric_name: str, value: float, unit: str = "", details: Dict = None):
        """Log a performance metric and persist it to performance.json."""
        details = details or {}
        message = f"📊 {metric_name}: {value:.3f}{unit}"
        self.info(message, **details)

        timestamp = datetime.now().isoformat()
        self.performance_data[timestamp] = {
            'metric': metric_name,
            'value': value,
            'unit': unit,
            'details': details
        }
        self._save_performance_data()

    def log_model_status(self, model_name: str, status: str, details: Dict = None):
        """Log model initialization/status ("loading", "initialized", "failed", ...)."""
        details = details or {}
        if status == "initialized":
            self.info(f"✅ {model_name} initialized successfully", **details)
        elif status == "failed":
            self.error(f"❌ {model_name} initialization failed", **details)
        elif status == "loading":
            self.info(f"🔄 Loading {model_name}...", **details)
        else:
            self.info(f"🔧 {model_name}: {status}", **details)

    def log_quality_metrics(self, frame_id: int, metrics: Dict[str, float]):
        """Log per-frame quality metrics and persist them with the perf data."""
        metric_str = " | ".join(f"{k}: {v:.3f}" for k, v in metrics.items())
        self.info(f"📊 Frame {frame_id} Quality: {metric_str}")

        timestamp = datetime.now().isoformat()
        self.performance_data[f"{timestamp}_quality_{frame_id}"] = {
            'type': 'quality_metrics',
            'frame_id': frame_id,
            'metrics': metrics
        }
        # Persist immediately, consistent with log_performance_metric();
        # previously these entries were only written on the next metric.
        self._save_performance_data()

    def log_video_processing(self, input_path: str, output_path: str,
                             frame_count: int, processing_time: float):
        """Log video processing completion with throughput (guards divide-by-zero)."""
        fps = frame_count / max(processing_time, 0.001)
        self.info(
            f"🎬 Video processed: {frame_count} frames in {processing_time:.1f}s ({fps:.1f} FPS)",
            input_path=input_path,
            output_path=output_path,
            frame_count=frame_count,
            processing_time=processing_time,
            fps=fps
        )

    def start_timer(self, operation_name: str):
        """Start timing an operation (monotonic clock, immune to clock changes)."""
        self.start_times[operation_name] = time.perf_counter()
        self.debug(f"⏱️ Started timing: {operation_name}")

    def end_timer(self, operation_name: str, log_result: bool = True) -> float:
        """End timing an operation; return elapsed seconds (0.0 if never started)."""
        start = self.start_times.pop(operation_name, None)
        if start is None:
            self.warning(f"Timer '{operation_name}' was not started")
            return 0.0

        elapsed = time.perf_counter() - start
        if log_result:
            self.log_performance_metric(f"{operation_name}_time", elapsed, "s")
        return elapsed

    def _save_performance_data(self):
        """Merge in-memory metrics into performance.json (best effort).

        Keeps at most the 1000 newest entries (ISO-timestamp keys sort
        chronologically). Failures are downgraded to a warning so metric
        persistence can never crash the application.
        """
        try:
            existing_data = {}
            if self.performance_log_file.exists():
                with open(self.performance_log_file, 'r', encoding="utf-8") as f:
                    try:
                        existing_data = json.load(f)
                    except json.JSONDecodeError:
                        existing_data = {}  # corrupt file: start fresh

            existing_data.update(self.performance_data)

            if len(existing_data) > 1000:
                keep_keys = sorted(existing_data.keys())[-1000:]
                existing_data = {k: existing_data[k] for k in keep_keys}

            with open(self.performance_log_file, 'w', encoding="utf-8") as f:
                json.dump(existing_data, f, indent=2)

            # Flushed to disk; drop the in-memory copy so it cannot grow
            # without bound over a long-running process.
            self.performance_data.clear()

        except Exception as e:
            self.warning(f"Failed to save performance data: {e}")

    def get_log_files(self) -> Dict[str, str]:
        """Return paths to all log files keyed by purpose."""
        return {
            'main_log': str(self.logs_dir / "backgroundfx.log"),
            'error_log': str(self.logs_dir / "errors.log"),
            'performance_log': str(self.performance_log_file),
            'logs_directory': str(self.logs_dir)
        }

    def get_recent_logs(self, lines: int = 50) -> Dict[str, List[str]]:
        """Return up to *lines* trailing entries from the main and error logs."""
        logs = {}
        try:
            main_log_file = self.logs_dir / "backgroundfx.log"
            if main_log_file.exists():
                with open(main_log_file, 'r', encoding="utf-8") as f:
                    logs['main'] = f.readlines()[-lines:]

            error_log_file = self.logs_dir / "errors.log"
            if error_log_file.exists():
                with open(error_log_file, 'r', encoding="utf-8") as f:
                    logs['errors'] = f.readlines()[-lines:]

        except Exception as e:
            self.warning(f"Failed to read recent logs: {e}")

        return logs
|
|
|
|
| |
# Process-wide singleton; created lazily by setup_logging().
_global_logger: Optional[BackgroundFXLogger] = None


def setup_logging(logs_dir: str = "LOGS",
                  level: int = logging.INFO,
                  console_output: bool = True,
                  file_output: bool = True) -> BackgroundFXLogger:
    """Create (once) and return the process-wide BackgroundFX logger.

    First call wins: on subsequent calls the arguments are ignored and
    the existing singleton is returned unchanged.
    """
    global _global_logger

    if _global_logger is not None:
        return _global_logger

    _global_logger = BackgroundFXLogger(
        name="BackgroundFX",
        logs_dir=logs_dir,
        level=level,
        console_output=console_output,
        file_output=file_output,
    )
    return _global_logger
|
|
|
|
| |
def setup_logger(*args, **kwargs):
    """Backward-compatible alias kept for legacy imports.

    Older code does ``from utils.logger import setup_logger``; delegate
    straight to setup_logging and return the shared BackgroundFXLogger.
    """
    logger = setup_logging(*args, **kwargs)
    return logger
|
|
|
|
def get_logger(name: Optional[str] = None) -> BackgroundFXLogger:
    """Return the global logger, or a named file-only logger for a module.

    Args:
        name: Optional module-specific logger name. When given (and not the
            global "BackgroundFX" name) a dedicated file-only logger is
            created sharing the global logs directory and level; console
            output is disabled so messages are not printed twice.

    Returns:
        The shared BackgroundFXLogger, or a fresh per-module instance.
    """
    # Use setup_logging()'s return value instead of re-reading the global
    # afterwards; also fixes the annotation (`name` is optional, not `str`).
    logger = _global_logger if _global_logger is not None else setup_logging()

    if name and name != "BackgroundFX":
        return BackgroundFXLogger(
            name=name,
            # logs_dir is a Path on the global logger; the constructor is
            # annotated `str`, so convert explicitly.
            logs_dir=str(logger.logs_dir),
            level=logger.level,
            console_output=False,
            file_output=True
        )

    return logger
|
|
|
|
| def log_function_call(logger: BackgroundFXLogger = None): |
| """Decorator to log function calls with timing""" |
| if logger is None: |
| logger = get_logger() |
| |
| def decorator(func): |
| @wraps(func) |
| def wrapper(*args, **kwargs): |
| func_name = f"{func.__module__}.{func.__name__}" |
| logger.debug(f"🔧 Calling: {func_name}") |
| logger.start_timer(func_name) |
| |
| try: |
| result = func(*args, **kwargs) |
| elapsed = logger.end_timer(func_name, log_result=False) |
| logger.debug(f"✅ Completed: {func_name} ({elapsed:.3f}s)") |
| return result |
| |
| except Exception as e: |
| elapsed = logger.end_timer(func_name, log_result=False) |
| logger.error(f"❌ Failed: {func_name} ({elapsed:.3f}s)", exception=e) |
| raise |
| |
| return wrapper |
| return decorator |
|
|
|
|
def log_processing_pipeline():
    """Decorator factory that logs a pipeline step's start and outcome.

    The step name is derived from the wrapped function's name
    (underscores become spaces, title-cased). The global logger is
    resolved when the factory is invoked.
    """
    logger = get_logger()

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            step = func.__name__.replace('_', ' ').title()
            logger.log_processing_step(step)

            try:
                outcome = func(*args, **kwargs)
            except Exception as e:
                logger.error(f"❌ {step} failed", exception=e)
                raise

            logger.info(f"✅ {step} completed successfully")
            return outcome

        return wrapper
    return decorator
|
|
|
|
| |
def log_info(message: str, **kwargs):
    """Module-level shortcut: info-log *message* via the global logger."""
    logger = get_logger()
    logger.info(message, **kwargs)
|
|
def log_error(message: str, exception: Exception = None, **kwargs):
    """Module-level shortcut: error-log *message* via the global logger."""
    logger = get_logger()
    logger.error(message, exception=exception, **kwargs)
|
|
def log_warning(message: str, **kwargs):
    """Module-level shortcut: warning-log *message* via the global logger."""
    logger = get_logger()
    logger.warning(message, **kwargs)
|
|
def log_debug(message: str, **kwargs):
    """Module-level shortcut: debug-log *message* via the global logger."""
    logger = get_logger()
    logger.debug(message, **kwargs)
|
|
|
|
| |
# Eagerly initialize the global logging system at import time so importing
# modules can log immediately without calling setup_logging() themselves.
# Failures are non-fatal: fall back to a plain print so a broken LOGS
# directory never prevents this module from importing.
if _global_logger is None:
    try:
        setup_logging()
        log_info("✅ Logging system initialized")
    except Exception as e:
        print(f"⚠️ Failed to initialize logging: {e}")
|
|
|
|
# Public API of this module (what `from utils.logger import *` exports).
__all__ = [
    "BackgroundFXLogger",
    "setup_logging",
    "setup_logger",
    "get_logger",
    "log_function_call",
    "log_processing_pipeline",
    "log_info",
    "log_error",
    "log_warning",
    "log_debug",
]
|
|