#!/usr/bin/env python3
"""
Log Analysis Script for Video Upload Request Flow

This script analyzes the comprehensive logs generated during video upload
requests to help debug issues and monitor system performance.

Usage:
    python scripts/analyze_logs.py [--request-id REQUEST_ID] [--user-id USER_ID] [--last-minutes MINUTES]
"""

import json
import re
import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional
import argparse


class LogAnalyzer:
    """Analyzes Django logs for video upload request flow."""

    # Log line shape: [TIMESTAMP] [LEVEL] [MODULE] MESSAGE
    _LINE_PATTERN = re.compile(r'\[([^\]]+)\] \[([^\]]+)\] \[([^\]]+)\] (.+)')
    # One alternative (capture group) per recognized key=value token; the
    # group order must stay in sync with _EXTRA_KEYS below.
    _EXTRA_PATTERN = re.compile(
        r"user_id=(\w+)|request_id=(\w+)|file_name=([^,\s]+)|error_type=(\w+)"
    )
    _EXTRA_KEYS = ('user_id', 'request_id', 'file_name', 'error_type')

    def __init__(self, logs_dir: str = "logs"):
        """Point the analyzer at a directory containing the known log files."""
        self.logs_dir = Path(logs_dir)
        self.log_files = {
            'requests': self.logs_dir / 'requests.log',
            'storage': self.logs_dir / 'storage.log',
            'errors': self.logs_dir / 'errors.log',
            'django': self.logs_dir / 'django.log'
        }

    def parse_log_line(self, line: str) -> Optional[Dict]:
        """Parse a log line and extract structured information.

        Returns None when the line does not match the expected
        ``[TIMESTAMP] [LEVEL] [MODULE] MESSAGE`` layout; otherwise a dict
        with keys: timestamp (datetime or None), level, module, message,
        extra (parsed key=value tokens), raw_line.
        """
        match = self._LINE_PATTERN.match(line.strip())
        if not match:
            return None

        timestamp_str, level, module, message = match.groups()

        try:
            timestamp = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
        except ValueError:
            # Fallback for different timestamp formats
            timestamp = None
        else:
            if timestamp.tzinfo is not None:
                # Normalize aware timestamps to naive local time: downstream
                # code compares them with datetime.now() and datetime.min,
                # and mixing naive and aware datetimes raises TypeError.
                timestamp = timestamp.astimezone().replace(tzinfo=None)

        # Extract structured data from the "extras" prefix, if present.
        # Messages look like: "user_id=42, request_id=abc - actual message".
        extra_data = {}
        if ' - ' in message:
            prefix, _, rest = message.partition(' - ')
            message = rest
            for token in self._EXTRA_PATTERN.finditer(prefix):
                # groups() yields one value per alternative and exactly one
                # is non-None per match; pair each with its key name.
                # (The old code unpacked the flat groups() tuple as (k, v)
                # pairs, which raised TypeError/ValueError.)
                for key, value in zip(self._EXTRA_KEYS, token.groups()):
                    if value is not None:
                        extra_data[key] = value

        return {
            'timestamp': timestamp,
            'level': level,
            'module': module,
            'message': message,
            'extra': extra_data,
            'raw_line': line.strip()
        }

    def _collect_logs(self, line_filter=None) -> List[Dict]:
        """Parse all configured log files into timestamp-sorted entries.

        line_filter: optional predicate applied to each raw line; lines for
        which it returns False are skipped. None accepts every line.
        Missing files are skipped; unreadable files are reported and skipped
        (best-effort, matching the original per-method behavior).
        """
        logs: List[Dict] = []
        for log_type, log_file in self.log_files.items():
            if not log_file.exists():
                continue
            try:
                with open(log_file, 'r') as f:
                    for line in f:
                        if line_filter is not None and not line_filter(line):
                            continue
                        parsed = self.parse_log_line(line)
                        if parsed:
                            parsed['log_type'] = log_type
                            logs.append(parsed)
            except Exception as e:
                print(f"Error reading {log_file}: {e}")

        # Entries without a parsable timestamp sort first.
        logs.sort(key=lambda x: x['timestamp'] or datetime.min)
        return logs

    def get_logs_by_request_id(self, request_id: str) -> List[Dict]:
        """Get all log entries for a specific request ID."""
        return self._collect_logs(lambda line: request_id in line)

    def get_logs_by_user_id(self, user_id: str) -> List[Dict]:
        """Get all log entries for a specific user ID."""
        return self._collect_logs(
            lambda line: f"user_id={user_id}" in line or f"[user_{user_id}]" in line
        )

    def get_recent_logs(self, minutes: int = 30) -> List[Dict]:
        """Get all log entries from the last N minutes."""
        cutoff_time = datetime.now() - timedelta(minutes=minutes)
        return [
            log for log in self._collect_logs()
            if log['timestamp'] and log['timestamp'] > cutoff_time
        ]

    def analyze_request_flow(self, logs: List[Dict]) -> Dict:
        """Analyze the request flow and identify issues.

        Returns a summary dict with error/warning counts, recognized request
        stages, files touched, collected error entries, and (when both the
        start and completion stages are present) the total request duration.
        """
        analysis = {
            'total_entries': len(logs),
            'error_count': 0,
            'warning_count': 0,
            'stages': {},
            'files_processed': [],
            'errors': [],
            'performance': {}
        }

        request_start = None
        request_end = None

        for log in logs:
            level = log['level']
            message = log['message']

            # Count log levels and capture error details.
            if level == 'ERROR':
                analysis['error_count'] += 1
                analysis['errors'].append({
                    'timestamp': log['timestamp'],
                    'message': message,
                    'module': log['module']
                })
            elif level == 'WARNING':
                analysis['warning_count'] += 1

            # Track request stages by well-known message fragments.
            lowered = message.lower()
            if 'request started' in lowered:
                request_start = log['timestamp']
                analysis['stages']['request_start'] = log['timestamp']
            elif 'upload successful' in lowered:
                analysis['stages']['upload_complete'] = log['timestamp']
            elif 'created successfully' in lowered:
                request_end = log['timestamp']
                analysis['stages']['request_complete'] = log['timestamp']

            # Track files mentioned in the structured extras.
            if 'file_name' in log['extra']:
                analysis['files_processed'].append({
                    'name': log['extra']['file_name'],
                    'timestamp': log['timestamp'],
                    'operation': message
                })

        # End-to-end duration is only computable with both boundary stamps.
        if request_start and request_end:
            duration = (request_end - request_start).total_seconds()
            analysis['performance']['total_duration_seconds'] = duration

        return analysis

    def print_analysis(self, logs: List[Dict], analysis: Dict):
        """Print formatted analysis results: summary, files, errors, entries."""
        print("=" * 80)
        print("LOG ANALYSIS RESULTS")
        print("=" * 80)

        print(f"\n📊 SUMMARY:")
        print(f"   Total log entries: {analysis['total_entries']}")
        print(f"   Errors: {analysis['error_count']}")
        print(f"   Warnings: {analysis['warning_count']}")

        if analysis['performance'].get('total_duration_seconds'):
            duration = analysis['performance']['total_duration_seconds']
            print(f"   Request duration: {duration:.2f} seconds")

        print(f"\n📁 FILES PROCESSED:")
        for file_info in analysis['files_processed']:
            timestamp = file_info['timestamp'].strftime('%H:%M:%S') if file_info['timestamp'] else 'Unknown'
            print(f"   [{timestamp}] {file_info['name']} - {file_info['operation']}")

        if analysis['errors']:
            print(f"\n❌ ERRORS:")
            for error in analysis['errors']:
                timestamp = error['timestamp'].strftime('%H:%M:%S') if error['timestamp'] else 'Unknown'
                print(f"   [{timestamp}] [{error['module']}] {error['message']}")

        print(f"\n📋 DETAILED LOG ENTRIES:")
        for log in logs:
            timestamp = log['timestamp'].strftime('%Y-%m-%d %H:%M:%S') if log['timestamp'] else 'Unknown'
            level_icon = {'ERROR': '❌', 'WARNING': '⚠️', 'INFO': 'ℹ️', 'DEBUG': '🔍'}.get(log['level'], '📝')
            print(f"   {level_icon} [{timestamp}] [{log['module']}] {log['message']}")


def main():
    """CLI entry point: parse filter options, collect logs, print analysis."""
    arg_parser = argparse.ArgumentParser(description='Analyze video upload logs')
    arg_parser.add_argument('--request-id', help='Filter by request ID')
    arg_parser.add_argument('--user-id', help='Filter by user ID')
    arg_parser.add_argument('--last-minutes', type=int, default=30,
                            help='Show logs from last N minutes')
    arg_parser.add_argument('--logs-dir', default='logs', help='Logs directory path')
    opts = arg_parser.parse_args()

    analyzer = LogAnalyzer(opts.logs_dir)

    # Filters are mutually exclusive by precedence: request ID wins over
    # user ID, which wins over the time-window default.
    if opts.request_id:
        entries = analyzer.get_logs_by_request_id(opts.request_id)
        print(f"Analyzing logs for request ID: {opts.request_id}")
    elif opts.user_id:
        entries = analyzer.get_logs_by_user_id(opts.user_id)
        print(f"Analyzing logs for user ID: {opts.user_id}")
    else:
        entries = analyzer.get_recent_logs(opts.last_minutes)
        print(f"Analyzing logs from last {opts.last_minutes} minutes")

    if not entries:
        print("No logs found matching the criteria.")
        return

    # Summarize the flow, then render everything to stdout.
    summary = analyzer.analyze_request_flow(entries)
    analyzer.print_analysis(entries, summary)


# Run the CLI only when executed directly, not when imported as a module.
if __name__ == '__main__':
    main()