"""
LogFileAnalyzer
Generated by Eden via recursive self-improvement
2025-10-28 06:33:58.371973
"""

class LogFileAnalyzer:
    """
    Analyze log file contents.

    Expected line format (fields separated by " - "):
        TIMESTAMP - LEVEL - LOGGER - MESSAGE
    e.g. "2025-01-01 12:00:00 - ERROR - app.db - connection refused"

    Attributes:
        log_lines (list): Raw text lines immediately after __init__; the
            query methods (filter_by_level, search, count_unique_errors,
            calculate_error_rate) expect the caller to have replaced them
            with the dicts produced by _parse_log_line.
    """

    def __init__(self, log_file_paths=None):
        """
        Initialize with log file paths.

        Args:
            log_file_paths (list of str, optional): Files whose lines are
                read, unparsed, into ``log_lines``.
        """
        self.log_lines = []
        if log_file_paths:
            for path in log_file_paths:
                with open(path, 'r') as f:
                    self.log_lines.extend(f.readlines())

    def _parse_log_line(self, line):
        """
        Parse a single log line into structured data.

        Args:
            line (str): A single log line.

        Returns:
            dict: Keys 'timestamp', 'level' (upper-cased), 'logger',
                'message'.

        Raises:
            ValueError: If the log line format is invalid.
        """
        # Format: "TIMESTAMP - LEVEL - LOGGER - MESSAGE". (The previous
        # comment described a bracketed "[ts] LEVEL ..." layout that the
        # " - " split below could never have matched.)
        try:
            parts = line.strip().split(' - ')
            if len(parts) < 4:
                raise ValueError("Invalid log line format")

            timestamp, level, logger_name = parts[0], parts[1], parts[2]
            # Re-join the tail so a message that itself contains " - "
            # is not silently truncated at its first separator.
            message = ' - '.join(parts[3:])
            return {
                'timestamp': timestamp,
                'level': level.upper(),
                'logger': logger_name,
                'message': message
            }
        except Exception as e:
            raise ValueError(f"Failed to parse log line: {line}") from e

    def filter_by_level(self, level):
        """
        Filter logs by severity level (case-insensitive).

        Args:
            level (str): Log level to filter by (e.g., 'ERROR', 'warning').

        Returns:
            list of dicts: Logs matching the level.
        """
        # _parse_log_line stores levels upper-cased, so normalize the query
        # the same way; previously filter_by_level('error') never matched.
        target = level.upper()
        return [log for log in self.log_lines if log.get('level') == target]

    def search(self, pattern):
        """
        Search log messages for a regular-expression pattern.

        Args:
            pattern (str): Regex pattern to search for.

        Returns:
            list of dicts: Logs whose message contains a match.
        """
        import re
        regex = re.compile(pattern)
        return [log for log in self.log_lines if regex.search(log['message'])]

    def count_unique_errors(self):
        """
        Count distinct ERROR-level messages.

        Returns:
            int: Number of unique ERROR messages.
        """
        errors = set()
        for log in self.log_lines:
            if log.get('level') == 'ERROR':
                errors.add(log['message'])
        return len(errors)

    def calculate_error_rate(self, timeframe=24 * 60 * 60):
        """
        Calculate error rate within a recent timeframe.

        Args:
            timeframe (int): Window in seconds, measured backward from
                "now" (naive local time — matches the naive datetimes
                produced by strptime below; assumes log timestamps are
                local time. TODO confirm against the log producer).

        Returns:
            float: Percentage of ERROR logs among all logs inside the
                window; 0 when no logs fall inside it.

        Raises:
            ValueError: If any log timestamp does not match
                "%Y-%m-%d %H:%M:%S".
        """
        import datetime
        current_time = datetime.datetime.now()
        filtered_logs = []

        for log in self.log_lines:
            timestamp_obj = datetime.datetime.strptime(
                log['timestamp'], "%Y-%m-%d %H:%M:%S")
            if (current_time - timestamp_obj).total_seconds() < timeframe:
                filtered_logs.append(log)

        error_count = sum(
            1 for log in filtered_logs if log.get('level') == 'ERROR')
        return (error_count / len(filtered_logs)) * 100 if filtered_logs else 0

# Example usage
if __name__ == "__main__":
    # Initialize with sample logs
    analyzer = LogFileAnalyzer([
        "logs/error.log",
        "logs/access.log"
    ])

    # Parse each raw line; skip lines that do not match the expected
    # "TIMESTAMP - LEVEL - LOGGER - MESSAGE" layout instead of aborting
    # the whole run on the first malformed line (a plain access.log line
    # would previously raise ValueError immediately).
    parsed = []
    for raw_line in analyzer.log_lines:
        try:
            parsed.append(analyzer._parse_log_line(raw_line))
        except ValueError:
            pass  # best-effort demo: ignore unparseable lines
    analyzer.log_lines = parsed

    # Filter by ERROR level
    errors = analyzer.filter_by_level('ERROR')
    print(f"Found {len(errors)} ERROR logs.")

    # Search for specific pattern
    pattern = "database connection failed"
    matching_logs = analyzer.search(pattern)
    print(f"Found {len(matching_logs)} logs matching pattern: '{pattern}'.")

    # Count unique errors
    unique_errors = analyzer.count_unique_errors()
    print(f"Number of unique ERROR messages: {unique_errors}.")

    # Calculate error rate in the last hour (3600 seconds)
    error_rate = analyzer.calculate_error_rate(3600)
    print(f"Error rate in last hour: {error_rate:.2f}%.")