#!/usr/bin/env python
"""
Log Reader Utility

This script provides a convenient way to read and filter log files
during development.
"""

import os
import sys
import time
import argparse
from datetime import datetime


def parse_args():
    """Parse command line arguments"""
    parser = argparse.ArgumentParser(description='Read and filter log files')
    parser.add_argument('--file', type=str,
                        help='Log file to read (defaults to most recent .log file)')
    parser.add_argument('--tail', type=int, default=50,
                        help='Number of lines to show from the end')
    parser.add_argument('--follow', '-f', action='store_true',
                        help='Follow the file as it grows')
    parser.add_argument('--filter', type=str,
                        help='Only show lines containing this string')
    parser.add_argument('--list', action='store_true',
                        help='List all log files sorted by modification time')
    return parser.parse_args()


def get_most_recent_log():
    """Find the most recently modified log file"""
    log_files = [f for f in os.listdir('.') if f.endswith('.log')]
    if not log_files:
        print("No log files found in current directory.")
        sys.exit(1)

    # Sort by modification time (newest first)
    log_files.sort(key=lambda x: os.path.getmtime(x), reverse=True)
    return log_files[0]


def list_log_files():
    """List all log files sorted by modification time"""
    log_files = [f for f in os.listdir('.') if f.endswith('.log')]
    if not log_files:
        print("No log files found in current directory.")
        sys.exit(1)

    # Sort by modification time (newest first)
    log_files.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    print(f"{'LAST MODIFIED':<20} {'SIZE':<10} FILENAME")
    print("-" * 60)
    for log_file in log_files:
        mtime = datetime.fromtimestamp(os.path.getmtime(log_file))
        size = os.path.getsize(log_file)
        size_str = f"{size / 1024:.1f} KB" if size > 1024 else f"{size} B"
        print(f"{mtime.strftime('%Y-%m-%d %H:%M:%S'):<20} {size_str:<10} {log_file}")


def read_log_tail(file_path, num_lines, filter_text=None):
    """Read the last N lines of a file"""
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            # Read all lines (inefficient but simple)
            lines = f.readlines()

        # Filter if needed
        if filter_text:
            lines = [line for line in lines if filter_text in line]

        # Get the last N lines
        last_lines = lines[-num_lines:] if len(lines) > num_lines else lines
        return last_lines
    except Exception as e:
        print(f"Error reading file: {str(e)}")
        sys.exit(1)


def follow_log(file_path, filter_text=None):
    """Follow the log file as it grows (like tail -f)"""
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            # Go to the end of the file
            f.seek(0, 2)

            while True:
                line = f.readline()
                if line:
                    if not filter_text or filter_text in line:
                        # Remove newlines at the end to avoid double spacing
                        print(line.rstrip())
                else:
                    time.sleep(0.1)  # Sleep briefly to avoid consuming CPU
    except KeyboardInterrupt:
        print("\nLog reading stopped.")
    except Exception as e:
        print(f"Error following file: {str(e)}")
        sys.exit(1)


def main():
    """Main function"""
    args = parse_args()

    # List all log files if requested
    if args.list:
        list_log_files()
        return

    # Determine which file to read
    file_path = args.file
    if not file_path:
        file_path = get_most_recent_log()
        print(f"Reading most recent log file: {file_path}")

    # Follow mode (like tail -f)
    if args.follow:
        print(f"Following {file_path} (Press Ctrl+C to stop)...")
        # First print the tail
        for line in read_log_tail(file_path, args.tail, args.filter):
            print(line.rstrip())
        print("-" * 80)
        print("Waiting for new content...")
        # Then follow
        follow_log(file_path, args.filter)
    else:
        # Just print the tail
        for line in read_log_tail(file_path, args.tail, args.filter):
            print(line.rstrip())


if __name__ == "__main__":
    main()
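
# Example invocations, using only the flags defined in parse_args() above.
# The filename "log_reader.py" and the log name "app.log" are placeholders;
# substitute whatever names are actually used in your project.
#
#   python log_reader.py --list                         # list .log files in the current directory
#   python log_reader.py --tail 100                     # last 100 lines of the newest .log file
#   python log_reader.py --file app.log --filter ERROR  # only lines containing "ERROR"
#   python log_reader.py -f                             # follow the newest .log file (Ctrl+C to stop)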