import argparse
import os
import hashlib
import json
def cache_log_data(log_file, cache_dir="cache"):
    """Cache the lines of a log file, keyed by an MD5 hash of the file path."""
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    # The cache key is derived from the log file's path, so each path gets its own cache entry.
    cache_file = os.path.join(cache_dir, f"{hashlib.md5(log_file.encode()).hexdigest()}.json")

    if os.path.exists(cache_file):
        with open(cache_file, "r") as f:
            try:
                return json.load(f)  # Load from cache
            except json.JSONDecodeError:
                print(f"Warning: Corrupted cache file {cache_file}. Recomputing.")

    try:
        with open(log_file, "r") as f:
            data = f.readlines()
    except FileNotFoundError:
        print(f"Error: Log file not found: {log_file}")
        return None
    except Exception as e:
        print(f"Error reading log file: {e}")
        return None

    # Store the data in the cache
    with open(cache_file, "w") as f:
        json.dump(data, f, indent=4)

    return data
def main():
    parser = argparse.ArgumentParser(description="Process log files with caching.")
    parser.add_argument("log_file", help="Path to the log file.")
    parser.add_argument("--cache_dir", default="cache", help="Directory to store cached data.")
    parser.add_argument("--process_log", action="store_true", help="Process the log file.")
    parser.add_argument("--print_log", action="store_true", help="Print the log file content.")
    args = parser.parse_args()

    if args.process_log or args.print_log:
        cached_data = cache_log_data(args.log_file, args.cache_dir)
        if cached_data is not None:
            if args.process_log:
                # Process the log data here. Example:
                for line in cached_data:
                    print(line.strip())
            if args.print_log:
                print("Log File Content:")
                for line in cached_data:
                    print(line.strip())
    else:
        # Just cache the data
        cache_log_data(args.log_file, args.cache_dir)


if __name__ == "__main__":
    main()
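
A minimal sketch of using cache_log_data programmatically rather than through the command line, assuming the listing above is saved as a module named log_cache.py and that a file named app.log exists in the working directory (both names are illustrative, not part of the original script):

from log_cache import cache_log_data  # hypothetical module name for the listing above

# The first call reads app.log and writes cache/<md5-of-path>.json;
# later calls with the same path return the cached JSON instead.
lines = cache_log_data("app.log", cache_dir="cache")
if lines is not None:
    print(f"Loaded {len(lines)} lines")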
