import argparse
import os
import hashlib
import json


def cache_log_data(log_file, cache_dir="cache"):
    """Cache the lines of a log file, keyed by an MD5 hash of the file path."""
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    # The cache file name is derived from the log file's path, not its contents,
    # so the same path always maps to the same cache entry.
    cache_file = os.path.join(
        cache_dir, f"{hashlib.md5(log_file.encode()).hexdigest()}.json"
    )
    if os.path.exists(cache_file):
        with open(cache_file, "r") as f:
            try:
                return json.load(f)  # Cache hit: return the stored lines.
            except json.JSONDecodeError:
                print(f"Warning: Corrupted cache file {cache_file}. Recomputing.")
    try:
        with open(log_file, "r") as f:
            data = f.readlines()
    except FileNotFoundError:
        print(f"Error: Log file not found: {log_file}")
        return None
    except Exception as e:
        print(f"Error reading log file: {e}")
        return None
    # Store the freshly read lines in the cache for next time.
    with open(cache_file, "w") as f:
        json.dump(data, f, indent=4)
    return data
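
# Sketch of direct (non-CLI) use, assuming a log file exists at the
# hypothetical path "app.log"; the cache key is md5("app.log"), so repeated
# calls with the same path reuse cache/<md5 hex digest>.json:
#
#   lines = cache_log_data("app.log", cache_dir="cache")
#   # lines is a list of strings (one per log line), or None on read failure.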


def main():
    parser = argparse.ArgumentParser(description="Process log files with caching.")
    parser.add_argument("log_file", help="Path to the log file.")
    parser.add_argument("--cache_dir", default="cache", help="Directory to store cached data.")
    parser.add_argument("--process_log", action="store_true", help="Process the log file.")
    parser.add_argument("--print_log", action="store_true", help="Print the log file content.")
    args = parser.parse_args()

    if args.process_log or args.print_log:
        cached_data = cache_log_data(args.log_file, args.cache_dir)
        # Check against None so an empty (but valid) log file is not treated as an error.
        if cached_data is not None:
            if args.process_log:
                # Process the log data here. Example:
                for line in cached_data:
                    print(line.strip())
            if args.print_log:
                print("Log File Content:")
                for line in cached_data:
                    print(line.strip())
    else:
        # No action flags given: just warm the cache.
        cache_log_data(args.log_file, args.cache_dir)


if __name__ == "__main__":
    main()
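
# Example invocations (the script name "cache_logs.py" and the log path
# "app.log" are assumptions for illustration, not part of the original):
#
#   python cache_logs.py app.log --print_log
#   python cache_logs.py app.log --cache_dir /tmp/log_cache --process_log
#   python cache_logs.py app.log            # only warms the cache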