monitor_logs.py 1.6 KB

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/7/17
import glob
import datetime
import os.path
import re
import time


class MonitorLogs:
    @classmethod
    def monitor_logs(cls, log_type, crawler):
        # Collect today's log files for the given crawler
        log_files = glob.glob(f"./{crawler}/logs/*-{datetime.datetime.now().date().strftime('%Y-%m-%d')}.log")
        if len(log_files) == 0:
            print("No up-to-date log files found")
            return
        for log_file in log_files:
            # Skip file names containing "author" followed by a digit
            if re.search(r"author\d", log_file):
                continue
            # Read the contents of the log file
            with open(log_file, "r") as file:
                logs = file.readlines()
            # Drop blank lines
            logs = [log.strip() for log in logs if log.strip()]
            # Skip files that contain no non-blank entries
            if not logs:
                continue
            # Take the most recent entry and parse its leading timestamp
            latest_log = logs[-1]
            log_time = datetime.datetime.strptime(latest_log[:19], "%Y-%m-%d %H:%M:%S")
            log_time_stamp = int(log_time.timestamp())
            print("Log file:", log_file)
            print("Latest log time:", log_time)
            print("Latest log timestamp:", log_time_stamp)
            print("Latest log content:\n", latest_log[24:])
            # Report when the newest entry is older than 5 minutes
            if int(time.time()) - log_time_stamp > 60 * 5:
                print(f"{int(time.time())} - {log_time_stamp} = {int(time.time()) - log_time_stamp} > {60 * 5}")


if __name__ == "__main__":
    MonitorLogs.monitor_logs("author", "gongzhonghao")
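
As written, the script reports once per invocation and exits. If the check is meant to run continuously, a minimal sketch like the one below could wrap it in a periodic loop; the import path and the 5-minute interval are assumptions for illustration, not part of the original repo.

# run_monitor.py -- hypothetical runner, not part of the original file.
# Assumes monitor_logs.py is importable from the working directory.
import time

from monitor_logs import MonitorLogs

if __name__ == "__main__":
    while True:
        # Check the freshest log entry for the "gongzhonghao" crawler
        MonitorLogs.monitor_logs("author", "gongzhonghao")
        # Sleep for the same 5-minute window the staleness check uses
        time.sleep(60 * 5)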