# aliyun_log.py
  1. # -*- coding: utf-8 -*-
  2. """
  3. 公共方法,包含:生成log / 删除log
  4. """
  5. from typing import Optional
  6. from loguru import logger
  7. from aliyun.log import PutLogsRequest, LogClient, LogItem
# Explicitly disable HTTP/HTTPS proxies (None = no proxy).
# NOTE(review): no visible consumer in this file — presumably passed to a
# requests-style client elsewhere; confirm before removing.
proxies = {"http": None, "https": None}
  9. class AliyunLogger:
  10. # 写入阿里云日志
  11. @staticmethod
  12. def logging(
  13. video_id: str,
  14. data: Optional[str] = None):
  15. """
  16. 写入阿里云日志
  17. 测试库: https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
  18. 正式库: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
  19. """
  20. accessKeyId = "LTAIWYUujJAm7CbH"
  21. accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
  22. project = "crawler-log-prod"
  23. logstore = "temp"
  24. endpoint = "cn-hangzhou.log.aliyuncs.com"
  25. try:
  26. contents = [
  27. ("video_id", video_id),
  28. ("data", data),
  29. ]
  30. # 创建 LogClient 实例
  31. client = LogClient(endpoint, accessKeyId, accessKey)
  32. log_group = []
  33. log_item = LogItem()
  34. log_item.set_contents(contents)
  35. log_group.append(log_item)
  36. # 写入日志
  37. request = PutLogsRequest(
  38. project=project,
  39. logstore=logstore,
  40. topic="",
  41. source="",
  42. logitems=log_group,
  43. compress=False,
  44. )
  45. client.put_logs(request)
  46. except Exception as e:
  47. logger.info( f'[+] 日志写入失败: {e}' )