aliyun_log.py 1.7 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960
  1. # -*- coding: utf-8 -*-
  2. """
  3. 公共方法,包含:生成log / 删除log
  4. """
  5. from typing import Optional
  6. from loguru import logger
  7. from aliyun.log import PutLogsRequest, LogClient, LogItem
# Disable HTTP(S) proxies. Not referenced anywhere in this file —
# presumably imported by callers and passed to an HTTP client. TODO confirm.
proxies = {"http": None, "https": None}
  9. class AliyunLogger:
  10. # 写入阿里云日志
  11. @staticmethod
  12. def logging(
  13. video_id: str,
  14. title: str,
  15. video_url: str,
  16. version: str,
  17. type: str,
  18. partition: str,
  19. data: Optional[str] = None):
  20. """
  21. 写入阿里云日志
  22. 测试库: https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
  23. 正式库: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
  24. """
  25. accessKeyId = "LTAIWYUujJAm7CbH"
  26. accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
  27. project = "crawler-log-prod"
  28. logstore = "temp"
  29. endpoint = "cn-hangzhou.log.aliyuncs.com"
  30. try:
  31. contents = [
  32. ("video_id", video_id),
  33. ("data", data),
  34. ]
  35. # 创建 LogClient 实例
  36. client = LogClient(endpoint, accessKeyId, accessKey)
  37. log_group = []
  38. log_item = LogItem()
  39. log_item.set_contents(contents)
  40. log_group.append(log_item)
  41. # 写入日志
  42. request = PutLogsRequest(
  43. project=project,
  44. logstore=logstore,
  45. topic="",
  46. source="",
  47. logitems=log_group,
  48. compress=False,
  49. )
  50. client.put_logs(request)
  51. except Exception as e:
  52. logger.info( f'[+] 日志写入失败: {e}' )