aliyun_log.py

# -*- coding: utf-8 -*-
"""
Shared helpers, including: writing logs / deleting logs.
"""
from datetime import date, datetime, timedelta
from typing import Optional

from loguru import logger
from aliyun.log import PutLogsRequest, LogClient, LogItem

proxies = {"http": None, "https": None}


class AliyunLogger:
    # NOTE: these class attributes are evaluated once, at import time, so in a
    # long-running process they will not track the current date.
    # Current time, e.g. <class 'datetime.datetime'> 2022-04-14 20:13:51.244472
    now = datetime.now()
    # Yesterday, e.g. <class 'str'> 2022-04-13
    yesterday = (date.today() + timedelta(days=-1)).strftime("%Y-%m-%d")
    # Today, e.g. <class 'datetime.date'> 2022-04-14
    today = date.today()
    # Tomorrow, e.g. <class 'str'> 2022-04-15
    tomorrow = (date.today() + timedelta(days=1)).strftime("%Y-%m-%d")

    # Write a video-tag record to Aliyun Log Service
    @staticmethod
    def logging(
        video_id: str,
        title: str,
        video_url: str,
        version: str,
        type: str,  # shadows the builtin `type`; name kept for caller compatibility
        partition: str,
        data: Optional[str] = None,
    ):
        """
        Write to Aliyun Log Service.
        Test project: https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Production project: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        # Hardcoded credentials; loading them from environment variables or a
        # secrets manager would be safer.
        accessKeyId = "LTAIWYUujJAm7CbH"
        accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        project = "crawler-log-prod"
        logstore = "video_tag_info"
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        try:
            # Each log record is a list of (key, value) pairs
            contents = [
                ("video_id", video_id),
                ("video_title", title),
                ("video_url", video_url),
                ("version", version),
                ("type", type),
                ("partition", partition),
                ("data", data),
            ]
            # Create the LogClient instance
            client = LogClient(endpoint, accessKeyId, accessKey)
            log_group = []
            log_item = LogItem()
            log_item.set_contents(contents)
            log_group.append(log_item)
            # Write the log group
            request = PutLogsRequest(
                project=project,
                logstore=logstore,
                topic="",
                source="",
                logitems=log_group,
                compress=False,
            )
            client.put_logs(request)
        except Exception as e:
            # A write failure is an error, not routine info
            logger.error(f"[+] Failed to write log: {e}")

    # Write an ad-tag record to Aliyun Log Service
    @staticmethod
    def ad_logging(
        ad_id: str,
        creative_code: str,
        creative_title: str,
        material_address: str,
        click_button_text: str,
        creative_logo_address: str,
        update_time: str,
        data: Optional[str] = None,
    ):
        """
        Write to Aliyun Log Service.
        Test project: https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Production project: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        accessKeyId = "LTAIWYUujJAm7CbH"
        accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        project = "crawler-log-prod"
        logstore = "ad_tag_info"
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        try:
            contents = [
                ("ad_id", ad_id),
                ("creative_code", creative_code),
                ("creative_title", creative_title),
                ("material_address", material_address),
                ("click_button_text", click_button_text),
                ("creative_logo_address", creative_logo_address),
                ("update_time", update_time),
                ("data", data),
            ]
            # Create the LogClient instance
            client = LogClient(endpoint, accessKeyId, accessKey)
            log_group = []
            log_item = LogItem()
            log_item.set_contents(contents)
            log_group.append(log_item)
            # Write the log group
            request = PutLogsRequest(
                project=project,
                logstore=logstore,
                topic="",
                source="",
                logitems=log_group,
                compress=False,
            )
            client.put_logs(request)
        except Exception as e:
            logger.error(f"[+] Failed to write log: {e}")
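

# A minimal usage sketch, assuming the hardcoded credentials above are valid
# and the crawler-log-prod project with its video_tag_info / ad_tag_info
# logstores exists. All argument values below are hypothetical placeholders,
# not real records.
if __name__ == "__main__":
    AliyunLogger.logging(
        video_id="10001",
        title="demo title",
        video_url="https://example.com/video/10001",
        version="1.0.0",
        type="video",
        partition=AliyunLogger.today.strftime("%Y-%m-%d"),
        data=None,
    )
    AliyunLogger.ad_logging(
        ad_id="20001",
        creative_code="c-0001",
        creative_title="demo creative",
        material_address="https://example.com/material/20001.mp4",
        click_button_text="Download",
        creative_logo_address="https://example.com/logo/20001.png",
        update_time=AliyunLogger.now.strftime("%Y-%m-%d %H:%M:%S"),
        data=None,
    )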