# -*- coding: utf-8 -*-
"""
Shared helpers, including: writing logs / deleting logs.
"""
import json
from datetime import date, datetime, timedelta
from typing import Optional

from loguru import logger
from aliyun.log import PutLogsRequest, LogClient, LogItem

# requests-style proxy mapping that disables proxies (unused in this module;
# presumably imported by callers).
proxies = {"http": None, "https": None}


class AliyunLogger:
    # NOTE: these class attributes are evaluated once, at import time.
    # Current time, e.g. <class 'datetime.datetime'> 2022-04-14 20:13:51.244472
    now = datetime.now()
    # Yesterday as a str, e.g. "2022-04-13"
    yesterday = (date.today() + timedelta(days=-1)).strftime("%Y-%m-%d")
    # Today as a datetime.date, e.g. 2022-04-14
    today = date.today()
    # Tomorrow as a str, e.g. "2022-04-15"
    tomorrow = (date.today() + timedelta(days=1)).strftime("%Y-%m-%d")

    # Write a video-tag record to Aliyun Log Service.
    @staticmethod
    def logging(
            video_id: str,
            title: str,
            video_url: str,
            version: str,
            type: str,  # shadows the built-in `type`; name kept for API compatibility
            partition: str,
            data: Optional[str] = None):
        """
        Write to Aliyun Log Service.
        Dev project:  https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Prod project: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        accessKeyId = "LTAIWYUujJAm7CbH"
        accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        project = "crawler-log-prod"
        logstore = "video_tag_info"
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        try:
            # An SLS log entry is a list of (key, value) pairs.
            contents = [
                ("video_id", video_id),
                ("video_title", title),
                ("video_url", video_url),
                ("version", version),
                ("type", type),
                ("partition", partition),
                ("data", data),
            ]
            # Create the LogClient instance.
            client = LogClient(endpoint, accessKeyId, accessKey)
            log_group = []
            log_item = LogItem()
            log_item.set_contents(contents)
            log_group.append(log_item)
            # Send the log group.
            request = PutLogsRequest(
                project=project,
                logstore=logstore,
                topic="",
                source="",
                logitems=log_group,
                compress=False,
            )
            client.put_logs(request)
        except Exception as e:
            logger.error(f"[+] Failed to write log: {e}")
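
    # A minimal usage sketch, assuming the embedded credentials are valid and
    # the "crawler-log-prod" project / "video_tag_info" logstore exist; all
    # field values below are made up for illustration:
    #
    #     AliyunLogger.logging(
    #         video_id="4383845",
    #         title="demo title",
    #         video_url="https://example.com/video.mp4",
    #         version="3.4.5",
    #         type="video",
    #         partition=str(AliyunLogger.today),
    #         data=json.dumps({"tags": ["demo"]}, ensure_ascii=False),
    #     )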

    # Write an ad-tag record to Aliyun Log Service.
    @staticmethod
    def ad_logging(
            ad_id: str,
            creative_code: str,
            creative_title: str,
            material_address: str,
            click_button_text: str,
            creative_logo_address: str,
            update_time: str,
            data: Optional[str] = None):
        """
        Write to Aliyun Log Service.
        Dev project:  https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Prod project: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        accessKeyId = "LTAIWYUujJAm7CbH"
        accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        project = "crawler-log-prod"
        logstore = "ad_tag_info"
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        try:
            # An SLS log entry is a list of (key, value) pairs.
            contents = [
                ("ad_id", ad_id),
                ("creative_code", creative_code),
                ("creative_title", creative_title),
                ("material_address", material_address),
                ("click_button_text", click_button_text),
                ("creative_logo_address", creative_logo_address),
                ("update_time", update_time),
                ("data", data),
            ]
            # Create the LogClient instance.
            client = LogClient(endpoint, accessKeyId, accessKey)
            log_group = []
            log_item = LogItem()
            log_item.set_contents(contents)
            log_group.append(log_item)
            # Send the log group.
            request = PutLogsRequest(
                project=project,
                logstore=logstore,
                topic="",
                source="",
                logitems=log_group,
                compress=False,
            )
            client.put_logs(request)
        except Exception as e:
            logger.error(f"[+] Failed to write log: {e}")
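

# A minimal smoke test, assuming the embedded credentials are valid and the
# project/logstores above exist. NOTE: running this writes a real record to
# the production "ad_tag_info" logstore; all field values are illustrative.
if __name__ == "__main__":
    AliyunLogger.ad_logging(
        ad_id="1001",
        creative_code="CR-0001",
        creative_title="demo creative",
        material_address="https://example.com/material.png",
        click_button_text="Learn more",
        creative_logo_address="https://example.com/logo.png",
        update_time=AliyunLogger.now.strftime("%Y-%m-%d %H:%M:%S"),
        data=json.dumps({"source": "smoke-test"}, ensure_ascii=False),
    )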