# -*- coding: utf-8 -*-
"""
Shared helpers, including: generating logs / deleting logs.
"""
from datetime import date, datetime, timedelta
from typing import Optional

from loguru import logger
from aliyun.log import PutLogsRequest, LogClient, LogItem

proxies = {"http": None, "https": None}


class AliyunLogger:
    # NOTE: class attributes are evaluated once, at import time, so these
    # capture the moment the module was loaded, not the current time.
    # Current timestamp, e.g. 2022-04-14 20:13:51.244472
    now = datetime.now()
    # Yesterday, e.g. 2022-04-13
    yesterday = (date.today() + timedelta(days=-1)).strftime("%Y-%m-%d")
    # Today, e.g. 2022-04-14
    today = date.today()
    # Tomorrow, e.g. 2022-04-15
    tomorrow = (date.today() + timedelta(days=1)).strftime("%Y-%m-%d")

    # Write a video-tag record to Aliyun Log Service (SLS)
    @staticmethod
    def logging(
            video_id: str,
            title: str,
            video_url: str,
            version: str,
            type: str,  # shadows the built-in `type`; name kept for caller compatibility
            partition: str,
            data: Optional[str] = None):
        """
        Write to Aliyun Log Service.
        Test project:       https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Production project: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        accessKeyId = "LTAIWYUujJAm7CbH"
        accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        project = "crawler-log-prod"
        logstore = "video_tag_info"
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        try:
            contents = [
                ("video_id", video_id),
                ("video_title", title),
                ("video_url", video_url),
                ("version", version),
                ("type", type),
                ("partition", partition),
                ("data", data),
            ]
            # Create the LogClient instance
            client = LogClient(endpoint, accessKeyId, accessKey)
            log_group = []
            log_item = LogItem()
            log_item.set_contents(contents)
            log_group.append(log_item)
            # Write the log
            request = PutLogsRequest(
                project=project,
                logstore=logstore,
                topic="",
                source="",
                logitems=log_group,
                compress=False,
            )
            client.put_logs(request)
        except Exception as e:
            # Report at error level; the exception is swallowed so callers keep running.
            logger.error(f"[+] Failed to write log: {e}")

    # Write an ad-tag record to Aliyun Log Service (SLS)
    @staticmethod
    def ad_logging(
            ad_id: str,
            creative_code: str,
            creative_title: str,
            material_address: str,
            click_button_text: str,
            creative_logo_address: str,
            update_time: str,
            data: Optional[str] = None):
        """
        Write to Aliyun Log Service.
        Test project:       https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Production project: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        accessKeyId = "LTAIWYUujJAm7CbH"
        accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        project = "crawler-log-prod"
        logstore = "ad_tag_info"
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        try:
            contents = [
                ("ad_id", ad_id),
                ("creative_code", creative_code),
                ("creative_title", creative_title),
                ("material_address", material_address),
                ("click_button_text", click_button_text),
                ("creative_logo_address", creative_logo_address),
                ("update_time", update_time),
                ("data", data),
            ]
            # Create the LogClient instance
            client = LogClient(endpoint, accessKeyId, accessKey)
            log_group = []
            log_item = LogItem()
            log_item.set_contents(contents)
            log_group.append(log_item)
            # Write the log
            request = PutLogsRequest(
                project=project,
                logstore=logstore,
                topic="",
                source="",
                logitems=log_group,
                compress=False,
            )
            client.put_logs(request)
        except Exception as e:
            logger.error(f"[+] Failed to write log: {e}")
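

# --- Usage sketch -----------------------------------------------------------
# A minimal example of calling the two writers above. All field values here
# are illustrative placeholders (not from the original module), and the calls
# only reach SLS when the hardcoded project, endpoint, and credentials above
# are valid; otherwise the failure is caught and logged inside each method.
if __name__ == "__main__":
    AliyunLogger.logging(
        video_id="123456",                          # placeholder ID
        title="demo video",
        video_url="https://example.com/video.mp4",  # placeholder URL
        version="1.0.0",
        type="recommend",
        partition=str(AliyunLogger.today),
    )
    AliyunLogger.ad_logging(
        ad_id="654321",                             # placeholder ID
        creative_code="cc-001",
        creative_title="demo creative",
        material_address="https://example.com/material.png",
        click_button_text="Download",
        creative_logo_address="https://example.com/logo.png",
        update_time=AliyunLogger.now.strftime("%Y-%m-%d %H:%M:%S"),
    )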