# -*- coding: utf-8 -*-
# @Author: 罗俊辉
# @Time: 2023/10/23
"""
Common utilities: write log records to Aliyun Log Service (SLS).
"""
import json
import os
import time
from datetime import date, datetime, timedelta

from aliyun.log import LogClient, LogItem, PutLogsRequest

# Disable HTTP(S) proxies for code in this module that honors this mapping.
proxies = {"http": None, "https": None}


class AliyunLogger:
    # NOTE(review): these class attributes are evaluated ONCE, at import time.
    # In a long-running process they go stale after midnight — confirm callers
    # only need import-time values before relying on them.
    # Current timestamp, e.g. 2022-04-14 20:13:51.244472
    now = datetime.now()
    # Yesterday as a string, e.g. "2022-04-13"
    yesterday = (date.today() + timedelta(days=-1)).strftime("%Y-%m-%d")
    # Today as a datetime.date (not a string), e.g. 2022-04-14
    today = date.today()
    # Tomorrow as a string, e.g. "2022-04-15"
    tomorrow = (date.today() + timedelta(days=1)).strftime("%Y-%m-%d")

    @staticmethod
    def logging(
        code, platform, mode, env, message, data=None, trace_id=None, account=None
    ):
        """
        Write one log record to Aliyun Log Service.

        :param code: business status code; stored as a string field.
        :param platform: crawler platform identifier.
        :param mode: run mode (e.g. "search").
        :param env: "dev" routes to the dev project/logstore; any other value
            routes to the prod project/logstore.
        :param message: free-text message; CR/LF characters are flattened to
            spaces so the record stays a single field value.
        :param data: optional dict payload, JSON-serialized into the record
            (empty/None is stored as "").
        :param trace_id: optional request trace id.
        :param account: optional account identifier.

        Dev console:
        https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Prod console:
        https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        if data is None:
            data = {}
        # SECURITY: credentials were hard-coded in source. Prefer environment
        # variables, falling back to the legacy literals for backward
        # compatibility. Rotate these keys and drop the fallbacks ASAP.
        access_key_id = os.environ.get(
            "ALIYUN_LOG_ACCESS_KEY_ID", "LTAIWYUujJAm7CbH"
        )
        access_key_secret = os.environ.get(
            "ALIYUN_LOG_ACCESS_KEY_SECRET", "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        )
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        if env == "dev":
            project = "crawler-log-dev"
            logstore = "crawler-log-dev"
        else:
            project = "crawler-log-prod"
            logstore = "crawler-fetch"

        client = LogClient(endpoint, access_key_id, access_key_secret)
        log_item = LogItem()
        # Coerce to str first: the original assumed str and crashed on other
        # types. Flatten newlines so the message is a single SLS field value.
        message = str(message).replace("\r", " ").replace("\n", " ")
        contents = [
            ("TraceId", str(trace_id)),
            ("code", str(code)),
            ("platform", str(platform)),
            ("mode", str(mode)),
            ("message", message),
            ("data", json.dumps(data, ensure_ascii=False) if data else ""),
            ("account", str(account)),
            ("timestamp", str(int(time.time()))),
        ]
        log_item.set_contents(contents)
        # Ship the single-item log group to SLS.
        request = PutLogsRequest(
            project=project,
            logstore=logstore,
            topic="",
            source="",
            logitems=[log_item],
            compress=False,
        )
        client.put_logs(request)