# -*- coding: utf-8 -*-
# @Author: 罗俊辉
# @Time: 2023/12/18
"""
Common utilities, including: log generation.
"""
import json
import time

from aliyun.log import LogClient, LogItem, PutLogsRequest

# Proxy settings that disable HTTP(S) proxies (currently unused in this module).
proxies = {"http": None, "https": None}


class AliyunLogger(object):
    """
    Aliyun Log Service (SLS) logging helper.
    """

    def __init__(self, platform, mode, env="prod"):
        self.platform = platform
        self.mode = mode
        self.env = env

    # Write a log entry to Aliyun Log Service
    def logging(self, code, message, data=None, trace_id=None, account=None):
        """
        Write a log entry to Aliyun Log Service.
        Dev store:  https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Prod store: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        # Access settings for Aliyun Log Service.
        # NOTE: credentials are hardcoded here; loading them from environment
        # variables would be safer.
        if data is None:
            data = {}
        accessKeyId = "LTAIWYUujJAm7CbH"
        accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        if self.env == "dev":
            project = "crawler-log-dev"
            logstore = "crawler-log-dev"
        else:
            project = "crawler-log-prod"
            logstore = "crawler-fetch"
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        # Create the LogClient instance
        client = LogClient(endpoint, accessKeyId, accessKey)
        log_group = []
        log_item = LogItem()
        # The generated log body looks like, e.g.:
        #   crawler: xigua
        #   message: does not satisfy the crawl rules
        #   mode: search
        #   timestamp: 1686656143
        # Flatten newlines so the message stays on a single log line.
        message = str(message).replace("\r", " ").replace("\n", " ")
        contents = [
            ("TraceId", str(trace_id)),
            ("code", str(code)),
            ("platform", str(self.platform)),
            ("mode", str(self.mode)),
            ("message", message),
            ("data", json.dumps(data, ensure_ascii=False) if data else ""),
            ("account", str(account)),
            ("timestamp", str(int(time.time()))),
        ]
        log_item.set_contents(contents)
        log_group.append(log_item)
        # Send the log entry
        request = PutLogsRequest(
            project=project,
            logstore=logstore,
            topic="",
            source="",
            logitems=log_group,
            compress=False,
        )
        client.put_logs(request)
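

# A minimal usage sketch (illustrative, not part of the original module).
# The platform/mode/code/account values below are assumed examples; running
# this performs a real write to the dev logstore configured above.
if __name__ == "__main__":
    logger = AliyunLogger(platform="xigua", mode="search", env="dev")
    logger.logging(
        code="1000",
        message="does not satisfy the crawl rules",
        data={"video_id": "demo"},
        trace_id="xigua-search-demo-0001",
        account="demo_account",
    )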