# -*- coding: utf-8 -*-
"""
Shared helpers, including: writing logs / deleting logs.
"""
from datetime import date, timedelta
from datetime import datetime
from typing import Optional

from aliyun.log import PutLogsRequest, LogClient, LogItem

# Disable HTTP/HTTPS proxies for requests made from this module.
proxies = {"http": None, "https": None}


class AliyunLogger:
    # NOTE: these class attributes are evaluated once, at import time,
    # so long-running processes will see stale values.
    # Current time, e.g. <class 'datetime.datetime'> 2022-04-14 20:13:51.244472
    now = datetime.now()
    # Yesterday, e.g. <class 'str'> 2022-04-13
    yesterday = (date.today() + timedelta(days=-1)).strftime("%Y-%m-%d")
    # Today, e.g. <class 'datetime.date'> 2022-04-14
    today = date.today()
    # Tomorrow, e.g. <class 'str'> 2022-04-15
    tomorrow = (date.today() + timedelta(days=1)).strftime("%Y-%m-%d")
    # Write a log entry to Aliyun Log Service.
    @staticmethod
    def logging(video_id: str,
                title: str,
                video_url: str,
                data: Optional[str] = None):
        """
        Write a log entry to Aliyun Log Service.
        Test store: https://sls.console.aliyun.com/lognext/project/crawler-log-dev/logsearch/crawler-log-dev
        Production store: https://sls.console.aliyun.com/lognext/project/crawler-log-prod/logsearch/crawler-log-prod
        """
        accessKeyId = "LTAIWYUujJAm7CbH"
        accessKey = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P"
        project = "crawler-log-prod"
        logstore = "video_tag_info"
        endpoint = "cn-hangzhou.log.aliyuncs.com"
        try:
            # Log contents must be (key, value) string pairs; coerce a
            # missing `data` to an empty string rather than passing None.
            contents = [
                ("video_id", video_id),
                ("video_title", title),
                ("video_url", video_url),
                ("data", data or ""),
            ]
            # Create a LogClient instance
            client = LogClient(endpoint, accessKeyId, accessKey)
            log_group = []
            log_item = LogItem()
            log_item.set_contents(contents)
            log_group.append(log_item)
            # Write the log
            request = PutLogsRequest(
                project=project,
                logstore=logstore,
                topic="",
                source="",
                logitems=log_group,
                compress=False,
            )
            client.put_logs(request)
        except Exception as e:
            print(f"Failed to write log: {e}")
|