zhaohaipeng 1 mese fa
parent
commit
f70e4805d0
2 file modificati con 23 aggiunte e 18 eliminazioni
  1. 12 11
      enums/automation_job.py
  2. 11 7
      monitor/automation_provide_job_monitor.py

+ 12 - 11
enums/automation_job.py

@@ -2,18 +2,19 @@ from enum import Enum
 
 
 class AutomationJobCronInfo(Enum):
-    account_top = ("account", "top", 9)
-    account_extend_top = ("account_extend", "top", 9)
-    channel_topic_top = ("channel_topic", "top", 9)
-    channel_topic_extend_top = ("channel_topic_extend", "top", 9)
-    channel_image_search_video_top = ("channel_image_search_video", "top", 9)
-    channel_image_search_topic_top = ("channel_image_search_topic", "top", 9)
-    channel_image_search_topic_extend_top = ("channel_image_search_topic_extend", "top", 9)
-    channel_image_search_video_all_cate_top = ("channel_image_search_video_all_cate", "top", 9)
-    video_decode_accurate_text_top = ("video_decode_accurate_text", "top", 9)
-    keywords_top = ("keywords", "top", 9)
+    account_top = ("账号自动溯源(每日Top)", "account", "top", 9)
+    account_extend_top = ("账号自动拓展(每日Top)", "account_extend", "top", 9)
+    channel_topic_top = ("话题自动溯源(每日Top)", "channel_topic", "top", 9)
+    channel_topic_extend_top = ("话题自动拓展(每日Top)", "channel_topic_extend", "top", 9)
+    channel_image_search_video_top = ("识图直接供给(每日Top)", "channel_image_search_video", "top", 9)
+    channel_image_search_topic_top = ("识图找话题(每日Top)", "channel_image_search_topic", "top", 9)
+    channel_image_search_topic_extend_top = ("识图找话题拓展(每日Top)", "channel_image_search_topic_extend", "top", 9)
+    channel_image_search_video_all_cate_top = ("识图直接供给_全品类(每日Top)", "channel_image_search_video_all_cate", "top", 9)
+    video_decode_accurate_text_top = ("视频解构精准文本(每日Top)", "video_decode_accurate_text", "top", 9)
+    keywords_top = ("视频解构关键词(每日Top)", "keywords", "top", 9)
 
-    def __init__(self, crawler_mode, video_source, task_start_hour):
+    def __init__(self, name, crawler_mode, video_source, task_start_hour):
+        self.name = name
         self.crawler_mode = crawler_mode
         self.video_source = video_source
         self.task_start_hour = task_start_hour

+ 11 - 7
monitor/automation_provide_job_monitor.py

@@ -1,5 +1,5 @@
 from datetime import datetime, time
-from typing import List, Tuple
+from typing import List
 
 from aliyun.log import LogClient
 from aliyun.log.auth import AUTH_VERSION_4
@@ -86,13 +86,17 @@ def job_run_state(start_ts: int, end_ts: int):
         feishu_inform_util.send_card_msg_to_feishu(webhook, new_card_json)
 
 
-def crawler_mode_not_success_warning(start_ts: int, end_ts: int, crawler_mode_and_video_source_list: List[Tuple[str, str]]):
-    for crawler_mode, video_source in crawler_mode_and_video_source_list:
+def crawler_mode_not_success_warning(start_ts: int, end_ts: int, job_info_list: List[AutomationJobCronInfo]):
+    for job_info in job_info_list:
+        crawler_mode = job_info.crawler_mode
+        video_source = job_info.video_source
+        job_name = job_info.name
+
         query_sql = f"crawlerMode : {crawler_mode} and videoSource : {video_source} and result : true | select count(1) as cnt from log"
         resp = client.get_log(project=project, logstore=log_store, from_time=start_ts, to_time=end_ts, query=query_sql)
         success_cnt = int(resp.get_body().get('data')[0]['cnt'])
         if success_cnt <= 0:
-            msg = f"- 供给方式: {crawler_mode} \n- 视频来源: {video_source} \n- 当天还没有成功执行的任务,请关注"
+            msg = f"- 供给方式: {job_name} \n- 视频来源: {video_source} \n- 当天还没有成功执行的任务,请关注"
             new_card_json = {**card_json, **{}}
             new_card_json['header']['template'] = 'red'
             new_card_json['body']['elements'] = [{
@@ -116,14 +120,14 @@ def main():
 
     current_hour = today.hour
 
-    crawler_mode_and_video_source_list = []
+    need_monitor_job_info_list = []
     for cron_info in AutomationJobCronInfo:
         if current_hour < cron_info.task_start_hour:
             continue
 
-        crawler_mode_and_video_source_list.append((cron_info.crawler_mode, cron_info.video_source))
+        need_monitor_job_info_list.append(cron_info)
 
-    crawler_mode_not_success_warning(start_ts, end_ts, crawler_mode_and_video_source_list)
+    crawler_mode_not_success_warning(start_ts, end_ts, need_monitor_job_info_list)
 
 
 if __name__ == "__main__":