فهرست منبع

微信文章详情更新

luojunhui 1 ماه پیش
والد
کامیت
8756de1158
2 فایل‌های تغییر یافته به همراه 36 افزوده شده و 6 حذف شده
  1. 28 6
      applications/tasks/data_recycle_tasks/article_detail_stat.py
  2. 8 0
      applications/tasks/task_handler.py

+ 28 - 6
applications/tasks/data_recycle_tasks/article_detail_stat.py

@@ -1,5 +1,6 @@
 import json
 import time
+import traceback
 from datetime import datetime, timedelta
 
 from applications.api import feishu_robot
@@ -33,9 +34,9 @@ class ArticleDetailStatConst:
 
 
 class ArticleDetailStatMapper(ArticleDetailStatConst):
-    def __init__(self, pool, log_client):
+    def __init__(self, pool, log_service):
         self.pool = pool
-        self.log_client = log_client
+        self.log_service = log_service
 
     # 获取账号信息
     async def fetch_monitor_accounts(self):
@@ -135,8 +136,8 @@ class ArticleDetailStatMapper(ArticleDetailStatConst):
 
 
 class ArticleDetailStat(ArticleDetailStatMapper):
-    def __init__(self, pool, log_client):
-        super().__init__(pool, log_client)
+    def __init__(self, pool, log_service):
+        super().__init__(pool, log_service)
 
     # 存储账号信息
     async def save_account_details(self, account, fetch_response):
@@ -282,7 +283,7 @@ class ArticleDetailStat(ArticleDetailStatMapper):
         # yesterday_string = datetime.strftime(datetime.now() - timedelta(days=5), "%Y-%m-%d")
         dt_list = [
             (datetime.now() - timedelta(days=i)).strftime("%Y-%m-%d")
-            for i in range(1, 31)
+            for i in range(1, 2)
         ]
         for dt in dt_list:
             print(f"{account['account_name']} crawl {dt} read_data")
@@ -304,5 +305,26 @@ class ArticleDetailStat(ArticleDetailStatMapper):
     # 入口函数
     async def deal(self):
         accounts = await self.fetch_monitor_accounts()
+        if not accounts:
+            return
+
         for account in accounts:
-            await self.process_single_account(account)
+            try:
+                await self.process_single_account(account)
+                await self.log_service.log(
+                    contents={
+                        "task": "article_detail_stat",
+                        "account_name": account["account_name"],
+                        "status": "success"
+                    }
+                )
+            except Exception as e:
+                await self.log_service.log(
+                    contents={
+                        "task": "article_detail_stat",
+                        "account_name": account["account_name"],
+                        "error": str(e),
+                        "traceback": traceback.format_exc(),
+                        "status": "fail"
+                    }
+                )

+ 8 - 0
applications/tasks/task_handler.py

@@ -15,6 +15,7 @@ from applications.tasks.crawler_tasks import WeixinAccountManager
 from applications.tasks.crawler_tasks import CrawlerGzhAccountArticles
 from applications.tasks.crawler_tasks import CrawlerGzhSearchArticles
 
+from applications.tasks.data_recycle_tasks import ArticleDetailStat
 from applications.tasks.data_recycle_tasks import RecycleDailyPublishArticlesTask
 from applications.tasks.data_recycle_tasks import RecycleOutsideAccountArticlesTask
 from applications.tasks.data_recycle_tasks import CheckDailyPublishArticlesTask
@@ -219,6 +220,13 @@ class TaskHandler:
         return TaskStatus.SUCCESS
 
     # ==================== 数据回收类任务 ====================
+    @register("article_detail_stat")
+    async def _article_detail_stat_handler(self) -> int:
+        """文章详情统计"""
+        task = ArticleDetailStat(self.db_client, self.log_client)
+        await task.deal()
+        return TaskStatus.SUCCESS
+
 
     @register("daily_publish_articles_recycle")
     async def _recycle_article_data_handler(self) -> int: