Explorar o código

Merge branch 'feature/luojunhui/20260113-fwh-illegal-alert' of Server/LongArticleTaskServer into master

luojunhui hai 1 mes
pai
achega
fd48eb2da1

+ 17 - 2
applications/tasks/data_recycle_tasks/recycle_daily_publish_articles.py

@@ -493,6 +493,7 @@ class RecycleFwhDailyPublishArticlesTask(Const):
         group_id: str,
         illegal_msg: str,
         publish_date: str,
+        article_title: str,
     ):
         await feishu_robot.bot(
             title="服务号文章违规告警,请前往微信公众平台处理",
@@ -502,6 +503,7 @@ class RecycleFwhDailyPublishArticlesTask(Const):
                 "group_id": group_id,
                 "illegal_msg": illegal_msg,
                 "publish_date": str(publish_date),
+                "article_title": article_title,
             },
             env="server_account_publish_monitor",
         )
@@ -548,7 +550,7 @@ class RecycleFwhDailyPublishArticlesTask(Const):
     async def get_stat_published_articles(self, gh_id):
         earliest_timestamp = int(time.time()) - self.STAT_PERIOD
         fetch_query = """
-            select publish_date, account_name, gh_id, user_group_id, url, publish_timestamp
+            select publish_date, account_name, gh_id, user_group_id, url, publish_timestamp, content_id
             from long_articles_group_send_result
             where gh_id = %s and recycle_status = %s and create_time > %s;
         """
@@ -570,6 +572,7 @@ class RecycleFwhDailyPublishArticlesTask(Const):
             user_group_id = article["user_group_id"]
             url = article["url"]
             publish_date = article["publish_date"]
+            content_id = article["content_id"]
             # get article detail info with spider

             try:
@@ -578,15 +581,27 @@ class RecycleFwhDailyPublishArticlesTask(Const):
                 )
                 response_code = article_detail_info["code"]
                 if response_code == self.ARTICLE_ILLEGAL_CODE:
+                    query = """
+                        SELECT article_title FROM long_articles_text WHERE content_id = %s;
+                    """
+                    article_title = await self.pool.async_fetch(
+                        query=query,
+                        params=(content_id,),
+                    )
+                    if article_title:
+                        article_title = article_title[0]["article_title"]
+                    else:
+                        article_title = content_id
                     await self.illegal_article_bot(
                         account_name=account_name,
                         gh_id=gh_id,
                         group_id=user_group_id,
                         illegal_msg=article_detail_info["msg"],
                         publish_date=publish_date,
+                        article_title=article_title,
                     )

-                await asyncio.sleep(1)
+                await asyncio.sleep(3)
                 content_url = article_detail_info["data"]["data"]["content_link"]
                 app_msg_id = content_url.split("mid=")[-1].split("&")[0]
                 wx_sn = content_url.split("sn=")[-1]