black file

luojunhui, 6 days ago
parent commit c44bff6e5c

+ 1 - 1
applications/api/__init__.py

@@ -43,5 +43,5 @@ __all__ = [
     "auto_create_crawler_task",
     "auto_bind_crawler_task_to_generate_task",
     "AsyncElasticSearchClient",
-    "insert_crawler_relation_to_aigc_system"
+    "insert_crawler_relation_to_aigc_system",
 ]
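
The trailing comma added here is Black's "magic trailing comma": a collection that is already split across lines stays exploded, one element per line, and gains a comma after the last element. A minimal sketch of the same behavior via Black's programmatic API, assuming the black package is installed:

import black

# An __all__ list too long for the default 88-character budget gets
# exploded one element per line, with a trailing comma appended.
src = '__all__ = ["auto_create_crawler_task", "auto_bind_crawler_task_to_generate_task", "AsyncElasticSearchClient"]\n'
print(black.format_str(src, mode=black.Mode()))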

+ 9 - 6
applications/api/async_aigc_system_api.py

@@ -24,6 +24,7 @@ PERSON_COOKIE = {
     "uid": 1,
 }
 
+
 class RelationDict(TypedDict):
     videoPoolTraceId: str
     channelContentId: str
@@ -174,10 +175,12 @@ async def get_generate_task_detail(generate_task_id):
         return {}
 
 
-async def insert_crawler_relation_to_aigc_system(relation_list: List[RelationDict]) -> Optional[Dict]:
-        url = "http://aigc-api.cybertogether.net/aigc/crawler/content/videoPoolCrawlerRelation"
-        payload = json.dumps({"params": {"relations": relation_list}})
-        async with AsyncHttpClient(timeout=60) as client:
-            res = await client.post(url=url, headers=HEADERS, data=payload)
+async def insert_crawler_relation_to_aigc_system(
+    relation_list: List[RelationDict],
+) -> Optional[Dict]:
+    url = "http://aigc-api.cybertogether.net/aigc/crawler/content/videoPoolCrawlerRelation"
+    payload = json.dumps({"params": {"relations": relation_list}})
+    async with AsyncHttpClient(timeout=60) as client:
+        res = await client.post(url=url, headers=HEADERS, data=payload)
 
-        return res
+    return res
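
A hedged call sketch for the reformatted coroutine; the relation values are placeholders, and RelationDict may declare more keys than the two visible in the hunk above:

import asyncio

from applications.api import insert_crawler_relation_to_aigc_system

async def main():
    # Hypothetical payload; real trace/content ids come from the crawler.
    relations = [
        {
            "videoPoolTraceId": "trace-id-placeholder",
            "channelContentId": "content-id-placeholder",
        }
    ]
    res = await insert_crawler_relation_to_aigc_system(relations)
    print(res)

asyncio.run(main())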

+ 1 - 1
applications/config/__init__.py

@@ -38,5 +38,5 @@ __all__ = [
     "input_source_map",
     "name_map",
     "CATEGORY_FEATURES",
-    "CATEGORY_MAP"
+    "CATEGORY_MAP",
 ]

+ 2 - 5
applications/config/cold_start_config.py

@@ -13,10 +13,7 @@ cold_start_category_map = {
     "家长里短": "20250813034159621236902",
     "军事历史": "20250813034227997109122",
     "财经科技": "20250813034253336624837",
-    "政治新闻": "20250813034320561348119"
+    "政治新闻": "20250813034320561348119",
 }
 
-input_source_map = {
-    "weixin": 5,
-    "toutiao": 6
-}
+input_source_map = {"weixin": 5, "toutiao": 6}
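
This hunk shows the inverse rule: with no trailing comma in the source, a mapping that fits within the 88-character limit is collapsed onto one line. A sketch, again assuming the black package:

import black

src = 'input_source_map = {\n    "weixin": 5,\n    "toutiao": 6\n}\n'
print(black.format_str(src, mode=black.Mode()))
# -> input_source_map = {"weixin": 5, "toutiao": 6}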

+ 2 - 2
applications/config/task_chinese_name.py

@@ -14,5 +14,5 @@ name_map = {
     "get_off_videos": "自动下架视频",
     "check_publish_video_audit_status": "校验发布视频状态",
     "check_kimi_balance": "检验kimi余额",
-    "account_category_analysis": "账号品类分析"
-}
+    "account_category_analysis": "账号品类分析",
+}

+ 5 - 8
applications/service/task_manager_service.py

@@ -106,17 +106,14 @@ class TaskManagerService(TaskConst):
         items = [
             {
                 **r,
-                "status_text": self.STATUS_TEXT.get(r["task_status"], str(r["task_status"])),
-                "task_name": get_task_chinese_name(_safe_json(r["data"]))
+                "status_text": self.STATUS_TEXT.get(
+                    r["task_status"], str(r["task_status"])
+                ),
+                "task_name": get_task_chinese_name(_safe_json(r["data"])),
             }
             for r in rows
         ]
-        return {
-            "total": total,
-            "page": page,
-            "page_size": page_size,
-            "items": items
-        }
+        return {"total": total, "page": page, "page_size": page_size, "items": items}
 
     async def get_task(self, task_id: int):
         pass
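
For context, the status_text expression leans on dict.get's default argument so unknown status codes degrade to their string form instead of raising KeyError. A minimal sketch with hypothetical status values; the real STATUS_TEXT lives on TaskConst and is not shown in this hunk:

# Hypothetical mapping; the real one is defined on TaskConst.
STATUS_TEXT = {0: "pending", 1: "running", 2: "done"}

def status_text(task_status: int) -> str:
    # Known codes map to a label; unknown codes fall back to str(code).
    return STATUS_TEXT.get(task_status, str(task_status))

assert status_text(1) == "running"
assert status_text(99) == "99"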

+ 1 - 1
applications/tasks/algorithm_tasks/__init__.py

@@ -1,3 +1,3 @@
 from .account_category_analysis import AccountCategoryAnalysis
 
-__all__ = ["AccountCategoryAnalysis"]
\ No newline at end of file
+__all__ = ["AccountCategoryAnalysis"]

+ 6 - 3
applications/tasks/analysis_task/crawler_detail.py

@@ -1,4 +1,3 @@
-
 from applications.api import feishu_robot
 
 
@@ -152,10 +151,14 @@ class CrawlerDetailDeal(CrawlerVideoDetailAnalysis, CrawlerArticleDetailAnalysis
 
         match media_type:
             case "video":
-                crawler_detail = await self.analysis_video_pool(sub_task, start_date, end_date)
+                crawler_detail = await self.analysis_video_pool(
+                    sub_task, start_date, end_date
+                )
 
             case "article":
-                crawler_detail = await self.analysis_article_pool(sub_task, start_date, end_date)
+                crawler_detail = await self.analysis_article_pool(
+                    sub_task, start_date, end_date
+                )
             case _:
                 return None
 
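The match statement dispatches on media_type with a wildcard fallback; a self-contained sketch of the same shape (Python >= 3.10), with stand-ins for the two analysis coroutines:

import asyncio

async def dispatch(media_type: str):
    # Mirrors CrawlerDetailDeal: unknown media types short-circuit to None.
    match media_type:
        case "video":
            return "video pool analysis"
        case "article":
            return "article pool analysis"
        case _:
            return None

assert asyncio.run(dispatch("video")) == "video pool analysis"
assert asyncio.run(dispatch("audio")) is None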

+ 2 - 2
applications/tasks/data_recycle_tasks/recycle_daily_publish_articles.py

@@ -32,8 +32,8 @@ class Const:
         "gh_6cfd1132df94",
         "gh_7f5075624a50",
         "gh_d4dffc34ac39",
-        'gh_c69776baf2cd',
-        'gh_9877c8541764'
+        "gh_c69776baf2cd",
+        "gh_9877c8541764",
     ]
 
     # 文章状态

+ 14 - 16
applications/utils/common.py

@@ -225,30 +225,28 @@ def get_task_chinese_name(data):
     """
     通过输入任务详情信息获取任务名称
     """
-    task_name = data['task_name']
+    task_name = data["task_name"]
     task_name_chinese = name_map.get(task_name, task_name)
 
     # account_method
-    if task_name == 'crawler_gzh_articles':
-        account_method = data.get('account_method', '')
-        account_method = account_method.replace("account_association", "账号联想").replace("search", "")
-        crawl_mode = data.get('crawl_mode', '')
+    if task_name == "crawler_gzh_articles":
+        account_method = data.get("account_method", "")
+        account_method = account_method.replace(
+            "account_association", "账号联想"
+        ).replace("search", "")
+        crawl_mode = data.get("crawl_mode", "")
         crawl_mode = crawl_mode.replace("search", "搜索").replace("account", "抓账号")
-        strategy = data.get('strategy', '')
+        strategy = data.get("strategy", "")
         return f"{task_name_chinese}\t{crawl_mode}\t{account_method}\t{strategy}"
-    elif task_name == 'article_pool_cold_start':
-        platform = data.get('platform', '')
-        platform = platform.replace('toutiao', '今日头条').replace("weixin", "微信")
-        strategy = data.get('strategy', '')
+    elif task_name == "article_pool_cold_start":
+        platform = data.get("platform", "")
+        platform = platform.replace("toutiao", "今日头条").replace("weixin", "微信")
+        strategy = data.get("strategy", "")
         strategy = strategy.replace("strategy", "策略")
-        category_list = data.get('category_list', [])
+        category_list = data.get("category_list", [])
         category_list = "、".join(category_list)
-        crawler_methods = data.get('crawler_methods', [])
+        crawler_methods = data.get("crawler_methods", [])
         crawler_methods = "、".join(crawler_methods)
         return f"{task_name_chinese}\t{platform}\t{crawler_methods}\t{category_list}\t{strategy}"
     else:
         return task_name_chinese
-
-
-
-
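
A hedged input/output sketch for the article_pool_cold_start branch above; the values are illustrative, and the first output field stays as the raw task_name unless name_map has an entry for it (the name_map hunk above shows none for this task):

data = {
    "task_name": "article_pool_cold_start",
    "platform": "toutiao",       # -> "今日头条"
    "strategy": "strategy_v1",   # -> "策略_v1"
    "category_list": ["财经科技"],
    "crawler_methods": ["search"],
}
print(get_task_chinese_name(data))
# -> "article_pool_cold_start\t今日头条\tsearch\t财经科技\t策略_v1"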