Explorar o código

feat:修改脚本

zhaohaipeng hai 1 semana
pai
achega
bc44c66979
Modificáronse 3 ficheiros con 28 adicións e 17 borrados
  1. +10 −11
      script/apollo_main.py
  2. +7 −6
      script/automation_provide_job_repair.py
  3. +11 −0
      script/main.py

+ 10 - 11
script/apollo_main.py

@@ -20,22 +20,21 @@ def main():
     pre_filter_threshold = config_json['preFilterThreshold']
     prompt_id = config_json['promptId']
     model_value_config = config_json['modelValueConfig']
+    pre_filter_search_accounts = config_json['preFilterSearchAccountIds']
     for merge_cate2 in config_json['mergeCate2PatternPlanIdMap']:
         type_json = config_json['mergeCate2PatternPlanIdMap'][merge_cate2]
         if merge_cate2 not in new_config_json:
             new_config_json[merge_cate2] = {}
 
-        for produce_type in type_json:
-            produce_plan_ids = type_json[produce_type]
-
-            new_config_json[merge_cate2][produce_type] = {
-                "accountFilters": account_filters,
-                "contentFilters": content_filters,
-                "preFilterThreshold": pre_filter_threshold,
-                "promptId": prompt_id,
-                "modelValueConfig": model_value_config,
-                "producePlanIds": produce_plan_ids
-            }
+        new_config_json[merge_cate2] = {
+            "accountFilters": account_filters,
+            "contentFilters": content_filters,
+            "preFilterThreshold": pre_filter_threshold,
+            "promptId": prompt_id,
+            "modelValueConfig": model_value_config,
+            "patternProducePlanIdsMap": type_json,
+            "preFilterSearchAccountIds": pre_filter_search_accounts
+        }
 
     print(json.dumps(new_config_json, indent=4, ensure_ascii=False))
 

+ 7 - 6
script/automation_provide_job_repair.py

@@ -5,7 +5,7 @@ from client.AIGCClient import AIGCClient
 aigc_client = AIGCClient(token="8bf14f27fc3a486788f3383452422d72", base_url="https://aigc-api.aiddit.com")
 
 log_list = []
-with open("/Users/zhao/Downloads/cb08b033-8ab9-48e9-baf2-1ec5b031e0e5.json", 'r') as f:
+with open("/Users/zhao/Downloads/1680aba3-fa5b-4d53-8386-22cb1c3c8472.json", 'r') as f:
     line = f.readline()
     while line:
         log_list.append(json.loads(line))
@@ -14,16 +14,17 @@ with open("/Users/zhao/Downloads/cb08b033-8ab9-48e9-baf2-1ec5b031e0e5.json", 'r'
 for log in log_list:
     crawler_plan_id = log.get("crawlerPlanId")
     video_id = log.get("videoId")
-    if crawler_plan_id == '20260122023138114334284':
-        continue
+    input_mode_values = log.get('inputModeValues')
+
     error_msg, crawler_plan_info = aigc_client.get_content_crawler_plan_by_id(crawler_plan_id)
     if error_msg:
         print(f"获取 {crawler_plan_id} 的爬取信息异常")
         continue
+
     update_use_date = crawler_plan_info.get("updateData")
     crawler_plan_name = update_use_date['name']
-    if "视频vid" not in crawler_plan_name:
+    if "{话题}" not in crawler_plan_name:
         continue
-    new_crawler_plan_name = crawler_plan_name.replace("{视频vid}", video_id)
+    new_crawler_plan_name = crawler_plan_name.replace("{话题}", input_mode_values)
     update_use_date['name'] = new_crawler_plan_name
-    # aigc_client.crawler_plan_save(update_use_date)
+    aigc_client.crawler_plan_save(update_use_date)

+ 11 - 0
script/main.py

@@ -0,0 +1,11 @@
+import json
+
+log_list = []
+with open("/Users/zhao/Downloads/fc23380d-bac6-4b21-8474-d060edc3ee9f.json", "r") as f:
+    line = f.readline()
+    while line:
+        log_list.append(json.loads(line))
+        line = f.readline()
+
+for log in log_list:
+    print(json.dumps(log, ensure_ascii=False, indent=4))