automation_provide_job_repair.py 1.1 KB

import json
from client.AIGCClient import AIGCClient

# Client for the AIGC service (token and base URL are hard-coded in this script).
aigc_client = AIGCClient(token="8bf14f27fc3a486788f3383452422d72", base_url="https://aigc-api.aiddit.com")

# Load the repair log: the file is JSON Lines, one log record per line.
log_list = []
with open("/Users/zhao/Downloads/cb08b033-8ab9-48e9-baf2-1ec5b031e0e5.json", 'r') as f:
    line = f.readline()
    while line:
        log_list.append(json.loads(line))
        line = f.readline()
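# Illustrative only (an assumption, not taken from the actual file): each line of
# the JSON Lines input is expected to look roughly like the record below, since the
# loop below reads "crawlerPlanId" and "videoId" from every entry. Real records may
# carry additional fields.
#
#   {"crawlerPlanId": "20260122023138114334284", "videoId": "some_video_id"}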
# For each log record, patch the crawler plan whose name still contains the raw
# "{视频vid}" ("video vid") placeholder by substituting the actual video id.
for log in log_list:
    crawler_plan_id = log.get("crawlerPlanId")
    video_id = log.get("videoId")
    # Skip this specific plan id.
    if crawler_plan_id == '20260122023138114334284':
        continue
    error_msg, crawler_plan_info = aigc_client.get_content_crawler_plan_by_id(crawler_plan_id)
    if error_msg:
        print(f"Failed to fetch crawl plan info for {crawler_plan_id}")
        continue
    update_use_date = crawler_plan_info.get("updateData")
    crawler_plan_name = update_use_date['name']
    if "视频vid" not in crawler_plan_name:
        continue
    # Replace the literal "{视频vid}" placeholder with the real video id.
    new_crawler_plan_name = crawler_plan_name.replace("{视频vid}", video_id)
    update_use_date['name'] = new_crawler_plan_name
    # Dry run: the save call is left commented out.
    # aigc_client.crawler_plan_save(update_use_date)
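# A minimal sketch of the substitution performed above, using a hypothetical plan
# name (not part of the repair job itself):
#
#   sample_name = "provide-job-{视频vid}"
#   sample_name.replace("{视频vid}", "v123456")  # -> "provide-job-v123456"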