# async_aigc_system_api.py
  1. import json
  2. from typing import Optional, Dict, List, TypedDict
  3. from applications.utils import AsyncHttpClient
# Default HTTP headers sent with every request to the AIGC backend.
# NOTE(review): Origin/User-Agent mimic a desktop Chrome browser session —
# presumably the backend filters on them; confirm before changing.
HEADERS = {
    "Accept": "application/json",
    "Accept-Language": "zh,zh-CN;q=0.9",
    "Content-Type": "application/json",
    "Origin": "http://admin.cybertogether.net",
    "Proxy-Connection": "keep-alive",
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
}
# Session/identity payload sent as "baseInfo" with AIGC API requests.
# SECURITY NOTE(review): the auth token is hard-coded in source control —
# consider loading it from configuration or an environment variable.
PERSON_COOKIE = {
    "token": "af54cdc404c3464d896745df389b2dce",
    "appType": 9,
    "platform": "pc",
    "appVersionCode": 1000,
    # The fields below are sent as placeholder values (1); the backend
    # apparently does not validate them — TODO confirm.
    "clientTimestamp": 1,
    "fid": 1,
    "loginUid": 1,
    "pageSource": 1,
    "requestId": 1,
    "rid": 1,
    "uid": 1,
}
class RelationDict(TypedDict):
    """One crawler/video-pool content relation record, as accepted by the
    videoPoolCrawlerRelation endpoint."""

    videoPoolTraceId: str  # trace id of the item inside the video pool
    channelContentId: str  # content id on the source channel
    platform: str  # source platform identifier
  29. async def delete_illegal_gzh_articles(gh_id: str, title: str):
  30. """
  31. Delete illegal gzh articles
  32. :param gh_id: gzh id
  33. :param title: article title
  34. """
  35. url = "http://101.37.174.139:80/articleAudit/titleDangerFindDelete"
  36. payload = {
  37. "title": title,
  38. "ghId": gh_id,
  39. }
  40. headers = {"Content-Type": "application/json;charset=UTF-8"}
  41. async with AsyncHttpClient(timeout=600) as client:
  42. res = await client.post(url=url, headers=headers, json=payload)
  43. return res
  44. async def auto_create_crawler_task(plan_id, plan_name, plan_tag, url_list, platform):
  45. """
  46. Create crawler task
  47. """
  48. match platform:
  49. case "weixin":
  50. channel = 5
  51. case "toutiao":
  52. channel = 6
  53. case _:
  54. raise RuntimeError(f"Unsupported platform: {platform}")
  55. url = "http://aigc-api.cybertogether.net/aigc/crawler/plan/save"
  56. payload = {
  57. "params": {
  58. "contentFilters": [],
  59. "accountFilters": [],
  60. "filterAccountMatchMode": 1,
  61. "filterContentMatchMode": 1,
  62. "selectModeValues": [],
  63. "searchModeValues": [],
  64. "contentModal": 3,
  65. "analyze": {},
  66. "crawlerComment": 0,
  67. "inputGroup": None,
  68. "inputSourceGroups": [],
  69. "modePublishTime": [],
  70. "planType": 2,
  71. "frequencyType": 2,
  72. "planTag": plan_tag,
  73. "tagPenetrateFlag": 0,
  74. "id": plan_id,
  75. "name": plan_name,
  76. "channel": channel,
  77. "crawlerMode": 5,
  78. "inputModeValues": url_list,
  79. "modePublishTimeStart": None,
  80. "modePublishTimeEnd": None,
  81. "executeRate": None,
  82. "executeDate": None,
  83. "executeWindowStart": None,
  84. "executeWindowEnd": None,
  85. "executeTimeInterval": None,
  86. "executeNum": None,
  87. "addModal": None,
  88. "addChannel": None,
  89. "fileUpload": None,
  90. "prompt": None,
  91. "acelFlag": None,
  92. "tasks": [],
  93. },
  94. "baseInfo": PERSON_COOKIE,
  95. }
  96. async with AsyncHttpClient(timeout=600) as client:
  97. res = await client.post(url=url, headers=HEADERS, json=payload)
  98. return res
async def add_to_crawler_task():
    """TODO: add content to an existing crawler task (stub, not implemented)."""
    pass
async def get_crawler_task_detail():
    """TODO: fetch the detail of a crawler task (stub, not implemented)."""
    pass
  103. async def auto_bind_crawler_task_to_generate_task(crawler_task_list, generate_task_id):
  104. url = "http://aigc-api.cybertogether.net/aigc/produce/plan/save"
  105. plan_info = await get_generate_task_detail(generate_task_id)
  106. input_source_groups = plan_info.get("inputSourceGroups")
  107. existed_crawler_task = input_source_groups[0].get("inputSources")
  108. new_task_list = existed_crawler_task + crawler_task_list
  109. input_source_group_0 = input_source_groups[0]
  110. input_source_group_0["inputSources"] = new_task_list
  111. payload = json.dumps(
  112. {
  113. "params": {
  114. "contentFilters": [],
  115. "produceModal": plan_info.get("produceModal"),
  116. "inputModal": plan_info.get("inputModal"),
  117. "tasks": plan_info.get("tasks", []),
  118. "modules": [],
  119. "moduleGroups": plan_info.get("moduleGroups"),
  120. "inputSourceGroups": [input_source_group_0],
  121. "layoutType": plan_info.get("layoutType"),
  122. "activeManualReview": plan_info.get("activeManualReview"),
  123. "totalProduceNum": plan_info.get("totalProduceNum"),
  124. "dailyProduceNum": plan_info.get("dailyProduceNum"),
  125. "maxConcurrentNum": plan_info.get("maxConcurrentNum"),
  126. "id": generate_task_id,
  127. "name": plan_info.get("name"),
  128. "planTag": plan_info.get("planTag"),
  129. "tagPenetrateFlag": plan_info.get("tagPenetrateFlag"),
  130. "inputType": plan_info.get("inputType"),
  131. "inputChannel": plan_info.get("inputChannel"),
  132. "activeManualReviewCount": plan_info.get("activeManualReviewCount"),
  133. "autoComposite": plan_info.get("autoComposite"),
  134. },
  135. "baseInfo": PERSON_COOKIE,
  136. }
  137. )
  138. async with AsyncHttpClient(timeout=600) as client:
  139. response = await client.post(url=url, headers=HEADERS, data=payload)
  140. return response
  141. async def get_generate_task_detail(generate_task_id):
  142. """
  143. 通过生成计划的 id,获取该生成计划已有的抓取计划 list
  144. :param generate_task_id:
  145. :return:
  146. """
  147. url = "http://aigc-api.cybertogether.net/aigc/produce/plan/detail"
  148. payload = json.dumps(
  149. {"params": {"id": generate_task_id}, "baseInfo": PERSON_COOKIE}
  150. )
  151. async with AsyncHttpClient(timeout=600) as client:
  152. res = await client.post(url=url, headers=HEADERS, data=payload)
  153. if res["msg"] == "success":
  154. return res["data"]
  155. else:
  156. return {}
  157. async def insert_crawler_relation_to_aigc_system(relation_list: List[RelationDict]) -> Optional[Dict]:
  158. url = "http://aigc-api.cybertogether.net/aigc/crawler/content/videoPoolCrawlerRelation"
  159. payload = json.dumps({"params": {"relations": relation_list}})
  160. async with AsyncHttpClient(timeout=60) as client:
  161. res = await client.post(url=url, headers=HEADERS, data=payload)
  162. return res