# newContentIdTask.py
  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. import asyncio
  7. from applications.config import Config
  8. from applications.log import logging
  9. from applications.functions.pqFunctions import publish_to_pq
  10. from applications.functions.common import shuffle_list
  11. from applications.functions.kimi import KimiServer
  12. from applications.spider import search_videos_from_web
  13. from applications.etl_function import *
  14. class NewContentIdTask(object):
  15. """
  16. 不存在历史已经发布的文章的匹配流程
  17. """
  18. TASK_INIT_STATUS = 0
  19. TASK_KIMI_FINISHED_STATUS = 1
  20. TASK_SPIDER_FINISHED_STATUS = 2
  21. TASK_ETL_FINISHED_STATUS = 3
  22. TASK_PUBLISHED_STATUS = 4
  23. TASK_PROCESSING_STATUS = 101
  24. TASK_FAIL_STATUS = 99
  25. ARTICLE_TEXT_TABLE_ERROR = 98
  26. TASK_MAX_PROCESS_TIMES = 3
  27. def __init__(self, mysql_client):
  28. self.mysql_client = mysql_client
  29. self.config = Config()
  30. self.article_match_video_table = self.config.article_match_video_table
  31. self.article_text_table = self.config.article_text_table
  32. self.article_crawler_video_table = self.config.article_crawler_video_table
  33. self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
  34. self.account_map = json.loads(self.config.get_config_value("accountMap"))
  35. self.spider_coroutines = self.config.get_config_value("spiderCoroutines")
  36. async def get_tasks(self):
  37. """
  38. 获取 task
  39. :return:
  40. """
  41. # 获取 content_status 为 处理中 的任务,判断时间, 如果超过 1h 则,则将它改为 0, process_times + 1
  42. select_processing_sql = f"""
  43. SELECT
  44. trace_id, content_status_update_time, process_times
  45. FROM
  46. {self.article_match_video_table}
  47. WHERE
  48. content_status = {self.TASK_PROCESSING_STATUS}
  49. and process_times <= {self.TASK_MAX_PROCESS_TIMES};
  50. """
  51. processing_articles = await self.mysql_client.async_select(select_processing_sql)
  52. if processing_articles:
  53. processing_list = [
  54. {
  55. "trace_id": item[0],
  56. "content_status_update_time": item[1],
  57. "process_times": item[2]
  58. }
  59. for item in processing_articles
  60. ]
  61. for obj in processing_list:
  62. if int(time.time()) - obj['content_status_update_time'] >= 3600:
  63. # 认为该任务失败
  64. await self.roll_back_content_status_when_fails(
  65. process_times=obj['process_times'] + 1,
  66. trace_id=obj['trace_id']
  67. )
  68. # 将 process_times > 3 的任务的状态修改为失败
  69. update_status_sql = f"""
  70. UPDATE
  71. {self.article_match_video_table}
  72. SET
  73. content_status = %s
  74. WHERE
  75. process_times > %s;
  76. """
  77. await self.mysql_client.async_insert(
  78. update_status_sql,
  79. params=(
  80. self.TASK_FAIL_STATUS,
  81. self.TASK_MAX_PROCESS_TIMES
  82. )
  83. )
  84. # 获取 process_times <= 3 且 content_status = 0 的任务
  85. select_sql = f"""
  86. SELECT
  87. trace_id, content_id, flow_pool_level, gh_id, process_times
  88. FROM
  89. {self.article_match_video_table}
  90. WHERE
  91. content_status = {self.TASK_INIT_STATUS}
  92. and process_times <= {self.TASK_MAX_PROCESS_TIMES}
  93. LIMIT {self.spider_coroutines};
  94. """
  95. tasks = await self.mysql_client.async_select(select_sql)
  96. if tasks:
  97. return [
  98. {
  99. "trace_id": i[0],
  100. "content_id": i[1],
  101. "flow_pool_level": i[2],
  102. "gh_id": i[3],
  103. "process_times": i[4]
  104. }
  105. for i in tasks
  106. ]
  107. else:
  108. return []
  109. async def get_video_list(self, content_id):
  110. """
  111. 判断该文章是否存在历史匹配视频
  112. :param content_id
  113. :return:
  114. """
  115. sql = f"""
  116. SELECT id
  117. FROM {self.article_crawler_video_table}
  118. WHERE content_id = '{content_id}' and download_status = 2;
  119. """
  120. res_tuple = await self.mysql_client.async_select(sql)
  121. if len(res_tuple) >= 3:
  122. return True
  123. else:
  124. return False
  125. async def update_content_status(self, new_content_status, trace_id, ori_content_status):
  126. """
  127. :param new_content_status:
  128. :param trace_id:
  129. :param ori_content_status:
  130. :return:
  131. """
  132. update_sql = f"""
  133. UPDATE {self.article_match_video_table}
  134. SET content_status = %s, content_status_update_time = %s
  135. WHERE trace_id = %s and content_status = %s;
  136. """
  137. row_counts = await self.mysql_client.async_insert(
  138. sql=update_sql,
  139. params=(
  140. new_content_status,
  141. int(time.time()),
  142. trace_id,
  143. ori_content_status
  144. )
  145. )
  146. return row_counts
  147. async def roll_back_content_status_when_fails(self, process_times, trace_id):
  148. """
  149. 处理失败,回滚至初始状态,处理次数加 1
  150. :param process_times:
  151. :param trace_id:
  152. :return:
  153. """
  154. update_article_sql = f"""
  155. UPDATE {self.article_match_video_table}
  156. SET
  157. content_status = %s,
  158. content_status_update_time = %s,
  159. process_times = %s,
  160. WHERE trace_id = %s and content_status = %s;
  161. """
  162. await self.mysql_client.async_insert(
  163. sql=update_article_sql,
  164. params=(
  165. self.TASK_INIT_STATUS,
  166. int(time.time()),
  167. process_times + 1,
  168. trace_id,
  169. self.TASK_PROCESSING_STATUS
  170. )
  171. )
  172. async def judge_whether_same_content_id_is_processing(self, content_id):
  173. """
  174. 同一个 content_id只需要处理一次
  175. :param content_id:
  176. :return:
  177. """
  178. select_sql = f"""
  179. SELECT distinct content_status
  180. FROM {self.article_match_video_table}
  181. WHERE content_id = '{content_id}';
  182. """
  183. result = await self.mysql_client.async_select(select_sql)
  184. if result:
  185. for item in result:
  186. content_status = item[0]
  187. if content_status not in {self.TASK_INIT_STATUS, self.TASK_PUBLISHED_STATUS} :
  188. return True
  189. return False
  190. else:
  191. return False
  192. async def get_downloaded_videos(self, content_id):
  193. """
  194. 获取已下载的视频
  195. :return:
  196. """
  197. sql = f"""
  198. SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
  199. FROM {self.article_crawler_video_table}
  200. WHERE content_id = '{content_id}' and download_status = 2
  201. ORDER BY score DESC;
  202. """
  203. res_tuple = await self.mysql_client.async_select(sql)
  204. return [
  205. {
  206. "platform": i[0],
  207. "play_count": i[1],
  208. "like_count": i[2],
  209. "video_oss_path": i[3],
  210. "cover_oss_path": i[4],
  211. "uid": i[5]
  212. }
  213. for i in res_tuple
  214. ]
  215. async def get_kimi_status(self, content_id):
  216. """
  217. 通过 content_id 获取kimi info
  218. :return:
  219. """
  220. select_sql = f"""
  221. select kimi_status
  222. from {self.article_text_table}
  223. where content_id = '{content_id}';
  224. """
  225. response = await self.mysql_client.async_select(select_sql)
  226. if response:
  227. kimi_status = response[0][0]
  228. return kimi_status
  229. else:
  230. return self.ARTICLE_TEXT_TABLE_ERROR
    async def kimi_task(self, params):
        """
        Run the kimi stage for one task.

        Three cases, driven by the kimi_status stored in the text table:
          * already succeeded: CAS the task 0 -> 1 and return the stored
            kimi fields;
          * text row missing: log and return None (caller treats falsy
            as failure);
          * not yet run: CAS 0 -> 101 (lock), call KimiServer, persist the
            result, CAS 101 -> 1 and return it; on any exception mark
            kimi_status failed, roll the task back to INIT, return {}.

        :param params: task dict with content_id / trace_id / process_times
        :return: dict with kimi_title / ori_title / kimi_summary / kimi_keys,
                 {} on failure, or None when locked elsewhere / row missing
        """
        KIMI_SUCCESS_STATUS = 1
        KIMI_FAIL_STATUS = 2
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == KIMI_SUCCESS_STATUS:
            # kimi already done for this content: just advance the task state
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # another worker grabbed the status lock first
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            get_kimi_sql = f"""
            SELECT article_title, kimi_title, kimi_summary, kimi_keys
            FROM {self.article_text_table}
            WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == self.ARTICLE_TEXT_TABLE_ERROR:
            # no row in the text table for this content_id; implicit None return
            logging(
                code="4000",
                info="long_articles_text表中未找到 content_id"
            )
        else:
            # start processing: move content_status from 0 to 101 (lock)
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_PROCESSING_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # another worker grabbed the status lock first
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            K = KimiServer()
            try:
                select_sql = f"""
                select article_title, article_text
                from {self.article_text_table}
                where content_id = '{content_id}'
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                # strip quotes so the title is safe to embed downstream
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                # NOTE: content_title is persisted into the kimi_summary column
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                    kimi_title = %s,
                    kimi_summary = %s,
                    kimi_keys = %s,
                    kimi_status = %s
                WHERE content_id = %s;"""
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_title, content_title, content_keys, KIMI_SUCCESS_STATUS, params['content_id'])
                )
                # kimi stage done: 101 -> 1
                await self.update_content_status(
                    new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.TASK_PROCESSING_STATUS
                )
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # kimi processing failed: persist failure status on the text row
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                    kimi_status = %s
                WHERE content_id = %s
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        KIMI_FAIL_STATUS,
                        content_id
                    )
                )
                # roll the task status back from 101 to 0 (bumps process_times)
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}
  345. async def spider_task(self, params, kimi_result):
  346. """
  347. 爬虫任务
  348. :return:
  349. """
  350. SPIDER_INIT_STATUS = 1
  351. trace_id = params['trace_id']
  352. content_id = params['content_id']
  353. process_times = params['process_times']
  354. gh_id = params['gh_id']
  355. select_sql = f"""
  356. select count(id) from {self.article_crawler_video_table} where content_id = '{content_id}';
  357. """
  358. count_tuple = await self.mysql_client.async_select(select_sql)
  359. counts = count_tuple[0][0]
  360. if counts >= 3:
  361. await self.update_content_status(
  362. new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
  363. trace_id=trace_id,
  364. ori_content_status=SPIDER_INIT_STATUS
  365. )
  366. return True
  367. # 开始处理,将状态由 1 改成 101
  368. affected_rows = await self.update_content_status(
  369. new_content_status=self.TASK_PROCESSING_STATUS,
  370. ori_content_status=SPIDER_INIT_STATUS,
  371. trace_id=trace_id
  372. )
  373. if affected_rows == 0:
  374. logging(
  375. code="6000",
  376. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  377. )
  378. return False
  379. try:
  380. search_videos_count = await search_videos_from_web(
  381. info={
  382. "ori_title": kimi_result['ori_title'],
  383. "kimi_summary": kimi_result['kimi_summary'],
  384. "kimi_keys": kimi_result['kimi_keys'],
  385. "trace_id": trace_id,
  386. "gh_id": gh_id,
  387. "content_id": content_id,
  388. "crawler_video_table": self.article_crawler_video_table
  389. },
  390. gh_id_map=self.account_map,
  391. db_client=self.mysql_client
  392. )
  393. if search_videos_count >= 3:
  394. # 表示爬虫任务执行成功, 将状态从 101 改为 2
  395. await self.update_content_status(
  396. new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
  397. trace_id=trace_id,
  398. ori_content_status=self.TASK_PROCESSING_STATUS
  399. )
  400. return True
  401. else:
  402. await self.roll_back_content_status_when_fails(
  403. process_times=process_times + 1,
  404. trace_id=trace_id
  405. )
  406. return False
  407. except Exception as e:
  408. await self.roll_back_content_status_when_fails(
  409. process_times=process_times + 1,
  410. trace_id=trace_id
  411. )
  412. print("爬虫处理失败: {}".format(e))
  413. return False
  414. async def etl_task(self, params):
  415. """
  416. download && upload videos
  417. :param params:
  418. :return:
  419. """
  420. VIDEO_DOWNLOAD_SUCCESS_STATUS = 2
  421. VIDEO_DOWNLOAD_FAIL_STATUS = 3
  422. ETL_TASK_INIT_STATUS = 2
  423. trace_id = params['trace_id']
  424. content_id = params['content_id']
  425. # 判断是否有三条已经下载完成的视频
  426. select_sql = f"""
  427. select count(id)
  428. from {self.article_crawler_video_table}
  429. where content_id = '{content_id}' and download_status = {VIDEO_DOWNLOAD_SUCCESS_STATUS};
  430. """
  431. video_count_tuple = await self.mysql_client.async_select(select_sql)
  432. video_count = video_count_tuple[0][0]
  433. if video_count >= 3:
  434. affect_rows = await self.update_content_status(
  435. ori_content_status=ETL_TASK_INIT_STATUS,
  436. trace_id=trace_id,
  437. new_content_status=self.TASK_ETL_FINISHED_STATUS
  438. )
  439. if affect_rows == 0:
  440. logging(
  441. code="6000",
  442. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  443. )
  444. return False
  445. return True
  446. else:
  447. # 开始处理, 将文章状态修改为处理状态
  448. affected_rows = await self.update_content_status(
  449. ori_content_status=ETL_TASK_INIT_STATUS,
  450. trace_id=trace_id,
  451. new_content_status=self.TASK_PROCESSING_STATUS
  452. )
  453. if affected_rows == 0:
  454. logging(
  455. code="6000",
  456. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  457. )
  458. return False
  459. select_sql = f"""
  460. SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
  461. FROM {self.article_crawler_video_table}
  462. WHERE content_id = '{content_id}' and download_status != {VIDEO_DOWNLOAD_SUCCESS_STATUS}
  463. ORDER BY score DESC;
  464. """
  465. videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
  466. downloaded_count = 0
  467. for line in videos_need_to_download_tuple:
  468. params = {
  469. "id": line[0],
  470. "video_id": line[1],
  471. "platform": line[2],
  472. "video_title": line[3],
  473. "video_url": line[4],
  474. "cover_url": line[5],
  475. "user_id": line[6],
  476. "trace_id": line[7]
  477. }
  478. try:
  479. local_video_path, local_cover_path = generate_video_path(params['platform'], params['video_id'])
  480. # download videos
  481. file_path = await download_video(
  482. file_path=local_video_path,
  483. platform=params['platform'],
  484. video_url=params['video_url']
  485. )
  486. # download cover
  487. cover_path = await download_cover(
  488. file_path=local_cover_path,
  489. platform=params['platform'],
  490. cover_url=params['cover_url']
  491. )
  492. oss_video = await upload_to_oss(
  493. local_video_path=file_path,
  494. download_type="video"
  495. )
  496. if cover_path:
  497. oss_cover = await upload_to_oss(
  498. local_video_path=cover_path,
  499. download_type="image"
  500. )
  501. else:
  502. oss_cover = None
  503. update_sql = f"""
  504. UPDATE {self.article_crawler_video_table}
  505. SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
  506. WHERE id = %s;
  507. """
  508. await self.mysql_client.async_insert(
  509. sql=update_sql,
  510. params=(
  511. oss_video,
  512. oss_cover,
  513. VIDEO_DOWNLOAD_SUCCESS_STATUS,
  514. params['id']
  515. )
  516. )
  517. downloaded_count += 1
  518. if downloaded_count > 3:
  519. await self.update_content_status(
  520. ori_content_status=self.TASK_PROCESSING_STATUS,
  521. trace_id=trace_id,
  522. new_content_status=self.TASK_ETL_FINISHED_STATUS
  523. )
  524. return True
  525. except Exception as e:
  526. update_sql = f"""
  527. UPDATE {self.article_crawler_video_table}
  528. SET download_status = %s
  529. WHERE id = %s;
  530. """
  531. await self.mysql_client.async_insert(
  532. sql=update_sql,
  533. params=(VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  534. )
  535. if downloaded_count >= 3:
  536. await self.update_content_status(
  537. ori_content_status=self.TASK_PROCESSING_STATUS,
  538. trace_id=trace_id,
  539. new_content_status=self.TASK_ETL_FINISHED_STATUS
  540. )
  541. return True
  542. else:
  543. await self.roll_back_content_status_when_fails(
  544. process_times=params['process_times'] + 1,
  545. trace_id=params['trace_id']
  546. )
  547. return False
  548. async def publish_task(self, params, kimi_title):
  549. """
  550. 发布任务
  551. :param kimi_title:
  552. :param params:
  553. :return:
  554. """
  555. PUBLISH_DEFAULT_STATUS = 3
  556. gh_id = params['gh_id']
  557. flow_pool_level = params['flow_pool_level']
  558. content_id = params['content_id']
  559. trace_id = params['trace_id']
  560. process_times = params['process_times']
  561. # 开始处理,将状态修改为操作状态
  562. affected_rows = await self.update_content_status(
  563. ori_content_status=PUBLISH_DEFAULT_STATUS,
  564. trace_id=trace_id,
  565. new_content_status=self.TASK_PROCESSING_STATUS
  566. )
  567. if affected_rows == 0:
  568. logging(
  569. code="6000",
  570. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  571. )
  572. return False
  573. try:
  574. download_videos = await self.get_downloaded_videos(content_id)
  575. match flow_pool_level:
  576. case "autoArticlePoolLevel4":
  577. # 冷启层, 全量做
  578. video_list = shuffle_list(download_videos)[:3]
  579. case "autoArticlePoolLevel3":
  580. if self.gh_id_dict.get(gh_id):
  581. video_list = shuffle_list(download_videos)[:3]
  582. else:
  583. video_list = download_videos[:3]
  584. case "autoArticlePoolLevel2":
  585. # 次条,只针对具体账号做
  586. video_list = []
  587. case "autoArticlePoolLevel1":
  588. # 头条,先不做
  589. video_list = download_videos[:3]
  590. case _:
  591. video_list = download_videos[:3]
  592. L = []
  593. for video_obj in video_list:
  594. params = {
  595. "videoPath": video_obj['video_oss_path'],
  596. "uid": video_obj['uid'],
  597. "title": kimi_title
  598. }
  599. response = await publish_to_pq(params)
  600. # time.sleep(2)
  601. obj = {
  602. "uid": video_obj['uid'],
  603. "source": video_obj['platform'],
  604. "kimiTitle": kimi_title,
  605. "videoId": response['data']['id'],
  606. "videoCover": response['data']['shareImgPath'],
  607. "videoPath": response['data']['videoPath'],
  608. "videoOss": video_obj['video_oss_path']
  609. }
  610. L.append(obj)
  611. update_sql = f"""
  612. UPDATE {self.article_match_video_table}
  613. SET content_status = %s, response = %s, process_times = %s
  614. WHERE trace_id = %s and content_status = %s;
  615. """
  616. # 从操作中状态修改为已发布状态
  617. await self.mysql_client.async_insert(
  618. sql=update_sql,
  619. params=(
  620. self.TASK_PUBLISHED_STATUS,
  621. json.dumps(L, ensure_ascii=False),
  622. process_times + 1,
  623. trace_id,
  624. self.TASK_PROCESSING_STATUS
  625. )
  626. )
  627. except Exception as e:
  628. await self.roll_back_content_status_when_fails(
  629. process_times=params['process_times'] + 1,
  630. trace_id=params['trace_id']
  631. )
  632. print(e)
  633. async def start_process(self, params):
  634. """
  635. 处理单篇文章
  636. :param params:
  637. :return:
  638. """
  639. # step1: 执行 kimi 操作
  640. # time.sleep(5) # 测试多个进程操作同一个 task 的等待时间
  641. kimi_result = await self.kimi_task(params)
  642. trace_id = params['trace_id']
  643. if kimi_result:
  644. # 等待 kimi 操作执行完成之后,开始执行 spider_task
  645. print("kimi success")
  646. logging(
  647. code=3001,
  648. info="kimi success",
  649. trace_id=trace_id
  650. )
  651. spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
  652. if spider_flag:
  653. # 等待爬虫执行完成后,开始执行 etl_task
  654. print("spider success")
  655. logging(
  656. code=3002,
  657. info="spider_success",
  658. trace_id=trace_id
  659. )
  660. etl_flag = await self.etl_task(params)
  661. if etl_flag:
  662. # 等待下载上传完成,执行发布任务
  663. print("etl success")
  664. logging(
  665. code="3003",
  666. info="etl_success",
  667. trace_id=trace_id
  668. )
  669. try:
  670. await self.publish_task(params, kimi_result['kimi_title'])
  671. logging(
  672. code="3004",
  673. info="publish_success",
  674. trace_id=trace_id
  675. )
  676. except Exception as e:
  677. logging(
  678. code="6004",
  679. info="publish 失败--{}".format(e),
  680. trace_id=params['trace_id']
  681. )
  682. else:
  683. logging(
  684. code="6003",
  685. info="ETL 处理失败",
  686. trace_id=params['trace_id']
  687. )
  688. else:
  689. logging(
  690. code="6002",
  691. info="爬虫处理失败",
  692. trace_id=params['trace_id']
  693. )
  694. else:
  695. logging(
  696. code="6001",
  697. info="kimi 处理失败",
  698. trace_id=params['trace_id']
  699. )
  700. async def process_task(self, params):
  701. """
  702. 处理任务
  703. :return:
  704. """
  705. content_id = params['content_id']
  706. download_videos = await self.get_video_list(content_id)
  707. if not download_videos:
  708. # 开始处理, 判断是否有相同的文章 id 正在处理
  709. processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
  710. if processing_flag:
  711. logging(
  712. code="9001",
  713. info="该 content id 正在处理中, 跳过此任务"
  714. )
  715. else:
  716. await self.start_process(params=params)
  717. else:
  718. print("存在已下载视频")
  719. async def deal(self):
  720. """
  721. function
  722. :return:
  723. """
  724. task_list = await self.get_tasks()
  725. print(task_list)
  726. logging(
  727. code="5001",
  728. info="Match Task Got {} this time".format(len(task_list)),
  729. function="Publish Task"
  730. )
  731. if task_list:
  732. tasks = [self.process_task(params) for params in task_list]
  733. await asyncio.gather(*tasks)
  734. else:
  735. logging(
  736. code="9008",
  737. info="没有要处理的请求"
  738. )