newContentIdTask.py

"""
@author: luojunhui
"""
import json
import time
import asyncio

from applications.config import Config
from applications.log import logging
from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
from applications.functions.common import shuffle_list
from applications.functions.kimi import KimiServer
from applications.spider import search_videos_from_web
from applications.etl_function import *


class NewContentIdTask(object):
    """
    Matching flow for articles that have no previously published history
    """
    TASK_INIT_STATUS = 0
    TASK_KIMI_FINISHED_STATUS = 1
    TASK_SPIDER_FINISHED_STATUS = 2
    TASK_ETL_FINISHED_STATUS = 3
    TASK_PUBLISHED_STATUS = 4
    TASK_PROCESSING_STATUS = 101
    TASK_FAIL_STATUS = 99
    ARTICLE_TEXT_TABLE_ERROR = 98
    TASK_MAX_PROCESS_TIMES = 3
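
    # Status lifecycle for a row in article_match_video_table (per trace_id):
    #   0 (init) -> 101 (processing) -> 1 (kimi done) -> 101 -> 2 (spider done)
    #   -> 101 -> 3 (etl done) -> 101 -> 4 (published)
    # On failure a row rolls back to 0 with process_times incremented; rows that
    # exceed TASK_MAX_PROCESS_TIMES without reaching 4 are marked 99 (failed).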

    def __init__(self, mysql_client):
        self.mysql_client = mysql_client
        self.config = Config()
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        self.account_map = json.loads(self.config.get_config_value("accountMap"))
        self.spider_coroutines = self.config.get_config_value("spiderCoroutines")

    async def get_tasks(self):
        """
        Fetch pending tasks.
        :return:
        """
        # Fetch tasks whose content_status is "processing"; if one has been in
        # that state for more than 1 hour, treat it as failed and roll it back
        # to 0 (the rollback helper also increments process_times by 1).
        select_processing_sql = f"""
            SELECT
                trace_id, content_status_update_time, process_times
            FROM
                {self.article_match_video_table}
            WHERE
                content_status = {self.TASK_PROCESSING_STATUS}
                and process_times <= {self.TASK_MAX_PROCESS_TIMES};
        """
        processing_articles = await self.mysql_client.async_select(select_processing_sql)
        if processing_articles:
            processing_list = [
                {
                    "trace_id": item[0],
                    "content_status_update_time": item[1],
                    "process_times": item[2]
                }
                for item in processing_articles
            ]
            for obj in processing_list:
                if int(time.time()) - obj['content_status_update_time'] >= 3600:
                    # Consider this task failed and roll it back
                    await self.roll_back_content_status_when_fails(
                        process_times=obj['process_times'],
                        trace_id=obj['trace_id']
                    )
        # Mark tasks with process_times > 3 whose status is not 4 (published) as failed
        update_status_sql = f"""
            UPDATE
                {self.article_match_video_table}
            SET
                content_status = %s
            WHERE
                process_times > %s and content_status != %s;
        """
        await self.mysql_client.async_insert(
            update_status_sql,
            params=(
                self.TASK_FAIL_STATUS,
                self.TASK_MAX_PROCESS_TIMES,
                self.TASK_PUBLISHED_STATUS
            )
        )
        # Fetch tasks with process_times <= 3 and content_status = 0
        select_sql = f"""
            SELECT
                trace_id, content_id, flow_pool_level, gh_id, process_times
            FROM
                {self.article_match_video_table}
            WHERE
                content_status = {self.TASK_INIT_STATUS}
                and process_times <= {self.TASK_MAX_PROCESS_TIMES}
            ORDER BY flow_pool_level, request_timestamp
            LIMIT {self.spider_coroutines};
        """
        tasks = await self.mysql_client.async_select(select_sql)
        if tasks:
            return [
                {
                    "trace_id": i[0],
                    "content_id": i[1],
                    "flow_pool_level": i[2],
                    "gh_id": i[3],
                    "process_times": i[4]
                }
                for i in tasks
            ]
        else:
            return []

    async def get_video_list(self, content_id):
        """
        Check whether this article already has matched videos
        (at least 3 downloaded successfully).
        :param content_id:
        :return:
        """
        sql = f"""
            SELECT id
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = 2;
        """
        res_tuple = await self.mysql_client.async_select(sql)
        return len(res_tuple) >= 3

    async def update_content_status(self, new_content_status, trace_id, ori_content_status):
        """
        Compare-and-swap the task status: the UPDATE only takes effect if the
        row still holds ori_content_status.
        :param new_content_status:
        :param trace_id:
        :param ori_content_status:
        :return: number of affected rows (0 means another worker won the lock)
        """
        update_sql = f"""
            UPDATE {self.article_match_video_table}
            SET content_status = %s, content_status_update_time = %s
            WHERE trace_id = %s and content_status = %s;
        """
        row_counts = await self.mysql_client.async_insert(
            sql=update_sql,
            params=(
                new_content_status,
                int(time.time()),
                trace_id,
                ori_content_status
            )
        )
        return row_counts
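
    # Every stage claims a task with the compare-and-swap above before doing
    # any work, which is what serializes concurrent workers. The calling
    # convention used throughout this class looks like:
    #
    #     affected_rows = await self.update_content_status(
    #         new_content_status=self.TASK_PROCESSING_STATUS,
    #         trace_id=trace_id,
    #         ori_content_status=self.TASK_INIT_STATUS,
    #     )
    #     if affected_rows == 0:
    #         return  # another worker claimed this trace_id first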

    async def roll_back_content_status_when_fails(self, process_times, trace_id):
        """
        On failure, roll the task back to the init status and increment
        process_times by 1.
        :param process_times:
        :param trace_id:
        :return:
        """
        update_article_sql = f"""
            UPDATE {self.article_match_video_table}
            SET
                content_status = %s,
                content_status_update_time = %s,
                process_times = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_article_sql,
            params=(
                self.TASK_INIT_STATUS,
                int(time.time()),
                process_times + 1,
                trace_id,
                self.TASK_PROCESSING_STATUS
            )
        )
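
    # Note: roll_back_content_status_when_fails increments process_times itself,
    # so call sites pass the current value unchanged rather than pre-incrementing.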

    async def judge_whether_same_content_id_is_processing(self, content_id):
        """
        Each content_id only needs to be processed once.
        :param content_id:
        :return: True if any row for this content_id is already past the init
                 status (the only statuses that allow a new run are
                 init 0, fail 99)
        """
        select_sql = f"""
            SELECT distinct content_status
            FROM {self.article_match_video_table}
            WHERE content_id = '{content_id}';
        """
        result = await self.mysql_client.async_select(select_sql)
        if result:
            for item in result:
                content_status = item[0]
                if content_status in {
                    self.TASK_KIMI_FINISHED_STATUS,
                    self.TASK_SPIDER_FINISHED_STATUS,
                    self.TASK_ETL_FINISHED_STATUS,
                    self.TASK_PROCESSING_STATUS,
                    self.TASK_PUBLISHED_STATUS
                }:
                    return True
            return False
        else:
            return False

    async def get_downloaded_videos(self, content_id):
        """
        Fetch videos that have already been downloaded, best score first.
        :return:
        """
        sql = f"""
            SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = 2
            ORDER BY score DESC;
        """
        res_tuple = await self.mysql_client.async_select(sql)
        return [
            {
                "platform": i[0],
                "play_count": i[1],
                "like_count": i[2],
                "video_oss_path": i[3],
                "cover_oss_path": i[4],
                "uid": i[5]
            }
            for i in res_tuple
        ]

    async def get_kimi_status(self, content_id):
        """
        Look up the kimi_status for a content_id.
        :return:
        """
        select_sql = f"""
            select kimi_status
            from {self.article_text_table}
            where content_id = '{content_id}';
        """
        response = await self.mysql_client.async_select(select_sql)
        if response:
            kimi_status = response[0][0]
            return kimi_status
        else:
            return self.ARTICLE_TEXT_TABLE_ERROR

    async def kimi_task(self, params):
        """
        Run the Kimi task.
        :return:
        """
        KIMI_SUCCESS_STATUS = 1
        KIMI_FAIL_STATUS = 2
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == KIMI_SUCCESS_STATUS:
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="Multiple workers competed for this task's status lock and lost; returning"
                )
                return
            get_kimi_sql = f"""
                SELECT article_title, kimi_title, kimi_summary, kimi_keys
                FROM {self.article_text_table}
                WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == self.ARTICLE_TEXT_TABLE_ERROR:
            logging(
                code="4000",
                info="content_id not found in the long_articles_text table"
            )
        else:
            # Start processing: move content_status from 0 to 101
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_PROCESSING_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="Multiple workers competed for this task's status lock and lost; returning"
                )
                return
            K = KimiServer()
            try:
                select_sql = f"""
                    select article_title, article_text
                    from {self.article_text_table}
                    where content_id = '{content_id}';
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                update_kimi_sql = f"""
                    UPDATE {self.article_text_table}
                    SET
                        kimi_title = %s,
                        kimi_summary = %s,
                        kimi_keys = %s,
                        kimi_status = %s
                    WHERE content_id = %s;
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_title, content_title, content_keys, KIMI_SUCCESS_STATUS, content_id)
                )
                await self.update_content_status(
                    new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.TASK_PROCESSING_STATUS
                )
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # Kimi task failed: mark the text row as failed and log the error
                print("kimi task failed: {}".format(e))
                update_kimi_sql = f"""
                    UPDATE {self.article_text_table}
                    SET
                        kimi_status = %s
                    WHERE content_id = %s;
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        KIMI_FAIL_STATUS,
                        content_id
                    )
                )
                # Roll the status back from 101 to 0
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}
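
    # kimi_task takes one of two paths: if another trace for the same
    # content_id already produced Kimi results (kimi_status == 1 in the text
    # table), the cached title/summary/keys are reused and the Kimi call is
    # skipped; otherwise the task is claimed, KimiServer is called, and the
    # results are persisted before the status advances.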

    async def spider_task(self, params, kimi_result):
        """
        Run the crawler task.
        :return:
        """
        SPIDER_INIT_STATUS = 1
        trace_id = params['trace_id']
        content_id = params['content_id']
        process_times = params['process_times']
        gh_id = params['gh_id']
        select_sql = f"""
            select count(id) from {self.article_crawler_video_table} where content_id = '{content_id}';
        """
        count_tuple = await self.mysql_client.async_select(select_sql)
        counts = count_tuple[0][0]
        if counts >= 3:
            await self.update_content_status(
                new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=SPIDER_INIT_STATUS
            )
            return True
        # Start processing: move the status from 1 to 101
        affected_rows = await self.update_content_status(
            new_content_status=self.TASK_PROCESSING_STATUS,
            ori_content_status=SPIDER_INIT_STATUS,
            trace_id=trace_id
        )
        if affected_rows == 0:
            logging(
                code="6000",
                info="Multiple workers competed for this task's status lock and lost; returning"
            )
            return False
        try:
            search_videos_count = await search_videos_from_web(
                info={
                    "ori_title": kimi_result['ori_title'],
                    "kimi_summary": kimi_result['kimi_summary'],
                    "kimi_keys": kimi_result['kimi_keys'],
                    "trace_id": trace_id,
                    "gh_id": gh_id,
                    "content_id": content_id,
                    "crawler_video_table": self.article_crawler_video_table
                },
                gh_id_map=self.account_map,
                db_client=self.mysql_client
            )
            if search_videos_count >= 3:
                # Crawler succeeded: move the status from 101 to 2
                await self.update_content_status(
                    new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.TASK_PROCESSING_STATUS
                )
                return True
            else:
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return False
        except Exception as e:
            await self.roll_back_content_status_when_fails(
                process_times=process_times,
                trace_id=trace_id
            )
            print("spider task failed: {}".format(e))
            return False

    async def etl_task(self, params):
        """
        Download videos and upload them to OSS.
        :param params:
        :return:
        """
        VIDEO_DOWNLOAD_SUCCESS_STATUS = 2
        VIDEO_DOWNLOAD_FAIL_STATUS = 3
        ETL_TASK_INIT_STATUS = 2
        trace_id = params['trace_id']
        content_id = params['content_id']
        process_times = params['process_times']
        # Check whether three videos have already finished downloading
        select_sql = f"""
            select count(id)
            from {self.article_crawler_video_table}
            where content_id = '{content_id}' and download_status = {VIDEO_DOWNLOAD_SUCCESS_STATUS};
        """
        video_count_tuple = await self.mysql_client.async_select(select_sql)
        video_count = video_count_tuple[0][0]
        if video_count >= 3:
            affect_rows = await self.update_content_status(
                ori_content_status=ETL_TASK_INIT_STATUS,
                trace_id=trace_id,
                new_content_status=self.TASK_ETL_FINISHED_STATUS
            )
            if affect_rows == 0:
                logging(
                    code="6000",
                    info="Multiple workers competed for this task's status lock and lost; returning"
                )
                return False
            return True
        else:
            # Start processing: move the task into the processing status
            affected_rows = await self.update_content_status(
                ori_content_status=ETL_TASK_INIT_STATUS,
                trace_id=trace_id,
                new_content_status=self.TASK_PROCESSING_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="Multiple workers competed for this task's status lock and lost; returning"
                )
                return False
            select_sql = f"""
                SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
                FROM {self.article_crawler_video_table}
                WHERE content_id = '{content_id}' and download_status != {VIDEO_DOWNLOAD_SUCCESS_STATUS}
                ORDER BY score DESC;
            """
            videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
            downloaded_count = 0
            for line in videos_need_to_download_tuple:
                # Use a separate dict per video so the outer params stays intact
                video_params = {
                    "id": line[0],
                    "video_id": line[1],
                    "platform": line[2],
                    "video_title": line[3],
                    "video_url": line[4],
                    "cover_url": line[5],
                    "user_id": line[6],
                    "trace_id": line[7]
                }
                try:
                    local_video_path, local_cover_path = generate_video_path(video_params['platform'], video_params['video_id'])
                    # Download the video
                    file_path = await download_video(
                        file_path=local_video_path,
                        platform=video_params['platform'],
                        video_url=video_params['video_url']
                    )
                    # Download the cover
                    cover_path = await download_cover(
                        file_path=local_cover_path,
                        platform=video_params['platform'],
                        cover_url=video_params['cover_url']
                    )
                    oss_video = await upload_to_oss(
                        local_video_path=file_path,
                        download_type="video"
                    )
                    if cover_path:
                        oss_cover = await upload_to_oss(
                            local_video_path=cover_path,
                            download_type="image"
                        )
                    else:
                        oss_cover = None
                    update_sql = f"""
                        UPDATE {self.article_crawler_video_table}
                        SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
                        WHERE id = %s;
                    """
                    await self.mysql_client.async_insert(
                        sql=update_sql,
                        params=(
                            oss_video,
                            oss_cover,
                            VIDEO_DOWNLOAD_SUCCESS_STATUS,
                            video_params['id']
                        )
                    )
                    downloaded_count += 1
                    if downloaded_count >= 3:
                        # Enough videos: finish early
                        await self.update_content_status(
                            ori_content_status=self.TASK_PROCESSING_STATUS,
                            trace_id=trace_id,
                            new_content_status=self.TASK_ETL_FINISHED_STATUS
                        )
                        return True
                except Exception as e:
                    print("video download failed: {}".format(e))
                    update_sql = f"""
                        UPDATE {self.article_crawler_video_table}
                        SET download_status = %s
                        WHERE id = %s;
                    """
                    await self.mysql_client.async_insert(
                        sql=update_sql,
                        params=(VIDEO_DOWNLOAD_FAIL_STATUS, video_params['id'])
                    )
            if downloaded_count >= 3:
                await self.update_content_status(
                    ori_content_status=self.TASK_PROCESSING_STATUS,
                    trace_id=trace_id,
                    new_content_status=self.TASK_ETL_FINISHED_STATUS
                )
                return True
            else:
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return False
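
    # All stages share the same success threshold: a task needs at least 3
    # usable videos (get_video_list, spider_task, and etl_task each check >= 3),
    # and publish_task later publishes at most the top 3 by score.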

    async def publish_task(self, params, kimi_title):
        """
        Publish task.
        :param kimi_title:
        :param params:
        :return:
        """
        PUBLISH_DEFAULT_STATUS = 3
        gh_id = params['gh_id']
        flow_pool_level = params['flow_pool_level']
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        # Start processing: move the task into the processing status
        affected_rows = await self.update_content_status(
            ori_content_status=PUBLISH_DEFAULT_STATUS,
            trace_id=trace_id,
            new_content_status=self.TASK_PROCESSING_STATUS
        )
        if affected_rows == 0:
            logging(
                code="6000",
                info="Multiple workers competed for this task's status lock and lost; returning"
            )
            return False
        try:
            download_videos = await self.get_downloaded_videos(content_id)
            match flow_pool_level:
                case "autoArticlePoolLevel4":
                    # Cold-start pool: publish for all accounts
                    video_list = shuffle_list(download_videos)[:3]
                case "autoArticlePoolLevel3":
                    # Shuffle only for accounts in the test allowlist
                    if self.gh_id_dict.get(gh_id):
                        video_list = shuffle_list(download_videos)[:3]
                    else:
                        video_list = download_videos[:3]
                case "autoArticlePoolLevel2":
                    # Secondary slot: only for specific accounts, skipped for now
                    video_list = []
                case "autoArticlePoolLevel1":
                    # Headline slot: not handled yet, use the top videos as-is
                    video_list = download_videos[:3]
                case _:
                    video_list = download_videos[:3]
            L = []
            for video_obj in video_list:
                publish_params = {
                    "videoPath": video_obj['video_oss_path'],
                    "uid": video_obj['uid'],
                    "title": kimi_title
                }
                publish_response = await publish_to_pq(publish_params)
                video_id = publish_response['data']['id']
                response = await get_pq_video_detail(video_id)
                obj = {
                    "uid": video_obj['uid'],
                    "source": video_obj['platform'],
                    "kimiTitle": kimi_title,
                    "videoId": response['data'][0]['id'],
                    "videoCover": response['data'][0]['shareImgPath'],
                    "videoPath": response['data'][0]['videoPath'],
                    "videoOss": video_obj['video_oss_path']
                }
                L.append(obj)
            update_sql = f"""
                UPDATE {self.article_match_video_table}
                SET content_status = %s, response = %s, process_times = %s
                WHERE trace_id = %s and content_status = %s;
            """
            # Move the task from processing to published
            await self.mysql_client.async_insert(
                sql=update_sql,
                params=(
                    self.TASK_PUBLISHED_STATUS,
                    json.dumps(L, ensure_ascii=False),
                    process_times + 1,
                    trace_id,
                    self.TASK_PROCESSING_STATUS
                )
            )
        except Exception as e:
            await self.roll_back_content_status_when_fails(
                process_times=process_times,
                trace_id=trace_id
            )
            print("publish task failed: {}".format(e))

    async def start_process(self, params):
        """
        Process a single article.
        :param params:
        :return:
        """
        # step 1: run the Kimi task
        # time.sleep(5)  # test hook: delay to observe multiple workers competing for one task
        kimi_result = await self.kimi_task(params)
        trace_id = params['trace_id']
        if kimi_result:
            # step 2: once Kimi is done, run the spider task
            print("kimi success")
            logging(
                code=3001,
                info="kimi success",
                trace_id=trace_id
            )
            spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
            if spider_flag:
                # step 3: once the crawler is done, run the ETL task
                print("spider success")
                logging(
                    code=3002,
                    info="spider_success",
                    trace_id=trace_id
                )
                etl_flag = await self.etl_task(params)
                if etl_flag:
                    # step 4: once download/upload is done, publish
                    print("etl success")
                    logging(
                        code="3003",
                        info="etl_success",
                        trace_id=trace_id
                    )
                    try:
                        await self.publish_task(params, kimi_result['kimi_title'])
                        logging(
                            code="3004",
                            info="publish_success",
                            trace_id=trace_id
                        )
                    except Exception as e:
                        logging(
                            code="6004",
                            info="publish failed--{}".format(e),
                            trace_id=trace_id
                        )
                else:
                    logging(
                        code="6003",
                        info="ETL failed",
                        trace_id=trace_id
                    )
            else:
                logging(
                    code="6002",
                    info="spider failed",
                    trace_id=trace_id
                )
        else:
            logging(
                code="6001",
                info="kimi failed",
                trace_id=trace_id
            )

    async def process_task(self, params):
        """
        Process one task.
        :return:
        """
        content_id = params['content_id']
        download_videos = await self.get_video_list(content_id)
        if not download_videos:
            # Before starting, check whether the same content_id is already being processed
            processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
            if processing_flag:
                logging(
                    code="9001",
                    info="This content_id is already being processed, skipping--{}".format(content_id)
                )
            else:
                await self.start_process(params=params)
        else:
            print("downloaded videos already exist for this content_id")

    async def deal(self):
        """
        Entry point: fetch tasks, dedupe by content_id, and process them concurrently.
        :return:
        """
        task_list = await self.get_tasks()
        print(task_list)
        # Dedupe tasks by content_id
        task_dict = {}
        for task in task_list:
            key = task['content_id']
            task_dict[key] = task
        process_list = []
        for item in task_dict:
            process_list.append(task_dict[item])
        logging(
            code="5001",
            info="Match Task Got {} this time".format(len(process_list)),
            function="Publish Task"
        )
        if task_list:
            total_task = len(process_list)
            a = time.time()
            print("Start processing, {} tasks in total".format(total_task))
            tasks = [self.process_task(params) for params in process_list]
            await asyncio.gather(*tasks)
            b = time.time()
            print("Processing took: {} s".format(b - a))
        else:
            logging(
                code="9008",
                info="No pending requests to process"
            )
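

# A minimal usage sketch (not part of the original flow): the surrounding
# project injects an async MySQL client, and `AsyncMySQLClient` below is a
# hypothetical stand-in for whatever client class the repo actually provides.
#
# if __name__ == '__main__':
#     from applications.db import AsyncMySQLClient  # hypothetical import path
#
#     async def main():
#         mysql_client = AsyncMySQLClient()
#         task = NewContentIdTask(mysql_client)
#         await task.deal()
#
#     asyncio.run(main())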