# newContentIdTask.py
  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. import asyncio
  7. from applications.config import Config
  8. from applications.log import logging
  9. from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
  10. from applications.functions.common import shuffle_list
  11. from applications.functions.kimi import KimiServer
  12. from applications.spider import search_videos_from_web
  13. from applications.etl_function import *
class NewContentIdTask(object):
    """
    Matching pipeline for articles (content_id) that have no previously
    published article history to reuse videos from:
    Kimi extraction -> video spider -> ETL (download/upload) -> publish.
    """
    # content_status state machine values (stored in article_match_video_table)
    TASK_INIT_STATUS = 0            # waiting to be processed
    TASK_KIMI_FINISHED_STATUS = 1   # kimi title/summary/keys extracted
    TASK_SPIDER_FINISHED_STATUS = 2  # enough candidate videos crawled
    TASK_ETL_FINISHED_STATUS = 3    # enough videos downloaded & uploaded to OSS
    TASK_PUBLISHED_STATUS = 4       # published
    TASK_PROCESSING_STATUS = 101    # lock value: a worker is processing this task
    TASK_FAIL_STATUS = 99           # permanently failed
    ARTICLE_TEXT_TABLE_ERROR = 98   # content_id missing from the article text table
    TASK_MAX_PROCESS_TIMES = 3      # max retry count before a task is marked failed

    def __init__(self, mysql_client):
        # async MySQL client shared by every query in this task
        self.mysql_client = mysql_client
        self.config = Config()
        # table names come from config so environments can differ
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        # gh_id whitelist for level-2 test accounts; account map for the spider
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        self.account_map = json.loads(self.config.get_config_value("accountMap"))
        # batch size for concurrent task processing
        self.spider_coroutines = self.config.get_config_value("spiderCoroutines")
  36. async def get_tasks(self):
  37. """
  38. 获取 task
  39. :return:
  40. """
  41. # 获取 content_status 为 处理中 的任务,判断时间, 如果超过 1h 则,则将它改为 0, process_times + 1
  42. select_processing_sql = f"""
  43. SELECT
  44. trace_id, content_status_update_time, process_times
  45. FROM
  46. {self.article_match_video_table}
  47. WHERE
  48. content_status = {self.TASK_PROCESSING_STATUS}
  49. and process_times <= {self.TASK_MAX_PROCESS_TIMES};
  50. """
  51. processing_articles = await self.mysql_client.async_select(select_processing_sql)
  52. if processing_articles:
  53. processing_list = [
  54. {
  55. "trace_id": item[0],
  56. "content_status_update_time": item[1],
  57. "process_times": item[2]
  58. }
  59. for item in processing_articles
  60. ]
  61. for obj in processing_list:
  62. if int(time.time()) - obj['content_status_update_time'] >= 3600:
  63. # 认为该任务失败
  64. await self.roll_back_content_status_when_fails(
  65. process_times=obj['process_times'] + 1,
  66. trace_id=obj['trace_id']
  67. )
  68. # 将 process_times > 3 且状态不为 4 的任务的状态修改为失败,
  69. update_status_sql = f"""
  70. UPDATE
  71. {self.article_match_video_table}
  72. SET
  73. content_status = %s
  74. WHERE
  75. process_times > %s and content_status != %s;
  76. """
  77. await self.mysql_client.async_insert(
  78. update_status_sql,
  79. params=(
  80. self.TASK_FAIL_STATUS,
  81. self.TASK_MAX_PROCESS_TIMES,
  82. self.TASK_PUBLISHED_STATUS
  83. )
  84. )
  85. # 获取 process_times <= 3 且 content_status = 0 的任务
  86. select_sql = f"""
  87. SELECT
  88. trace_id, content_id, flow_pool_level, gh_id, process_times
  89. FROM
  90. {self.article_match_video_table}
  91. WHERE
  92. content_status = {self.TASK_INIT_STATUS}
  93. and process_times <= {self.TASK_MAX_PROCESS_TIMES}
  94. LIMIT {self.spider_coroutines};
  95. """
  96. tasks = await self.mysql_client.async_select(select_sql)
  97. if tasks:
  98. return [
  99. {
  100. "trace_id": i[0],
  101. "content_id": i[1],
  102. "flow_pool_level": i[2],
  103. "gh_id": i[3],
  104. "process_times": i[4]
  105. }
  106. for i in tasks
  107. ]
  108. else:
  109. return []
  110. async def get_video_list(self, content_id):
  111. """
  112. 判断该文章是否存在历史匹配视频
  113. :param content_id
  114. :return:
  115. """
  116. sql = f"""
  117. SELECT id
  118. FROM {self.article_crawler_video_table}
  119. WHERE content_id = '{content_id}' and download_status = 2;
  120. """
  121. res_tuple = await self.mysql_client.async_select(sql)
  122. if len(res_tuple) >= 3:
  123. return True
  124. else:
  125. return False
  126. async def update_content_status(self, new_content_status, trace_id, ori_content_status):
  127. """
  128. :param new_content_status:
  129. :param trace_id:
  130. :param ori_content_status:
  131. :return:
  132. """
  133. update_sql = f"""
  134. UPDATE {self.article_match_video_table}
  135. SET content_status = %s, content_status_update_time = %s
  136. WHERE trace_id = %s and content_status = %s;
  137. """
  138. row_counts = await self.mysql_client.async_insert(
  139. sql=update_sql,
  140. params=(
  141. new_content_status,
  142. int(time.time()),
  143. trace_id,
  144. ori_content_status
  145. )
  146. )
  147. return row_counts
    async def roll_back_content_status_when_fails(self, process_times, trace_id):
        """
        Roll a failed task back to the init status and bump its retry count.

        NOTE: this method writes process_times + 1 itself, so callers should
        pass the current (un-incremented) retry count.  Only a row still in
        TASK_PROCESSING_STATUS is touched (releases the processing lock).
        :param process_times: current retry count of the task
        :param trace_id: task identifier
        :return: None
        """
        update_article_sql = f"""
        UPDATE {self.article_match_video_table}
        SET
            content_status = %s,
            content_status_update_time = %s,
            process_times = %s
        WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_article_sql,
            params=(
                self.TASK_INIT_STATUS,
                int(time.time()),
                process_times + 1,
                trace_id,
                self.TASK_PROCESSING_STATUS
            )
        )
  173. async def judge_whether_same_content_id_is_processing(self, content_id):
  174. """
  175. 同一个 content_id只需要处理一次
  176. :param content_id:
  177. :return:
  178. success: 4
  179. init: 0
  180. fail: 99
  181. """
  182. select_sql = f"""
  183. SELECT distinct content_status
  184. FROM {self.article_match_video_table}
  185. WHERE content_id = '{content_id}';
  186. """
  187. result = await self.mysql_client.async_select(select_sql)
  188. if result:
  189. for item in result:
  190. content_status = item[0]
  191. # if content_status not in {self.TASK_INIT_STATUS, self.TASK_PUBLISHED_STATUS} :
  192. if content_status in {
  193. self.TASK_KIMI_FINISHED_STATUS,
  194. self.TASK_SPIDER_FINISHED_STATUS,
  195. self.TASK_ETL_FINISHED_STATUS,
  196. self.TASK_PROCESSING_STATUS
  197. }:
  198. return True
  199. return False
  200. else:
  201. return False
  202. async def get_downloaded_videos(self, content_id):
  203. """
  204. 获取已下载的视频
  205. :return:
  206. """
  207. sql = f"""
  208. SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
  209. FROM {self.article_crawler_video_table}
  210. WHERE content_id = '{content_id}' and download_status = 2
  211. ORDER BY score DESC;
  212. """
  213. res_tuple = await self.mysql_client.async_select(sql)
  214. return [
  215. {
  216. "platform": i[0],
  217. "play_count": i[1],
  218. "like_count": i[2],
  219. "video_oss_path": i[3],
  220. "cover_oss_path": i[4],
  221. "uid": i[5]
  222. }
  223. for i in res_tuple
  224. ]
  225. async def get_kimi_status(self, content_id):
  226. """
  227. 通过 content_id 获取kimi info
  228. :return:
  229. """
  230. select_sql = f"""
  231. select kimi_status
  232. from {self.article_text_table}
  233. where content_id = '{content_id}';
  234. """
  235. response = await self.mysql_client.async_select(select_sql)
  236. if response:
  237. kimi_status = response[0][0]
  238. return kimi_status
  239. else:
  240. return self.ARTICLE_TEXT_TABLE_ERROR
    async def kimi_task(self, params):
        """
        Run the Kimi extraction step for one task.

        When the text table already holds a successful kimi result, advance
        the task status (init -> kimi-finished) and return the stored info.
        Otherwise lock the task (init -> processing), call Kimi, persist the
        result, advance the status (processing -> kimi-finished) and return
        the fresh info.  On Kimi failure, kimi_status is marked failed, the
        task is rolled back to init, and {} is returned.
        :param params: task dict with content_id / trace_id / process_times
        :return: dict(kimi_title, ori_title, kimi_summary, kimi_keys) on
            success; {} on Kimi failure; None when the status lock is lost or
            the content_id is missing from the text table
        """
        # local kimi_status values in the article text table
        KIMI_SUCCESS_STATUS = 1
        KIMI_FAIL_STATUS = 2
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == KIMI_SUCCESS_STATUS:
            # kimi info already exists: just flip the task status 0 -> 1
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # another worker holds the status lock; give up on this task
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            get_kimi_sql = f"""
            SELECT article_title, kimi_title, kimi_summary, kimi_keys
            FROM {self.article_text_table}
            WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == self.ARTICLE_TEXT_TABLE_ERROR:
            # content_id not present in the text table; nothing to extract
            logging(
                code="4000",
                info="long_articles_text表中未找到 content_id"
            )
        else:
            # start processing: flip content_status 0 -> 101 (acquire the lock)
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_PROCESSING_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            K = KimiServer()
            try:
                select_sql = f"""
                select article_title, article_text
                from {self.article_text_table}
                where content_id = '{content_id}'
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                # strip quotes so the title cannot break later SQL/JSON usage
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                    kimi_title = %s,
                    kimi_summary = %s,
                    kimi_keys = %s,
                    kimi_status = %s
                WHERE content_id = %s;"""
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_title, content_title, content_keys, KIMI_SUCCESS_STATUS, params['content_id'])
                )
                await self.update_content_status(
                    new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.TASK_PROCESSING_STATUS
                )
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # kimi step failed: record the failure on the text row
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                    kimi_status = %s
                WHERE content_id = %s
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        KIMI_FAIL_STATUS,
                        content_id
                    )
                )
                # roll the task status back 101 -> 0 (retry count +1)
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}
  355. async def spider_task(self, params, kimi_result):
  356. """
  357. 爬虫任务
  358. :return:
  359. """
  360. SPIDER_INIT_STATUS = 1
  361. trace_id = params['trace_id']
  362. content_id = params['content_id']
  363. process_times = params['process_times']
  364. gh_id = params['gh_id']
  365. select_sql = f"""
  366. select count(id) from {self.article_crawler_video_table} where content_id = '{content_id}';
  367. """
  368. count_tuple = await self.mysql_client.async_select(select_sql)
  369. counts = count_tuple[0][0]
  370. if counts >= 3:
  371. await self.update_content_status(
  372. new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
  373. trace_id=trace_id,
  374. ori_content_status=SPIDER_INIT_STATUS
  375. )
  376. return True
  377. # 开始处理,将状态由 1 改成 101
  378. affected_rows = await self.update_content_status(
  379. new_content_status=self.TASK_PROCESSING_STATUS,
  380. ori_content_status=SPIDER_INIT_STATUS,
  381. trace_id=trace_id
  382. )
  383. if affected_rows == 0:
  384. logging(
  385. code="6000",
  386. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  387. )
  388. return False
  389. try:
  390. search_videos_count = await search_videos_from_web(
  391. info={
  392. "ori_title": kimi_result['ori_title'],
  393. "kimi_summary": kimi_result['kimi_summary'],
  394. "kimi_keys": kimi_result['kimi_keys'],
  395. "trace_id": trace_id,
  396. "gh_id": gh_id,
  397. "content_id": content_id,
  398. "crawler_video_table": self.article_crawler_video_table
  399. },
  400. gh_id_map=self.account_map,
  401. db_client=self.mysql_client
  402. )
  403. if search_videos_count >= 3:
  404. # 表示爬虫任务执行成功, 将状态从 101 改为 2
  405. await self.update_content_status(
  406. new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
  407. trace_id=trace_id,
  408. ori_content_status=self.TASK_PROCESSING_STATUS
  409. )
  410. return True
  411. else:
  412. await self.roll_back_content_status_when_fails(
  413. process_times=process_times + 1,
  414. trace_id=trace_id
  415. )
  416. return False
  417. except Exception as e:
  418. await self.roll_back_content_status_when_fails(
  419. process_times=process_times + 1,
  420. trace_id=trace_id
  421. )
  422. print("爬虫处理失败: {}".format(e))
  423. return False
  424. async def etl_task(self, params):
  425. """
  426. download && upload videos
  427. :param params:
  428. :return:
  429. """
  430. VIDEO_DOWNLOAD_SUCCESS_STATUS = 2
  431. VIDEO_DOWNLOAD_FAIL_STATUS = 3
  432. ETL_TASK_INIT_STATUS = 2
  433. trace_id = params['trace_id']
  434. content_id = params['content_id']
  435. # 判断是否有三条已经下载完成的视频
  436. select_sql = f"""
  437. select count(id)
  438. from {self.article_crawler_video_table}
  439. where content_id = '{content_id}' and download_status = {VIDEO_DOWNLOAD_SUCCESS_STATUS};
  440. """
  441. video_count_tuple = await self.mysql_client.async_select(select_sql)
  442. video_count = video_count_tuple[0][0]
  443. if video_count >= 3:
  444. affect_rows = await self.update_content_status(
  445. ori_content_status=ETL_TASK_INIT_STATUS,
  446. trace_id=trace_id,
  447. new_content_status=self.TASK_ETL_FINISHED_STATUS
  448. )
  449. if affect_rows == 0:
  450. logging(
  451. code="6000",
  452. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  453. )
  454. return False
  455. return True
  456. else:
  457. # 开始处理, 将文章状态修改为处理状态
  458. affected_rows = await self.update_content_status(
  459. ori_content_status=ETL_TASK_INIT_STATUS,
  460. trace_id=trace_id,
  461. new_content_status=self.TASK_PROCESSING_STATUS
  462. )
  463. if affected_rows == 0:
  464. logging(
  465. code="6000",
  466. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  467. )
  468. return False
  469. select_sql = f"""
  470. SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
  471. FROM {self.article_crawler_video_table}
  472. WHERE content_id = '{content_id}' and download_status != {VIDEO_DOWNLOAD_SUCCESS_STATUS}
  473. ORDER BY score DESC;
  474. """
  475. videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
  476. downloaded_count = 0
  477. for line in videos_need_to_download_tuple:
  478. params = {
  479. "id": line[0],
  480. "video_id": line[1],
  481. "platform": line[2],
  482. "video_title": line[3],
  483. "video_url": line[4],
  484. "cover_url": line[5],
  485. "user_id": line[6],
  486. "trace_id": line[7]
  487. }
  488. try:
  489. local_video_path, local_cover_path = generate_video_path(params['platform'], params['video_id'])
  490. # download videos
  491. file_path = await download_video(
  492. file_path=local_video_path,
  493. platform=params['platform'],
  494. video_url=params['video_url']
  495. )
  496. # download cover
  497. cover_path = await download_cover(
  498. file_path=local_cover_path,
  499. platform=params['platform'],
  500. cover_url=params['cover_url']
  501. )
  502. oss_video = await upload_to_oss(
  503. local_video_path=file_path,
  504. download_type="video"
  505. )
  506. if cover_path:
  507. oss_cover = await upload_to_oss(
  508. local_video_path=cover_path,
  509. download_type="image"
  510. )
  511. else:
  512. oss_cover = None
  513. update_sql = f"""
  514. UPDATE {self.article_crawler_video_table}
  515. SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
  516. WHERE id = %s;
  517. """
  518. await self.mysql_client.async_insert(
  519. sql=update_sql,
  520. params=(
  521. oss_video,
  522. oss_cover,
  523. VIDEO_DOWNLOAD_SUCCESS_STATUS,
  524. params['id']
  525. )
  526. )
  527. downloaded_count += 1
  528. if downloaded_count > 3:
  529. await self.update_content_status(
  530. ori_content_status=self.TASK_PROCESSING_STATUS,
  531. trace_id=trace_id,
  532. new_content_status=self.TASK_ETL_FINISHED_STATUS
  533. )
  534. return True
  535. except Exception as e:
  536. update_sql = f"""
  537. UPDATE {self.article_crawler_video_table}
  538. SET download_status = %s
  539. WHERE id = %s;
  540. """
  541. await self.mysql_client.async_insert(
  542. sql=update_sql,
  543. params=(VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  544. )
  545. if downloaded_count >= 3:
  546. await self.update_content_status(
  547. ori_content_status=self.TASK_PROCESSING_STATUS,
  548. trace_id=trace_id,
  549. new_content_status=self.TASK_ETL_FINISHED_STATUS
  550. )
  551. return True
  552. else:
  553. await self.roll_back_content_status_when_fails(
  554. process_times=params['process_times'] + 1,
  555. trace_id=params['trace_id']
  556. )
  557. return False
  558. async def publish_task(self, params, kimi_title):
  559. """
  560. 发布任务
  561. :param kimi_title:
  562. :param params:
  563. :return:
  564. """
  565. PUBLISH_DEFAULT_STATUS = 3
  566. gh_id = params['gh_id']
  567. flow_pool_level = params['flow_pool_level']
  568. content_id = params['content_id']
  569. trace_id = params['trace_id']
  570. process_times = params['process_times']
  571. # 开始处理,将状态修改为操作状态
  572. affected_rows = await self.update_content_status(
  573. ori_content_status=PUBLISH_DEFAULT_STATUS,
  574. trace_id=trace_id,
  575. new_content_status=self.TASK_PROCESSING_STATUS
  576. )
  577. if affected_rows == 0:
  578. logging(
  579. code="6000",
  580. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  581. )
  582. return False
  583. try:
  584. download_videos = await self.get_downloaded_videos(content_id)
  585. match flow_pool_level:
  586. case "autoArticlePoolLevel4":
  587. # 冷启层, 全量做
  588. video_list = shuffle_list(download_videos)[:3]
  589. case "autoArticlePoolLevel3":
  590. if self.gh_id_dict.get(gh_id):
  591. video_list = shuffle_list(download_videos)[:3]
  592. else:
  593. video_list = download_videos[:3]
  594. case "autoArticlePoolLevel2":
  595. # 次条,只针对具体账号做
  596. video_list = []
  597. case "autoArticlePoolLevel1":
  598. # 头条,先不做
  599. video_list = download_videos[:3]
  600. case _:
  601. video_list = download_videos[:3]
  602. L = []
  603. for video_obj in video_list:
  604. params = {
  605. "videoPath": video_obj['video_oss_path'],
  606. "uid": video_obj['uid'],
  607. "title": kimi_title
  608. }
  609. publish_response = await publish_to_pq(params)
  610. video_id = publish_response['data']['id']
  611. response = await get_pq_video_detail(video_id)
  612. obj = {
  613. "uid": video_obj['uid'],
  614. "source": video_obj['platform'],
  615. "kimiTitle": kimi_title,
  616. "videoId": response['data'][0]['id'],
  617. "videoCover": response['data'][0]['shareImgPath'],
  618. "videoPath": response['data'][0]['videoPath'],
  619. "videoOss": video_obj['video_oss_path']
  620. }
  621. L.append(obj)
  622. update_sql = f"""
  623. UPDATE {self.article_match_video_table}
  624. SET content_status = %s, response = %s, process_times = %s
  625. WHERE trace_id = %s and content_status = %s;
  626. """
  627. # 从操作中状态修改为已发布状态
  628. await self.mysql_client.async_insert(
  629. sql=update_sql,
  630. params=(
  631. self.TASK_PUBLISHED_STATUS,
  632. json.dumps(L, ensure_ascii=False),
  633. process_times + 1,
  634. trace_id,
  635. self.TASK_PROCESSING_STATUS
  636. )
  637. )
  638. except Exception as e:
  639. await self.roll_back_content_status_when_fails(
  640. process_times=params['process_times'] + 1,
  641. trace_id=params['trace_id']
  642. )
  643. print(e)
  644. async def start_process(self, params):
  645. """
  646. 处理单篇文章
  647. :param params:
  648. :return:
  649. """
  650. # step1: 执行 kimi 操作
  651. # time.sleep(5) # 测试多个进程操作同一个 task 的等待时间
  652. kimi_result = await self.kimi_task(params)
  653. trace_id = params['trace_id']
  654. if kimi_result:
  655. # 等待 kimi 操作执行完成之后,开始执行 spider_task
  656. print("kimi success")
  657. logging(
  658. code=3001,
  659. info="kimi success",
  660. trace_id=trace_id
  661. )
  662. spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
  663. if spider_flag:
  664. # 等待爬虫执行完成后,开始执行 etl_task
  665. print("spider success")
  666. logging(
  667. code=3002,
  668. info="spider_success",
  669. trace_id=trace_id
  670. )
  671. etl_flag = await self.etl_task(params)
  672. if etl_flag:
  673. # 等待下载上传完成,执行发布任务
  674. print("etl success")
  675. logging(
  676. code="3003",
  677. info="etl_success",
  678. trace_id=trace_id
  679. )
  680. try:
  681. await self.publish_task(params, kimi_result['kimi_title'])
  682. logging(
  683. code="3004",
  684. info="publish_success",
  685. trace_id=trace_id
  686. )
  687. except Exception as e:
  688. logging(
  689. code="6004",
  690. info="publish 失败--{}".format(e),
  691. trace_id=params['trace_id']
  692. )
  693. else:
  694. logging(
  695. code="6003",
  696. info="ETL 处理失败",
  697. trace_id=params['trace_id']
  698. )
  699. else:
  700. logging(
  701. code="6002",
  702. info="爬虫处理失败",
  703. trace_id=params['trace_id']
  704. )
  705. else:
  706. logging(
  707. code="6001",
  708. info="kimi 处理失败",
  709. trace_id=params['trace_id']
  710. )
  711. async def process_task(self, params):
  712. """
  713. 处理任务
  714. :return:
  715. """
  716. content_id = params['content_id']
  717. download_videos = await self.get_video_list(content_id)
  718. if not download_videos:
  719. # 开始处理, 判断是否有相同的文章 id 正在处理
  720. processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
  721. if processing_flag:
  722. logging(
  723. code="9001",
  724. info="该 content id 正在处理中, 跳过此任务--{}".format(content_id)
  725. )
  726. else:
  727. await self.start_process(params=params)
  728. else:
  729. print("存在已下载视频")
  730. async def deal(self):
  731. """
  732. function
  733. :return:
  734. """
  735. task_list = await self.get_tasks()
  736. print(task_list)
  737. task_dict = {}
  738. # 对 content_id去重
  739. for task in task_list:
  740. key = task['content_id']
  741. task_dict[key] = task
  742. process_list = []
  743. for item in task_dict:
  744. process_list.append(task_dict[item])
  745. logging(
  746. code="5001",
  747. info="Match Task Got {} this time".format(len(process_list)),
  748. function="Publish Task"
  749. )
  750. if task_list:
  751. total_task = len(process_list)
  752. a = time.time()
  753. print("开始处理,一共{}个任务".format(total_task))
  754. tasks = [self.process_task(params) for params in process_list]
  755. await asyncio.gather(*tasks)
  756. b = time.time()
  757. print("处理时间: {} s".format(b - a))
  758. else:
  759. logging(
  760. code="9008",
  761. info="没有要处理的请求"
  762. )