  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. import asyncio
  7. from applications.config import Config
  8. from applications.log import logging
  9. from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
  10. from applications.functions.common import shuffle_list
  11. from applications.functions.kimi import KimiServer
  12. from applications.spider import search_videos_from_web
  13. from applications.etl_function import *
  14. from applications.feishu import bot


class NewContentIdTask(object):
    """
    Matching pipeline for articles that have no previously published history
    """
    TASK_INIT_STATUS = 0
    TASK_KIMI_FINISHED_STATUS = 1
    TASK_SPIDER_FINISHED_STATUS = 2
    TASK_ETL_FINISHED_STATUS = 3
    TASK_PUBLISHED_STATUS = 4
    TASK_PROCESSING_STATUS = 101
    TASK_FAIL_STATUS = 99
    KIMI_ILLEGAL_STATUS = 95
    ARTICLE_TEXT_TABLE_ERROR = 98
    TASK_MAX_PROCESS_TIMES = 3
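
    # Status lifecycle (summarized from the constants above and the methods below):
    #   0 (init) -> 1 (kimi done) -> 2 (spider done) -> 3 (etl done) -> 4 (published)
    # 101 is a transient "processing" lock held while a step runs; on failure the
    # row is rolled back to 0 with process_times incremented. 99 marks a task that
    # exhausted its retries, 95 a kimi-rejected task, and 98 a missing row in the
    # article text table.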

    def __init__(self, mysql_client):
        self.mysql_client = mysql_client
        self.config = Config()
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        self.account_map = json.loads(self.config.get_config_value("accountMap"))
        self.spider_coroutines = self.config.get_config_value("spiderCoroutines")

    async def get_tasks(self):
        """
        Fetch pending tasks
        :return:
        """
        # Find tasks whose content_status is "processing"; if one has been stuck
        # for more than an hour, treat it as failed: roll it back to 0 and
        # increment process_times.
        select_processing_sql = f"""
            SELECT
                trace_id, content_status_update_time, process_times
            FROM
                {self.article_match_video_table}
            WHERE
                content_status = {self.TASK_PROCESSING_STATUS}
                and process_times <= {self.TASK_MAX_PROCESS_TIMES};
        """
        processing_articles = await self.mysql_client.async_select(select_processing_sql)
        if processing_articles:
            processing_list = [
                {
                    "trace_id": item[0],
                    "content_status_update_time": item[1],
                    "process_times": item[2]
                }
                for item in processing_articles
            ]
            for obj in processing_list:
                if int(time.time()) - obj['content_status_update_time'] >= 3600:
                    # The task is considered failed; the rollback helper
                    # increments process_times by 1 itself
                    await self.roll_back_content_status_when_fails(
                        process_times=obj['process_times'],
                        trace_id=obj['trace_id']
                    )
        # Mark tasks with process_times > 3 whose status is not "published" as failed
        update_status_sql = f"""
            UPDATE
                {self.article_match_video_table}
            SET
                content_status = %s
            WHERE
                process_times > %s and content_status != %s;
        """
        await self.mysql_client.async_insert(
            update_status_sql,
            params=(
                self.TASK_FAIL_STATUS,
                self.TASK_MAX_PROCESS_TIMES,
                self.TASK_PUBLISHED_STATUS
            )
        )
        # Fetch tasks with process_times <= 3 and content_status = 0
        select_sql = f"""
            SELECT
                trace_id, content_id, flow_pool_level, gh_id, process_times
            FROM
                {self.article_match_video_table}
            WHERE
                content_status = {self.TASK_INIT_STATUS}
                and process_times <= {self.TASK_MAX_PROCESS_TIMES}
            ORDER BY flow_pool_level, request_timestamp
            LIMIT {self.spider_coroutines};
        """
        tasks = await self.mysql_client.async_select(select_sql)
        if tasks:
            return [
                {
                    "trace_id": i[0],
                    "content_id": i[1],
                    "flow_pool_level": i[2],
                    "gh_id": i[3],
                    "process_times": i[4]
                }
                for i in tasks
            ]
        else:
            return []

    async def get_video_list(self, content_id):
        """
        Check whether the article already has matched videos from history
        :param content_id:
        :return:
        """
        sql = f"""
            SELECT id
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = 2;
        """
        res_tuple = await self.mysql_client.async_select(sql)
        return len(res_tuple) >= 3

    async def update_content_status(self, new_content_status, trace_id, ori_content_status):
        """
        Compare-and-swap update of content_status: only a row still in
        ori_content_status is updated, so the affected-row count doubles as a
        lock-acquisition result.
        :param new_content_status:
        :param trace_id:
        :param ori_content_status:
        :return: number of affected rows
        """
        update_sql = f"""
            UPDATE {self.article_match_video_table}
            SET content_status = %s, content_status_update_time = %s
            WHERE trace_id = %s and content_status = %s;
        """
        row_counts = await self.mysql_client.async_insert(
            sql=update_sql,
            params=(
                new_content_status,
                int(time.time()),
                trace_id,
                ori_content_status
            )
        )
        return row_counts
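
    # Optimistic-lock usage (the pattern used throughout this class): a worker
    # claims a task by swapping the status and treats zero affected rows as a
    # lost race, e.g.
    #
    #   affected_rows = await self.update_content_status(
    #       new_content_status=self.TASK_PROCESSING_STATUS,
    #       trace_id=trace_id,
    #       ori_content_status=self.TASK_INIT_STATUS
    #   )
    #   if affected_rows == 0:
    #       return  # another process already holds this task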

    async def roll_back_content_status_when_fails(self, process_times, trace_id):
        """
        On failure, roll the task back to the initial status and increment
        process_times by 1 (the increment happens here, so callers pass the
        current value).
        :param process_times:
        :param trace_id:
        :return:
        """
        update_article_sql = f"""
            UPDATE {self.article_match_video_table}
            SET
                content_status = %s,
                content_status_update_time = %s,
                process_times = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_article_sql,
            params=(
                self.TASK_INIT_STATUS,
                int(time.time()),
                process_times + 1,
                trace_id,
                self.TASK_PROCESSING_STATUS
            )
        )

    async def judge_whether_same_content_id_is_processing(self, content_id):
        """
        Each content_id only needs to be processed once
        :param content_id:
        :return:
        success: 4
        init: 0
        fail: 99
        """
        select_sql = f"""
            SELECT distinct content_status
            FROM {self.article_match_video_table}
            WHERE content_id = '{content_id}';
        """
        result = await self.mysql_client.async_select(select_sql)
        if result:
            for item in result:
                content_status = item[0]
                # Any status indicating the content is being, or has been,
                # processed means this task should be skipped
                if content_status in {
                    self.TASK_KIMI_FINISHED_STATUS,
                    self.TASK_SPIDER_FINISHED_STATUS,
                    self.TASK_ETL_FINISHED_STATUS,
                    self.TASK_PROCESSING_STATUS,
                    self.TASK_PUBLISHED_STATUS
                }:
                    return True
            return False
        else:
            return False

    async def get_downloaded_videos(self, content_id):
        """
        Fetch videos that have already been downloaded
        :return:
        """
        sql = f"""
            SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = 2
            ORDER BY score DESC;
        """
        res_tuple = await self.mysql_client.async_select(sql)
        return [
            {
                "platform": i[0],
                "play_count": i[1],
                "like_count": i[2],
                "video_oss_path": i[3],
                "cover_oss_path": i[4],
                "uid": i[5]
            }
            for i in res_tuple
        ]

    async def get_kimi_status(self, content_id):
        """
        Look up the kimi status by content_id
        :return:
        """
        select_sql = f"""
            SELECT kimi_status
            FROM {self.article_text_table}
            WHERE content_id = '{content_id}';
        """
        response = await self.mysql_client.async_select(select_sql)
        if response:
            kimi_status = response[0][0]
            return kimi_status
        else:
            return self.ARTICLE_TEXT_TABLE_ERROR

    async def kimi_task(self, params):
        """
        Run the kimi task
        :return:
        """
        KIMI_SUCCESS_STATUS = 1
        KIMI_FAIL_STATUS = 2
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == KIMI_SUCCESS_STATUS:
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="Multiple processes raced for this task's status lock; this one lost, returning"
                )
                return
            get_kimi_sql = f"""
                SELECT article_title, kimi_title, kimi_summary, kimi_keys
                FROM {self.article_text_table}
                WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == self.ARTICLE_TEXT_TABLE_ERROR:
            logging(
                code="4000",
                info="content_id not found in the long_articles_text table"
            )
        else:
            # Start processing: move content_status from 0 to 101
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_PROCESSING_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="Multiple processes raced for this task's status lock; this one lost, returning"
                )
                return
            K = KimiServer()
            try:
                select_sql = f"""
                    SELECT article_title, article_text
                    FROM {self.article_text_table}
                    WHERE content_id = '{content_id}';
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                update_kimi_sql = f"""
                    UPDATE {self.article_text_table}
                    SET
                        kimi_title = %s,
                        kimi_summary = %s,
                        kimi_keys = %s,
                        kimi_status = %s
                    WHERE content_id = %s;
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_title, content_title, content_keys, KIMI_SUCCESS_STATUS, content_id)
                )
                await self.update_content_status(
                    new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.TASK_PROCESSING_STATUS
                )
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # The kimi task failed: record the failure on the text row
                update_kimi_sql = f"""
                    UPDATE {self.article_text_table}
                    SET kimi_status = %s
                    WHERE content_id = %s;
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        KIMI_FAIL_STATUS,
                        content_id
                    )
                )
                # Roll the status back from 101 to 0
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}

    async def spider_task(self, params, kimi_result):
        """
        Run the crawler task
        :return:
        """
        SPIDER_INIT_STATUS = 1
        DOWNLOAD_SUCCESS_STATUS = 2
        trace_id = params['trace_id']
        content_id = params['content_id']
        process_times = params['process_times']
        gh_id = params['gh_id']
        select_sql = f"""
            SELECT count(id)
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}'
                and download_status = {DOWNLOAD_SUCCESS_STATUS};
        """
        count_tuple = await self.mysql_client.async_select(select_sql)
        counts = count_tuple[0][0]
        if counts >= 3:
            await self.update_content_status(
                new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=SPIDER_INIT_STATUS
            )
            return True
        # Start processing: move the status from 1 to 101
        affected_rows = await self.update_content_status(
            new_content_status=self.TASK_PROCESSING_STATUS,
            ori_content_status=SPIDER_INIT_STATUS,
            trace_id=trace_id
        )
        if affected_rows == 0:
            logging(
                code="6000",
                info="Multiple processes raced for this task's status lock; this one lost, returning"
            )
            return False
        try:
            search_videos_count = await search_videos_from_web(
                info={
                    "ori_title": kimi_result['ori_title'],
                    "kimi_summary": kimi_result['kimi_summary'],
                    "kimi_keys": kimi_result['kimi_keys'],
                    "trace_id": trace_id,
                    "gh_id": gh_id,
                    "content_id": content_id,
                    "crawler_video_table": self.article_crawler_video_table
                },
                gh_id_map=self.account_map,
                db_client=self.mysql_client
            )
            if search_videos_count >= 3:
                # The crawler succeeded: move the status from 101 to 2
                await self.update_content_status(
                    new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.TASK_PROCESSING_STATUS
                )
                return True
            else:
                # The rollback helper increments process_times by 1 itself
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return False
        except Exception as e:
            await self.roll_back_content_status_when_fails(
                process_times=process_times,
                trace_id=trace_id
            )
            print("Crawler task failed: {}".format(e))
            return False

    async def etl_task(self, params):
        """
        Download videos and upload them to OSS
        :param params:
        :return:
        """
        VIDEO_DOWNLOAD_SUCCESS_STATUS = 2
        VIDEO_DOWNLOAD_FAIL_STATUS = 3
        ETL_TASK_INIT_STATUS = 2
        trace_id = params['trace_id']
        content_id = params['content_id']
        process_times = params['process_times']
        # Check whether three videos have already been downloaded
        select_sql = f"""
            SELECT count(id)
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = {VIDEO_DOWNLOAD_SUCCESS_STATUS};
        """
        video_count_tuple = await self.mysql_client.async_select(select_sql)
        video_count = video_count_tuple[0][0]
        if video_count >= 3:
            affect_rows = await self.update_content_status(
                ori_content_status=ETL_TASK_INIT_STATUS,
                trace_id=trace_id,
                new_content_status=self.TASK_ETL_FINISHED_STATUS
            )
            if affect_rows == 0:
                logging(
                    code="6000",
                    info="Multiple processes raced for this task's status lock; this one lost, returning"
                )
                return False
            return True
        else:
            # Start processing: mark the article as "processing"
            affected_rows = await self.update_content_status(
                ori_content_status=ETL_TASK_INIT_STATUS,
                trace_id=trace_id,
                new_content_status=self.TASK_PROCESSING_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="Multiple processes raced for this task's status lock; this one lost, returning"
                )
                return False
            select_sql = f"""
                SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
                FROM {self.article_crawler_video_table}
                WHERE content_id = '{content_id}' and download_status != {VIDEO_DOWNLOAD_SUCCESS_STATUS}
                ORDER BY score DESC;
            """
            videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
            downloaded_count = 0
            for line in videos_need_to_download_tuple:
                # Use a dedicated dict so the method's params argument is not shadowed
                video_params = {
                    "id": line[0],
                    "video_id": line[1],
                    "platform": line[2],
                    "video_title": line[3],
                    "video_url": line[4],
                    "cover_url": line[5],
                    "user_id": line[6],
                    "trace_id": line[7]
                }
                try:
                    local_video_path, local_cover_path = generate_video_path(video_params['platform'], video_params['video_id'])
                    # Download the video
                    file_path = await download_video(
                        file_path=local_video_path,
                        platform=video_params['platform'],
                        video_url=video_params['video_url']
                    )
                    # Download the cover
                    cover_path = await download_cover(
                        file_path=local_cover_path,
                        platform=video_params['platform'],
                        cover_url=video_params['cover_url']
                    )
                    oss_video = await upload_to_oss(
                        local_video_path=file_path,
                        download_type="video"
                    )
                    if cover_path:
                        oss_cover = await upload_to_oss(
                            local_video_path=cover_path,
                            download_type="image"
                        )
                    else:
                        oss_cover = None
                    update_sql = f"""
                        UPDATE {self.article_crawler_video_table}
                        SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
                        WHERE id = %s;
                    """
                    await self.mysql_client.async_insert(
                        sql=update_sql,
                        params=(
                            oss_video,
                            oss_cover,
                            VIDEO_DOWNLOAD_SUCCESS_STATUS,
                            video_params['id']
                        )
                    )
                    downloaded_count += 1
                    # Three successful downloads are enough; finish early
                    if downloaded_count >= 3:
                        await self.update_content_status(
                            ori_content_status=self.TASK_PROCESSING_STATUS,
                            trace_id=trace_id,
                            new_content_status=self.TASK_ETL_FINISHED_STATUS
                        )
                        return True
                except Exception as e:
                    update_sql = f"""
                        UPDATE {self.article_crawler_video_table}
                        SET download_status = %s
                        WHERE id = %s;
                    """
                    await self.mysql_client.async_insert(
                        sql=update_sql,
                        params=(VIDEO_DOWNLOAD_FAIL_STATUS, video_params['id'])
                    )
            if downloaded_count >= 3:
                await self.update_content_status(
                    ori_content_status=self.TASK_PROCESSING_STATUS,
                    trace_id=trace_id,
                    new_content_status=self.TASK_ETL_FINISHED_STATUS
                )
                return True
            else:
                # The rollback helper increments process_times by 1 itself
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return False

    async def publish_task(self, params, kimi_title):
        """
        Publish task
        :param kimi_title:
        :param params:
        :return:
        """
        PUBLISH_DEFAULT_STATUS = 3
        gh_id = params['gh_id']
        flow_pool_level = params['flow_pool_level']
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        # Start processing: mark the task as "processing"
        affected_rows = await self.update_content_status(
            ori_content_status=PUBLISH_DEFAULT_STATUS,
            trace_id=trace_id,
            new_content_status=self.TASK_PROCESSING_STATUS
        )
        if affected_rows == 0:
            logging(
                code="6000",
                info="Multiple processes raced for this task's status lock; this one lost, returning"
            )
            return False
        try:
            download_videos = await self.get_downloaded_videos(content_id)
            match flow_pool_level:
                case "autoArticlePoolLevel4":
                    # Cold-start pool: publish for all accounts
                    video_list = shuffle_list(download_videos)[:3]
                case "autoArticlePoolLevel3":
                    if self.gh_id_dict.get(gh_id):
                        video_list = shuffle_list(download_videos)[:3]
                    else:
                        video_list = download_videos[:3]
                case "autoArticlePoolLevel2":
                    # Second-position articles: only for specific accounts, skipped here
                    video_list = []
                case "autoArticlePoolLevel1":
                    # Headline articles: not handled yet
                    video_list = download_videos[:3]
                case _:
                    video_list = download_videos[:3]
            L = []
            for video_obj in video_list:
                # Use a dedicated dict so the method's params argument is not shadowed
                publish_params = {
                    "videoPath": video_obj['video_oss_path'],
                    "uid": video_obj['uid'],
                    "title": kimi_title
                }
                publish_response = await publish_to_pq(publish_params)
                video_id = publish_response['data']['id']
                response = await get_pq_video_detail(video_id)
                obj = {
                    "uid": video_obj['uid'],
                    "source": video_obj['platform'],
                    "kimiTitle": kimi_title,
                    "videoId": response['data'][0]['id'],
                    "videoCover": response['data'][0]['shareImgPath'],
                    "videoPath": response['data'][0]['videoPath'],
                    "videoOss": video_obj['video_oss_path']
                }
                L.append(obj)
            update_sql = f"""
                UPDATE {self.article_match_video_table}
                SET content_status = %s, response = %s, process_times = %s
                WHERE trace_id = %s and content_status = %s;
            """
            # Move the task from "processing" to "published"
            await self.mysql_client.async_insert(
                sql=update_sql,
                params=(
                    self.TASK_PUBLISHED_STATUS,
                    json.dumps(L, ensure_ascii=False),
                    process_times + 1,
                    trace_id,
                    self.TASK_PROCESSING_STATUS
                )
            )
        except Exception as e:
            # The rollback helper increments process_times by 1 itself
            await self.roll_back_content_status_when_fails(
                process_times=process_times,
                trace_id=trace_id
            )
            print(e)
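
    # Pipeline overview: start_process below chains the four steps, each one a
    # compare-and-swap status transition guarded by update_content_status:
    #   kimi_task (0 -> 1) -> spider_task (1 -> 2) -> etl_task (2 -> 3)
    #   -> publish_task (3 -> 4), with 101 held while a step is running.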

    async def start_process(self, params):
        """
        Process a single article
        :param params:
        :return:
        """
        # Step 1: run the kimi task
        # time.sleep(5)  # test how long several processes wait on the same task
        kimi_result = await self.kimi_task(params)
        trace_id = params['trace_id']
        process_times = params['process_times']
        content_id = params['content_id']
        print(kimi_result)
        if kimi_result:
            # Once kimi has finished, run the spider task
            print("kimi success")
            logging(
                code=3001,
                info="kimi success",
                trace_id=trace_id
            )
            spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
            if spider_flag:
                # Once the crawler has finished, run the ETL task
                print("spider success")
                logging(
                    code=3002,
                    info="spider_success",
                    trace_id=trace_id
                )
                etl_flag = await self.etl_task(params)
                if etl_flag:
                    # Once download/upload has finished, run the publish task
                    print("etl success")
                    logging(
                        code="3003",
                        info="etl_success",
                        trace_id=trace_id
                    )
                    try:
                        await self.publish_task(params, kimi_result['kimi_title'])
                        logging(
                            code="3004",
                            info="publish_success",
                            trace_id=trace_id
                        )
                    except Exception as e:
                        logging(
                            code="6004",
                            info="publish failed--{}".format(e),
                            trace_id=trace_id
                        )
                else:
                    logging(
                        code="6003",
                        info="ETL task failed",
                        trace_id=trace_id
                    )
            else:
                logging(
                    code="6002",
                    info="crawler task failed",
                    trace_id=trace_id
                )
        else:
            logging(
                code="6001",
                info="kimi task failed",
                trace_id=trace_id
            )
            if process_times >= self.TASK_MAX_PROCESS_TIMES:
                logging(
                    code="6011",
                    info="kimi failure count reached the limit, giving up",
                    trace_id=trace_id
                )
                # Mark every row with the same content_id and content_status = 0
                # with the kimi-illegal status
                update_sql = f"""
                    UPDATE {self.article_match_video_table}
                    SET content_status = %s
                    WHERE content_id = %s and content_status = %s;
                """
                affected_rows = await self.mysql_client.async_insert(
                    sql=update_sql,
                    params=(
                        self.KIMI_ILLEGAL_STATUS,
                        content_id,
                        self.TASK_INIT_STATUS
                    )
                )
                bot(
                    title="KIMI processing failed",
                    detail={
                        "content_id": content_id,
                        "affected_rows": affected_rows
                    }
                )

    async def process_task(self, params):
        """
        Process one task
        :return:
        """
        content_id = params['content_id']
        download_videos = await self.get_video_list(content_id)
        if not download_videos:
            # Before starting, check whether the same content_id is already being processed
            processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
            if processing_flag:
                logging(
                    code="9001",
                    info="This content_id is already being processed, skipping the task--{}".format(content_id)
                )
            else:
                await self.start_process(params=params)
        else:
            print("Downloaded videos already exist")

    async def deal(self):
        """
        Entry point
        :return:
        """
        task_list = await self.get_tasks()
        # Deduplicate the tasks by content_id
        task_dict = {}
        for task in task_list:
            task_dict[task['content_id']] = task
        process_list = list(task_dict.values())
        logging(
            code="5001",
            info="Match Task Got {} this time".format(len(process_list)),
            function="Publish Task"
        )
        if task_list:
            total_task = len(process_list)
            print(process_list)
            a = time.time()
            print("Start processing, {} tasks in total".format(total_task))
            tasks = [self.process_task(params) for params in process_list]
            await asyncio.gather(*tasks)
            b = time.time()
            print("Processing time: {} s".format(b - a))
        else:
            logging(
                code="9008",
                info="No pending requests to process"
            )
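

# A minimal driver sketch (not from the original file) showing how this task
# class is presumably wired up. The AsyncMySQLClient import path is an
# assumption; any client exposing the async_select / async_insert coroutines
# used above would work.
if __name__ == "__main__":
    from applications.db import AsyncMySQLClient  # hypothetical import path

    async def main():
        mysql_client = AsyncMySQLClient()  # assumed to match the interface above
        await NewContentIdTask(mysql_client).deal()

    asyncio.run(main())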