newContentIdTask.py 34 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878
  1. """
  2. @author: luojunhui
  3. """
import asyncio
import json
import time

from applications.config import Config
from applications.log import logging
from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
from applications.functions.common import shuffle_list
from applications.functions.kimi import KimiServer
from applications.spider import search_videos_from_web
from applications.etl_function import *
from applications.feishu import bot
from applications.functions.aigc import record_trace_id
  15. class NewContentIdTask(object):
  16. """
  17. 不存在历史已经发布的文章的匹配流程
  18. """
  19. TASK_INIT_STATUS = 0
  20. TASK_KIMI_FINISHED_STATUS = 1
  21. TASK_SPIDER_FINISHED_STATUS = 2
  22. TASK_ETL_FINISHED_STATUS = 3
  23. TASK_PUBLISHED_STATUS = 4
  24. TASK_PROCESSING_STATUS = 101
  25. TASK_FAIL_STATUS = 99
  26. KIMI_ILLEGAL_STATUS = 95
  27. ARTICLE_TEXT_TABLE_ERROR = 98
  28. TASK_MAX_PROCESS_TIMES = 3
  29. RECORD_SUCCESS_TRACE_ID_CODE = 2
  30. def __init__(self, mysql_client):
  31. self.mysql_client = mysql_client
  32. self.config = Config()
  33. self.article_match_video_table = self.config.article_match_video_table
  34. self.article_text_table = self.config.article_text_table
  35. self.article_crawler_video_table = self.config.article_crawler_video_table
  36. self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
  37. self.account_map = json.loads(self.config.get_config_value("accountMap"))
  38. self.spider_coroutines = self.config.get_config_value("spiderCoroutines")
  39. async def get_tasks(self):
  40. """
  41. 获取 task
  42. :return:
  43. """
  44. # 获取 content_status 为 处理中 的任务,判断时间, 如果超过 1h 则,则将它改为 0, process_times + 1
  45. select_processing_sql = f"""
  46. SELECT
  47. trace_id, content_status_update_time, process_times
  48. FROM
  49. {self.article_match_video_table}
  50. WHERE
  51. content_status = {self.TASK_PROCESSING_STATUS}
  52. and process_times <= {self.TASK_MAX_PROCESS_TIMES};
  53. """
  54. processing_articles = await self.mysql_client.async_select(select_processing_sql)
  55. if processing_articles:
  56. processing_list = [
  57. {
  58. "trace_id": item[0],
  59. "content_status_update_time": item[1],
  60. "process_times": item[2]
  61. }
  62. for item in processing_articles
  63. ]
  64. for obj in processing_list:
  65. if int(time.time()) - obj['content_status_update_time'] >= 3600:
  66. # 认为该任务失败
  67. await self.roll_back_content_status_when_fails(
  68. process_times=obj['process_times'] + 1,
  69. trace_id=obj['trace_id']
  70. )
  71. # 将 process_times > 3 且状态不为 4 的任务的状态修改为失败,
  72. update_status_sql = f"""
  73. UPDATE
  74. {self.article_match_video_table}
  75. SET
  76. content_status = %s
  77. WHERE
  78. process_times > %s and content_status != %s;
  79. """
  80. await self.mysql_client.async_insert(
  81. update_status_sql,
  82. params=(
  83. self.TASK_FAIL_STATUS,
  84. self.TASK_MAX_PROCESS_TIMES,
  85. self.TASK_PUBLISHED_STATUS
  86. )
  87. )
  88. # 获取 process_times <= 3 且 content_status = 0 的任务
  89. select_sql = f"""
  90. SELECT
  91. trace_id, content_id, flow_pool_level, gh_id, process_times
  92. FROM
  93. {self.article_match_video_table}
  94. WHERE
  95. content_status = {self.TASK_INIT_STATUS}
  96. and process_times <= {self.TASK_MAX_PROCESS_TIMES}
  97. ORDER BY flow_pool_level, request_timestamp
  98. LIMIT {self.spider_coroutines};
  99. """
  100. tasks = await self.mysql_client.async_select(select_sql)
  101. if tasks:
  102. return [
  103. {
  104. "trace_id": i[0],
  105. "content_id": i[1],
  106. "flow_pool_level": i[2],
  107. "gh_id": i[3],
  108. "process_times": i[4]
  109. }
  110. for i in tasks
  111. ]
  112. else:
  113. return []
  114. async def get_video_list(self, content_id):
  115. """
  116. 判断该文章是否存在历史匹配视频
  117. :param content_id
  118. :return:
  119. """
  120. sql = f"""
  121. SELECT id
  122. FROM {self.article_crawler_video_table}
  123. WHERE content_id = '{content_id}' and download_status = 2;
  124. """
  125. res_tuple = await self.mysql_client.async_select(sql)
  126. if len(res_tuple) >= 3:
  127. return True
  128. else:
  129. return False
  130. async def update_content_status(self, new_content_status, trace_id, ori_content_status):
  131. """
  132. :param new_content_status:
  133. :param trace_id:
  134. :param ori_content_status:
  135. :return:
  136. """
  137. update_sql = f"""
  138. UPDATE {self.article_match_video_table}
  139. SET content_status = %s, content_status_update_time = %s
  140. WHERE trace_id = %s and content_status = %s;
  141. """
  142. row_counts = await self.mysql_client.async_insert(
  143. sql=update_sql,
  144. params=(
  145. new_content_status,
  146. int(time.time()),
  147. trace_id,
  148. ori_content_status
  149. )
  150. )
  151. return row_counts
  152. async def roll_back_content_status_when_fails(self, process_times, trace_id):
  153. """
  154. 处理失败,回滚至初始状态,处理次数加 1
  155. :param process_times:
  156. :param trace_id:
  157. :return:
  158. """
  159. update_article_sql = f"""
  160. UPDATE {self.article_match_video_table}
  161. SET
  162. content_status = %s,
  163. content_status_update_time = %s,
  164. process_times = %s
  165. WHERE trace_id = %s and content_status = %s;
  166. """
  167. await self.mysql_client.async_insert(
  168. sql=update_article_sql,
  169. params=(
  170. self.TASK_INIT_STATUS,
  171. int(time.time()),
  172. process_times + 1,
  173. trace_id,
  174. self.TASK_PROCESSING_STATUS
  175. )
  176. )
  177. async def judge_whether_same_content_id_is_processing(self, content_id):
  178. """
  179. 同一个 content_id 只需要处理一次
  180. :param content_id:
  181. :return:
  182. success: 4
  183. init: 0
  184. fail: 99
  185. """
  186. select_sql = f"""
  187. SELECT distinct content_status
  188. FROM {self.article_match_video_table}
  189. WHERE content_id = '{content_id}';
  190. """
  191. result = await self.mysql_client.async_select(select_sql)
  192. if result:
  193. for item in result:
  194. content_status = item[0]
  195. # if content_status not in {self.TASK_INIT_STATUS, self.TASK_PUBLISHED_STATUS} :
  196. if content_status in {
  197. self.TASK_KIMI_FINISHED_STATUS,
  198. self.TASK_SPIDER_FINISHED_STATUS,
  199. self.TASK_ETL_FINISHED_STATUS,
  200. self.TASK_PROCESSING_STATUS,
  201. self.TASK_PUBLISHED_STATUS
  202. }:
  203. return True
  204. return False
  205. else:
  206. return False
  207. async def get_downloaded_videos(self, content_id):
  208. """
  209. 获取已下载的视频
  210. :return:
  211. """
  212. sql = f"""
  213. SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
  214. FROM {self.article_crawler_video_table}
  215. WHERE content_id = '{content_id}' and download_status = 2
  216. ORDER BY score DESC;
  217. """
  218. res_tuple = await self.mysql_client.async_select(sql)
  219. return [
  220. {
  221. "platform": i[0],
  222. "play_count": i[1],
  223. "like_count": i[2],
  224. "video_oss_path": i[3],
  225. "cover_oss_path": i[4],
  226. "uid": i[5]
  227. }
  228. for i in res_tuple
  229. ]
  230. async def get_kimi_status(self, content_id):
  231. """
  232. 通过 content_id 获取kimi info
  233. :return:
  234. """
  235. select_sql = f"""
  236. select kimi_status
  237. from {self.article_text_table}
  238. where content_id = '{content_id}';
  239. """
  240. response = await self.mysql_client.async_select(select_sql)
  241. if response:
  242. kimi_status = response[0][0]
  243. return kimi_status
  244. else:
  245. return self.ARTICLE_TEXT_TABLE_ERROR
  246. async def kimi_task(self, params):
  247. """
  248. 执行 kimi 任务
  249. :return:
  250. """
  251. KIMI_SUCCESS_STATUS = 1
  252. KIMI_FAIL_STATUS = 2
  253. content_id = params['content_id']
  254. trace_id = params['trace_id']
  255. process_times = params['process_times']
  256. kimi_status_code = await self.get_kimi_status(content_id=content_id)
  257. if kimi_status_code == KIMI_SUCCESS_STATUS:
  258. affected_rows = await self.update_content_status(
  259. new_content_status=self.TASK_KIMI_FINISHED_STATUS,
  260. trace_id=trace_id,
  261. ori_content_status=self.TASK_INIT_STATUS
  262. )
  263. if affected_rows == 0:
  264. logging(
  265. code="6000",
  266. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  267. )
  268. return
  269. get_kimi_sql = f"""
  270. SELECT article_title, kimi_title, kimi_summary, kimi_keys
  271. FROM {self.article_text_table}
  272. WHERE content_id = '{content_id}';
  273. """
  274. kimi_info = await self.mysql_client.async_select(get_kimi_sql)
  275. return {
  276. "kimi_title": kimi_info[0][1],
  277. "ori_title": kimi_info[0][0],
  278. "kimi_summary": kimi_info[0][2],
  279. "kimi_keys": json.loads(kimi_info[0][3])
  280. }
  281. elif kimi_status_code == self.ARTICLE_TEXT_TABLE_ERROR:
  282. logging(
  283. code="4000",
  284. info="long_articles_text表中未找到 content_id"
  285. )
  286. else:
  287. # 开始处理,讲 content_status 从 0 改为 101
  288. affected_rows = await self.update_content_status(
  289. new_content_status=self.TASK_PROCESSING_STATUS,
  290. trace_id=trace_id,
  291. ori_content_status=self.TASK_INIT_STATUS
  292. )
  293. if affected_rows == 0:
  294. logging(
  295. code="6000",
  296. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  297. )
  298. return
  299. K = KimiServer()
  300. try:
  301. select_sql = f"""
  302. select article_title, article_text
  303. from {self.article_text_table}
  304. where content_id = '{content_id}'
  305. """
  306. res = await self.mysql_client.async_select(select_sql)
  307. article_obj = {
  308. "article_title": res[0][0],
  309. "article_text": res[0][1],
  310. "content_id": content_id
  311. }
  312. kimi_info = await K.search_kimi_schedule(params=article_obj)
  313. kimi_title = kimi_info['k_title']
  314. content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
  315. content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
  316. update_kimi_sql = f"""
  317. UPDATE {self.article_text_table}
  318. SET
  319. kimi_title = %s,
  320. kimi_summary = %s,
  321. kimi_keys = %s,
  322. kimi_status = %s
  323. WHERE content_id = %s;"""
  324. await self.mysql_client.async_insert(
  325. sql=update_kimi_sql,
  326. params=(kimi_title, content_title, content_keys, KIMI_SUCCESS_STATUS, params['content_id'])
  327. )
  328. await self.update_content_status(
  329. new_content_status=self.TASK_KIMI_FINISHED_STATUS,
  330. trace_id=trace_id,
  331. ori_content_status=self.TASK_PROCESSING_STATUS
  332. )
  333. return {
  334. "kimi_title": kimi_title,
  335. "ori_title": article_obj['article_title'],
  336. "kimi_summary": content_title,
  337. "kimi_keys": kimi_info['content_keys']
  338. }
  339. except Exception as e:
  340. # kimi 任务处理失败
  341. update_kimi_sql = f"""
  342. UPDATE {self.article_text_table}
  343. SET
  344. kimi_status = %s
  345. WHERE content_id = %s
  346. """
  347. await self.mysql_client.async_insert(
  348. sql=update_kimi_sql,
  349. params=(
  350. KIMI_FAIL_STATUS,
  351. content_id
  352. )
  353. )
  354. # 将状态由 101 回退为 0
  355. await self.roll_back_content_status_when_fails(
  356. process_times=process_times,
  357. trace_id=trace_id
  358. )
  359. return {}
  360. async def spider_task(self, params, kimi_result):
  361. """
  362. 爬虫任务
  363. :return:
  364. """
  365. SPIDER_INIT_STATUS = 1
  366. DOWNLOAD_SUCCESS_STATUS = 2
  367. trace_id = params['trace_id']
  368. content_id = params['content_id']
  369. process_times = params['process_times']
  370. gh_id = params['gh_id']
  371. select_sql = f"""
  372. select count(id)
  373. from {self.article_crawler_video_table}
  374. where content_id = '{content_id}'
  375. and download_status = {DOWNLOAD_SUCCESS_STATUS};
  376. """
  377. count_tuple = await self.mysql_client.async_select(select_sql)
  378. counts = count_tuple[0][0]
  379. if counts >= 3:
  380. await self.update_content_status(
  381. new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
  382. trace_id=trace_id,
  383. ori_content_status=SPIDER_INIT_STATUS
  384. )
  385. return True
  386. # 开始处理,将状态由 1 改成 101
  387. affected_rows = await self.update_content_status(
  388. new_content_status=self.TASK_PROCESSING_STATUS,
  389. ori_content_status=SPIDER_INIT_STATUS,
  390. trace_id=trace_id
  391. )
  392. if affected_rows == 0:
  393. logging(
  394. code="6000",
  395. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  396. )
  397. return False
  398. try:
  399. logging(
  400. code="spider_1001",
  401. info="开始执行搜索任务",
  402. trace_id=trace_id,
  403. data=kimi_result
  404. )
  405. search_videos_count = await search_videos_from_web(
  406. info={
  407. "ori_title": kimi_result['ori_title'],
  408. "kimi_summary": kimi_result['kimi_summary'],
  409. "kimi_keys": kimi_result['kimi_keys'],
  410. "trace_id": trace_id,
  411. "gh_id": gh_id,
  412. "content_id": content_id,
  413. "crawler_video_table": self.article_crawler_video_table
  414. },
  415. gh_id_map=self.account_map,
  416. db_client=self.mysql_client
  417. )
  418. if search_videos_count >= 3:
  419. # 表示爬虫任务执行成功, 将状态从 101 改为 2
  420. logging(
  421. code="spider_1002",
  422. info="搜索成功",
  423. trace_id=trace_id,
  424. data=kimi_result
  425. )
  426. await self.update_content_status(
  427. new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
  428. trace_id=trace_id,
  429. ori_content_status=self.TASK_PROCESSING_STATUS
  430. )
  431. return True
  432. else:
  433. logging(
  434. code="spider_1003",
  435. info="搜索失败",
  436. trace_id=trace_id,
  437. data=kimi_result
  438. )
  439. await self.roll_back_content_status_when_fails(
  440. process_times=process_times + 1,
  441. trace_id=trace_id
  442. )
  443. return False
  444. except Exception as e:
  445. await self.roll_back_content_status_when_fails(
  446. process_times=process_times + 1,
  447. trace_id=trace_id
  448. )
  449. print("爬虫处理失败: {}".format(e))
  450. return False
  451. async def etl_task(self, params):
  452. """
  453. download && upload videos
  454. :param params:
  455. :return:
  456. """
  457. VIDEO_DOWNLOAD_SUCCESS_STATUS = 2
  458. VIDEO_DOWNLOAD_FAIL_STATUS = 3
  459. ETL_TASK_INIT_STATUS = 2
  460. trace_id = params['trace_id']
  461. content_id = params['content_id']
  462. process_times = params['process_times']
  463. # 判断是否有三条已经下载完成的视频
  464. select_sql = f"""
  465. select count(id)
  466. from {self.article_crawler_video_table}
  467. where content_id = '{content_id}' and download_status = {VIDEO_DOWNLOAD_SUCCESS_STATUS};
  468. """
  469. video_count_tuple = await self.mysql_client.async_select(select_sql)
  470. video_count = video_count_tuple[0][0]
  471. if video_count >= 3:
  472. affect_rows = await self.update_content_status(
  473. ori_content_status=ETL_TASK_INIT_STATUS,
  474. trace_id=trace_id,
  475. new_content_status=self.TASK_ETL_FINISHED_STATUS
  476. )
  477. if affect_rows == 0:
  478. logging(
  479. code="6000",
  480. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  481. )
  482. return False
  483. return True
  484. else:
  485. # 开始处理, 将文章状态修改为处理状态
  486. affected_rows = await self.update_content_status(
  487. ori_content_status=ETL_TASK_INIT_STATUS,
  488. trace_id=trace_id,
  489. new_content_status=self.TASK_PROCESSING_STATUS
  490. )
  491. if affected_rows == 0:
  492. logging(
  493. code="6000",
  494. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  495. )
  496. return False
  497. select_sql = f"""
  498. SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
  499. FROM {self.article_crawler_video_table}
  500. WHERE content_id = '{content_id}' and download_status != {VIDEO_DOWNLOAD_SUCCESS_STATUS}
  501. ORDER BY score DESC;
  502. """
  503. videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
  504. downloaded_count = 0
  505. for line in videos_need_to_download_tuple:
  506. params = {
  507. "id": line[0],
  508. "video_id": line[1],
  509. "platform": line[2],
  510. "video_title": line[3],
  511. "video_url": line[4],
  512. "cover_url": line[5],
  513. "user_id": line[6],
  514. "trace_id": line[7]
  515. }
  516. try:
  517. local_video_path, local_cover_path = generate_video_path(params['platform'], params['video_id'])
  518. # download videos
  519. file_path = await download_video(
  520. file_path=local_video_path,
  521. platform=params['platform'],
  522. video_url=params['video_url']
  523. )
  524. if not file_path:
  525. # 说明视频下载失败,无需上传该视频, 将该条记录设置为失败状态
  526. update_sql = f"""
  527. UPDATE {self.article_crawler_video_table}
  528. SET download_status = %s
  529. WHERE id = %s;
  530. """
  531. await self.mysql_client.async_insert(
  532. sql=update_sql,
  533. params=(VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  534. )
  535. logging(
  536. code="etl_1001",
  537. info="etl_下载视频失败",
  538. trace_id=trace_id,
  539. function="etl_task"
  540. )
  541. else:
  542. # download cover
  543. cover_path = await download_cover(
  544. file_path=local_cover_path,
  545. platform=params['platform'],
  546. cover_url=params['cover_url']
  547. )
  548. # upload video to oss
  549. oss_video = await upload_to_oss(
  550. local_video_path=file_path,
  551. download_type="video"
  552. )
  553. # upload cover to oss
  554. if cover_path:
  555. oss_cover = await upload_to_oss(
  556. local_video_path=cover_path,
  557. download_type="image"
  558. )
  559. else:
  560. oss_cover = None
  561. # change status to success
  562. update_sql = f"""
  563. UPDATE {self.article_crawler_video_table}
  564. SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
  565. WHERE id = %s;
  566. """
  567. await self.mysql_client.async_insert(
  568. sql=update_sql,
  569. params=(
  570. oss_video,
  571. oss_cover,
  572. VIDEO_DOWNLOAD_SUCCESS_STATUS,
  573. params['id']
  574. )
  575. )
  576. downloaded_count += 1
  577. logging(
  578. code="etl_1002",
  579. info="etl_视频下载成功",
  580. trace_id=trace_id,
  581. function="etl_task"
  582. )
  583. # 如果下载的视频数已经大于3, 则直接退出循环,修改状态为ETL成功状态
  584. if downloaded_count > 3:
  585. await self.update_content_status(
  586. ori_content_status=self.TASK_PROCESSING_STATUS,
  587. trace_id=trace_id,
  588. new_content_status=self.TASK_ETL_FINISHED_STATUS
  589. )
  590. return True
  591. except Exception as e:
  592. update_sql = f"""
  593. UPDATE {self.article_crawler_video_table}
  594. SET download_status = %s
  595. WHERE id = %s;
  596. """
  597. await self.mysql_client.async_insert(
  598. sql=update_sql,
  599. params=(VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  600. )
  601. logging(
  602. code="etl_1001",
  603. info="etl_下载视频失败",
  604. trace_id=trace_id,
  605. function="etl_task"
  606. )
  607. if downloaded_count >= 3:
  608. await self.update_content_status(
  609. ori_content_status=self.TASK_PROCESSING_STATUS,
  610. trace_id=trace_id,
  611. new_content_status=self.TASK_ETL_FINISHED_STATUS
  612. )
  613. return True
  614. else:
  615. await self.roll_back_content_status_when_fails(
  616. process_times=process_times + 1,
  617. trace_id=trace_id
  618. )
  619. return False
  620. async def publish_task(self, params, kimi_title):
  621. """
  622. 发布任务
  623. :param kimi_title:
  624. :param params:
  625. :return:
  626. """
  627. PUBLISH_DEFAULT_STATUS = 3
  628. gh_id = params['gh_id']
  629. flow_pool_level = params['flow_pool_level']
  630. content_id = params['content_id']
  631. trace_id = params['trace_id']
  632. process_times = params['process_times']
  633. # 开始处理,将状态修改为操作状态
  634. affected_rows = await self.update_content_status(
  635. ori_content_status=PUBLISH_DEFAULT_STATUS,
  636. trace_id=trace_id,
  637. new_content_status=self.TASK_PROCESSING_STATUS
  638. )
  639. if affected_rows == 0:
  640. logging(
  641. code="6000",
  642. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  643. )
  644. return False
  645. try:
  646. download_videos = await self.get_downloaded_videos(content_id)
  647. match flow_pool_level:
  648. case "autoArticlePoolLevel4":
  649. # 冷启层, 全量做
  650. video_list = shuffle_list(download_videos)[:3]
  651. case "autoArticlePoolLevel3":
  652. if self.gh_id_dict.get(gh_id):
  653. video_list = shuffle_list(download_videos)[:3]
  654. else:
  655. video_list = download_videos[:3]
  656. case "autoArticlePoolLevel2":
  657. # 次条,只针对具体账号做
  658. video_list = []
  659. case "autoArticlePoolLevel1":
  660. # 头条,先不做
  661. video_list = download_videos[:3]
  662. case _:
  663. video_list = download_videos[:3]
  664. L = []
  665. for video_obj in video_list:
  666. params = {
  667. "videoPath": video_obj['video_oss_path'],
  668. "uid": video_obj['uid'],
  669. "title": kimi_title
  670. }
  671. publish_response = await publish_to_pq(params)
  672. video_id = publish_response['data']['id']
  673. response = await get_pq_video_detail(video_id)
  674. obj = {
  675. "uid": video_obj['uid'],
  676. "source": video_obj['platform'],
  677. "kimiTitle": kimi_title,
  678. "videoId": response['data'][0]['id'],
  679. "videoCover": response['data'][0]['shareImgPath'],
  680. "videoPath": response['data'][0]['videoPath'],
  681. "videoOss": video_obj['video_oss_path']
  682. }
  683. L.append(obj)
  684. update_sql = f"""
  685. UPDATE {self.article_match_video_table}
  686. SET content_status = %s, response = %s, process_times = %s
  687. WHERE trace_id = %s and content_status = %s;
  688. """
  689. # 从操作中状态修改为已发布状态
  690. await self.mysql_client.async_insert(
  691. sql=update_sql,
  692. params=(
  693. self.TASK_PUBLISHED_STATUS,
  694. json.dumps(L, ensure_ascii=False),
  695. process_times + 1,
  696. trace_id,
  697. self.TASK_PROCESSING_STATUS
  698. )
  699. )
  700. except Exception as e:
  701. await self.roll_back_content_status_when_fails(
  702. process_times=params['process_times'] + 1,
  703. trace_id=params['trace_id']
  704. )
  705. print(e)
  706. async def start_process(self, params):
  707. """
  708. 处理单篇文章
  709. :param params:
  710. :return:
  711. """
  712. # step1: 执行 kimi 操作
  713. # time.sleep(5) # 测试多个进程操作同一个 task 的等待时间
  714. kimi_result = await self.kimi_task(params)
  715. trace_id = params['trace_id']
  716. process_times = params['process_times']
  717. content_id = params['content_id']
  718. print(kimi_result)
  719. if kimi_result:
  720. # 等待 kimi 操作执行完成之后,开始执行 spider_task
  721. print("kimi success")
  722. logging(
  723. code=3001,
  724. info="kimi success",
  725. trace_id=trace_id
  726. )
  727. spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
  728. if spider_flag:
  729. # 等待爬虫执行完成后,开始执行 etl_task
  730. print("spider success")
  731. logging(
  732. code=3002,
  733. info="spider_success",
  734. trace_id=trace_id
  735. )
  736. etl_flag = await self.etl_task(params)
  737. if etl_flag:
  738. # 等待下载上传完成,执行发布任务
  739. print("etl success")
  740. logging(
  741. code="3003",
  742. info="etl_success",
  743. trace_id=trace_id
  744. )
  745. try:
  746. await self.publish_task(params, kimi_result['kimi_title'])
  747. logging(
  748. code="3004",
  749. info="publish_success",
  750. trace_id=trace_id
  751. )
  752. await record_trace_id(
  753. trace_id=trace_id,
  754. status=self.RECORD_SUCCESS_TRACE_ID_CODE
  755. )
  756. except Exception as e:
  757. logging(
  758. code="6004",
  759. info="publish 失败--{}".format(e),
  760. trace_id=params['trace_id']
  761. )
  762. else:
  763. logging(
  764. code="6003",
  765. info="ETL 处理失败",
  766. trace_id=params['trace_id']
  767. )
  768. else:
  769. logging(
  770. code="6002",
  771. info="爬虫处理失败",
  772. trace_id=params['trace_id']
  773. )
  774. else:
  775. logging(
  776. code="6001",
  777. info="kimi 处理失败",
  778. trace_id=trace_id
  779. )
  780. if process_times >= self.TASK_MAX_PROCESS_TIMES:
  781. logging(
  782. code="6011",
  783. info="kimi处理次数达到上限, 放弃处理",
  784. trace_id=trace_id
  785. )
  786. # 将相同的content_id && content_status = 0的状态修改为kimi 失败状态
  787. update_sql = f"""
  788. UPDATE {self.article_match_video_table}
  789. SET content_status = %s
  790. WHERE content_id = %s and content_status = %s;
  791. """
  792. affected_rows = await self.mysql_client.async_insert(
  793. sql=update_sql,
  794. params=(
  795. self.KIMI_ILLEGAL_STATUS,
  796. content_id,
  797. self.TASK_INIT_STATUS
  798. )
  799. )
  800. bot(
  801. title="KIMI 处理失败",
  802. detail={
  803. "content_id": content_id,
  804. "affected_rows": affected_rows
  805. }
  806. )
  807. async def process_task(self, params):
  808. """
  809. 处理任务
  810. :return:
  811. """
  812. content_id = params['content_id']
  813. download_videos = await self.get_video_list(content_id)
  814. if not download_videos:
  815. # 开始处理, 判断是否有相同的文章 id 正在处理
  816. processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
  817. if processing_flag:
  818. logging(
  819. code="9001",
  820. info="该 content id 正在处理中, 跳过此任务--{}".format(content_id)
  821. )
  822. else:
  823. await self.start_process(params=params)
  824. else:
  825. print("存在已下载视频")
  826. async def deal(self):
  827. """
  828. function
  829. :return:
  830. """
  831. task_list = await self.get_tasks()
  832. task_dict = {}
  833. # 对 content_id去重
  834. for task in task_list:
  835. key = task['content_id']
  836. task_dict[key] = task
  837. process_list = []
  838. for item in task_dict:
  839. process_list.append(task_dict[item])
  840. logging(
  841. code="5001",
  842. info="Match Task Got {} this time".format(len(process_list)),
  843. function="Publish Task"
  844. )
  845. if task_list:
  846. total_task = len(process_list)
  847. print(process_list)
  848. a = time.time()
  849. print("开始处理,一共{}个任务".format(total_task))
  850. tasks = [self.process_task(params) for params in process_list]
  851. await asyncio.gather(*tasks)
  852. b = time.time()
  853. print("处理时间: {} s".format(b - a))
  854. else:
  855. logging(
  856. code="9008",
  857. info="没有要处理的请求"
  858. )