"""
newContentIdTask.py

@author: luojunhui
"""
import asyncio
import json
import time

from applications.config import Config
from applications.etl_function import *
from applications.feishu import bot
from applications.functions.aigc import record_trace_id
from applications.functions.common import shuffle_list
from applications.functions.kimi import KimiServer
from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
from applications.log import logging
from applications.spider import search_videos_from_web
  15. class NewContentIdTask(object):
  16. """
  17. 不存在历史已经发布的文章的匹配流程
  18. """
  19. TASK_INIT_STATUS = 0
  20. TASK_KIMI_FINISHED_STATUS = 1
  21. TASK_SPIDER_FINISHED_STATUS = 2
  22. TASK_ETL_FINISHED_STATUS = 3
  23. TASK_PUBLISHED_STATUS = 4
  24. TASK_PROCESSING_STATUS = 101
  25. TASK_FAIL_STATUS = 99
  26. KIMI_ILLEGAL_STATUS = 95
  27. ARTICLE_TEXT_TABLE_ERROR = 98
  28. TASK_MAX_PROCESS_TIMES = 3
  29. RECORD_SUCCESS_TRACE_ID_CODE = 2
  30. def __init__(self, mysql_client):
  31. self.mysql_client = mysql_client
  32. self.config = Config()
  33. self.article_match_video_table = self.config.article_match_video_table
  34. self.article_text_table = self.config.article_text_table
  35. self.article_crawler_video_table = self.config.article_crawler_video_table
  36. self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
  37. self.account_map = json.loads(self.config.get_config_value("accountMap"))
  38. self.spider_coroutines = self.config.get_config_value("spiderCoroutines")
  39. self.new_method_gh_id = json.loads(self.config.get_config_value("newMethodGhId"))
  40. async def get_tasks(self):
  41. """
  42. 获取 task
  43. :return:
  44. """
  45. # 获取 content_status 为 处理中 的任务,判断时间, 如果超过 1h 则,则将它改为 0, process_times + 1
  46. select_processing_sql = f"""
  47. SELECT
  48. trace_id, content_status_update_time, process_times
  49. FROM
  50. {self.article_match_video_table}
  51. WHERE
  52. content_status = {self.TASK_PROCESSING_STATUS}
  53. and process_times <= {self.TASK_MAX_PROCESS_TIMES};
  54. """
  55. processing_articles = await self.mysql_client.async_select(select_processing_sql)
  56. if processing_articles:
  57. processing_list = [
  58. {
  59. "trace_id": item[0],
  60. "content_status_update_time": item[1],
  61. "process_times": item[2]
  62. }
  63. for item in processing_articles
  64. ]
  65. for obj in processing_list:
  66. if int(time.time()) - obj['content_status_update_time'] >= 3600:
  67. # 认为该任务失败
  68. await self.roll_back_content_status_when_fails(
  69. process_times=obj['process_times'] + 1,
  70. trace_id=obj['trace_id']
  71. )
  72. # 将 process_times > 3 且状态不为 4 的任务的状态修改为失败,
  73. update_status_sql = f"""
  74. UPDATE
  75. {self.article_match_video_table}
  76. SET
  77. content_status = %s
  78. WHERE
  79. process_times > %s and content_status != %s;
  80. """
  81. await self.mysql_client.async_insert(
  82. update_status_sql,
  83. params=(
  84. self.TASK_FAIL_STATUS,
  85. self.TASK_MAX_PROCESS_TIMES,
  86. self.TASK_PUBLISHED_STATUS
  87. )
  88. )
  89. # 获取 process_times <= 3 且 content_status = 0 的任务
  90. select_sql = f"""
  91. SELECT
  92. trace_id, content_id, flow_pool_level, gh_id, process_times
  93. FROM
  94. {self.article_match_video_table}
  95. WHERE
  96. content_status = {self.TASK_INIT_STATUS}
  97. and process_times <= {self.TASK_MAX_PROCESS_TIMES}
  98. ORDER BY flow_pool_level, request_timestamp
  99. LIMIT {self.spider_coroutines};
  100. """
  101. tasks = await self.mysql_client.async_select(select_sql)
  102. if tasks:
  103. return [
  104. {
  105. "trace_id": i[0],
  106. "content_id": i[1],
  107. "flow_pool_level": i[2],
  108. "gh_id": i[3],
  109. "process_times": i[4]
  110. }
  111. for i in tasks
  112. ]
  113. else:
  114. return []
  115. async def get_video_list(self, content_id):
  116. """
  117. 判断该文章是否存在历史匹配视频
  118. :param content_id
  119. :return:
  120. """
  121. sql = f"""
  122. SELECT id
  123. FROM {self.article_crawler_video_table}
  124. WHERE content_id = '{content_id}' and download_status = 2;
  125. """
  126. res_tuple = await self.mysql_client.async_select(sql)
  127. if len(res_tuple) >= 3:
  128. return True
  129. else:
  130. return False
  131. async def update_content_status(self, new_content_status, trace_id, ori_content_status):
  132. """
  133. :param new_content_status:
  134. :param trace_id:
  135. :param ori_content_status:
  136. :return:
  137. """
  138. update_sql = f"""
  139. UPDATE {self.article_match_video_table}
  140. SET content_status = %s, content_status_update_time = %s
  141. WHERE trace_id = %s and content_status = %s;
  142. """
  143. row_counts = await self.mysql_client.async_insert(
  144. sql=update_sql,
  145. params=(
  146. new_content_status,
  147. int(time.time()),
  148. trace_id,
  149. ori_content_status
  150. )
  151. )
  152. return row_counts
  153. async def roll_back_content_status_when_fails(self, process_times, trace_id):
  154. """
  155. 处理失败,回滚至初始状态,处理次数加 1
  156. :param process_times:
  157. :param trace_id:
  158. :return:
  159. """
  160. update_article_sql = f"""
  161. UPDATE {self.article_match_video_table}
  162. SET
  163. content_status = %s,
  164. content_status_update_time = %s,
  165. process_times = %s
  166. WHERE trace_id = %s and content_status = %s;
  167. """
  168. await self.mysql_client.async_insert(
  169. sql=update_article_sql,
  170. params=(
  171. self.TASK_INIT_STATUS,
  172. int(time.time()),
  173. process_times + 1,
  174. trace_id,
  175. self.TASK_PROCESSING_STATUS
  176. )
  177. )
  178. async def judge_whether_same_content_id_is_processing(self, content_id):
  179. """
  180. 同一个 content_id 只需要处理一次
  181. :param content_id:
  182. :return:
  183. success: 4
  184. init: 0
  185. fail: 99
  186. """
  187. select_sql = f"""
  188. SELECT distinct content_status
  189. FROM {self.article_match_video_table}
  190. WHERE content_id = '{content_id}';
  191. """
  192. result = await self.mysql_client.async_select(select_sql)
  193. if result:
  194. for item in result:
  195. content_status = item[0]
  196. # if content_status not in {self.TASK_INIT_STATUS, self.TASK_PUBLISHED_STATUS} :
  197. if content_status in {
  198. self.TASK_KIMI_FINISHED_STATUS,
  199. self.TASK_SPIDER_FINISHED_STATUS,
  200. self.TASK_ETL_FINISHED_STATUS,
  201. self.TASK_PROCESSING_STATUS,
  202. self.TASK_PUBLISHED_STATUS
  203. }:
  204. return True
  205. return False
  206. else:
  207. return False
  208. async def get_downloaded_videos(self, content_id):
  209. """
  210. 获取已下载的视频
  211. :return:
  212. """
  213. sql = f"""
  214. SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
  215. FROM {self.article_crawler_video_table}
  216. WHERE content_id = '{content_id}' and download_status = 2
  217. ORDER BY score DESC;
  218. """
  219. res_tuple = await self.mysql_client.async_select(sql)
  220. return [
  221. {
  222. "platform": i[0],
  223. "play_count": i[1],
  224. "like_count": i[2],
  225. "video_oss_path": i[3],
  226. "cover_oss_path": i[4],
  227. "uid": i[5]
  228. }
  229. for i in res_tuple
  230. ]
  231. async def get_kimi_status(self, content_id):
  232. """
  233. 通过 content_id 获取kimi info
  234. :return:
  235. """
  236. select_sql = f"""
  237. select kimi_status
  238. from {self.article_text_table}
  239. where content_id = '{content_id}';
  240. """
  241. response = await self.mysql_client.async_select(select_sql)
  242. if response:
  243. kimi_status = response[0][0]
  244. return kimi_status
  245. else:
  246. return self.ARTICLE_TEXT_TABLE_ERROR
    async def kimi_task(self, params):
        """
        Run the Kimi text-analysis stage for one task.

        If the text table already holds a successful Kimi result, just flip
        the task status INIT -> KIMI_FINISHED and return the cached result.
        Otherwise claim the task (INIT -> PROCESSING), call Kimi, persist the
        result, and advance to KIMI_FINISHED; on failure mark kimi_status as
        failed and roll the task back to INIT.

        :param params: task dict with content_id / trace_id / process_times
        :return: dict with kimi_title / ori_title / kimi_summary / kimi_keys
                 on success, {} on Kimi failure, or None when the lock is
                 lost or the text table has no row (NOTE(review): callers
                 appear to rely on all failure returns being falsy -- confirm)
        """
        # kimi_status values in the text table
        KIMI_SUCCESS_STATUS = 1
        KIMI_FAIL_STATUS = 2
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == KIMI_SUCCESS_STATUS:
            # Kimi already ran for this content_id: advance INIT -> KIMI_FINISHED
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # another worker won the status-lock race; bail out
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            get_kimi_sql = f"""
            SELECT article_title, kimi_title, kimi_summary, kimi_keys
            FROM {self.article_text_table}
            WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == self.ARTICLE_TEXT_TABLE_ERROR:
            # no row in the text table for this content_id; nothing to do
            logging(
                code="4000",
                info="long_articles_text表中未找到 content_id"
            )
        else:
            # start processing: move content_status from INIT (0) to PROCESSING (101)
            affected_rows = await self.update_content_status(
                new_content_status=self.TASK_PROCESSING_STATUS,
                trace_id=trace_id,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # another worker won the status-lock race; bail out
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            K = KimiServer()
            try:
                select_sql = f"""
                select article_title, article_text
                from {self.article_text_table}
                where content_id = '{content_id}'
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                # strip single/double quotes from the title (presumably to
                # keep it safe for later SQL/JSON embedding -- confirm)
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                kimi_title = %s,
                kimi_summary = %s,
                kimi_keys = %s,
                kimi_status = %s
                WHERE content_id = %s;"""
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_title, content_title, content_keys, KIMI_SUCCESS_STATUS, params['content_id'])
                )
                await self.update_content_status(
                    new_content_status=self.TASK_KIMI_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.TASK_PROCESSING_STATUS
                )
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # Kimi failed: record the failure on the text row ...
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                kimi_status = %s
                WHERE content_id = %s
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        KIMI_FAIL_STATUS,
                        content_id
                    )
                )
                # ... and roll the task back from PROCESSING (101) to INIT (0)
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}
  361. async def spider_task(self, params, kimi_result):
  362. """
  363. 爬虫任务
  364. :return:
  365. """
  366. SPIDER_INIT_STATUS = 1
  367. DOWNLOAD_SUCCESS_STATUS = 2
  368. trace_id = params['trace_id']
  369. content_id = params['content_id']
  370. process_times = params['process_times']
  371. gh_id = params['gh_id']
  372. select_sql = f"""
  373. select count(id)
  374. from {self.article_crawler_video_table}
  375. where content_id = '{content_id}'
  376. and download_status = {DOWNLOAD_SUCCESS_STATUS};
  377. """
  378. count_tuple = await self.mysql_client.async_select(select_sql)
  379. counts = count_tuple[0][0]
  380. if counts >= 3:
  381. await self.update_content_status(
  382. new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
  383. trace_id=trace_id,
  384. ori_content_status=SPIDER_INIT_STATUS
  385. )
  386. return True
  387. # 开始处理,将状态由 1 改成 101
  388. affected_rows = await self.update_content_status(
  389. new_content_status=self.TASK_PROCESSING_STATUS,
  390. ori_content_status=SPIDER_INIT_STATUS,
  391. trace_id=trace_id
  392. )
  393. if affected_rows == 0:
  394. logging(
  395. code="6000",
  396. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  397. )
  398. return False
  399. try:
  400. logging(
  401. code="spider_1001",
  402. info="开始执行搜索任务",
  403. trace_id=trace_id,
  404. data=kimi_result
  405. )
  406. search_videos_count = await search_videos_from_web(
  407. info={
  408. "ori_title": kimi_result['ori_title'],
  409. "kimi_summary": kimi_result['kimi_summary'],
  410. "kimi_keys": kimi_result['kimi_keys'],
  411. "trace_id": trace_id,
  412. "gh_id": gh_id,
  413. "content_id": content_id,
  414. "crawler_video_table": self.article_crawler_video_table
  415. },
  416. gh_id_map=self.account_map,
  417. db_client=self.mysql_client
  418. )
  419. if search_videos_count >= 3:
  420. # 表示爬虫任务执行成功, 将状态从 101 改为 2
  421. logging(
  422. code="spider_1002",
  423. info="搜索成功",
  424. trace_id=trace_id,
  425. data=kimi_result
  426. )
  427. await self.update_content_status(
  428. new_content_status=self.TASK_SPIDER_FINISHED_STATUS,
  429. trace_id=trace_id,
  430. ori_content_status=self.TASK_PROCESSING_STATUS
  431. )
  432. return True
  433. else:
  434. logging(
  435. code="spider_1003",
  436. info="搜索失败",
  437. trace_id=trace_id,
  438. data=kimi_result
  439. )
  440. await self.roll_back_content_status_when_fails(
  441. process_times=process_times + 1,
  442. trace_id=trace_id
  443. )
  444. return False
  445. except Exception as e:
  446. await self.roll_back_content_status_when_fails(
  447. process_times=process_times + 1,
  448. trace_id=trace_id
  449. )
  450. print("爬虫处理失败: {}".format(e))
  451. return False
  452. async def etl_task(self, params):
  453. """
  454. download && upload videos
  455. :param params:
  456. :return:
  457. """
  458. VIDEO_DOWNLOAD_SUCCESS_STATUS = 2
  459. VIDEO_DOWNLOAD_FAIL_STATUS = 3
  460. ETL_TASK_INIT_STATUS = 2
  461. trace_id = params['trace_id']
  462. content_id = params['content_id']
  463. process_times = params['process_times']
  464. # 判断是否有三条已经下载完成的视频
  465. select_sql = f"""
  466. select count(id)
  467. from {self.article_crawler_video_table}
  468. where content_id = '{content_id}' and download_status = {VIDEO_DOWNLOAD_SUCCESS_STATUS};
  469. """
  470. video_count_tuple = await self.mysql_client.async_select(select_sql)
  471. video_count = video_count_tuple[0][0]
  472. if video_count >= 3:
  473. affect_rows = await self.update_content_status(
  474. ori_content_status=ETL_TASK_INIT_STATUS,
  475. trace_id=trace_id,
  476. new_content_status=self.TASK_ETL_FINISHED_STATUS
  477. )
  478. if affect_rows == 0:
  479. logging(
  480. code="6000",
  481. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  482. )
  483. return False
  484. return True
  485. else:
  486. # 开始处理, 将文章状态修改为处理状态
  487. affected_rows = await self.update_content_status(
  488. ori_content_status=ETL_TASK_INIT_STATUS,
  489. trace_id=trace_id,
  490. new_content_status=self.TASK_PROCESSING_STATUS
  491. )
  492. if affected_rows == 0:
  493. logging(
  494. code="6000",
  495. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  496. )
  497. return False
  498. select_sql = f"""
  499. SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
  500. FROM {self.article_crawler_video_table}
  501. WHERE content_id = '{content_id}' and download_status != {VIDEO_DOWNLOAD_SUCCESS_STATUS}
  502. ORDER BY score DESC;
  503. """
  504. videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
  505. downloaded_count = 0
  506. for line in videos_need_to_download_tuple:
  507. params = {
  508. "id": line[0],
  509. "video_id": line[1],
  510. "platform": line[2],
  511. "video_title": line[3],
  512. "video_url": line[4],
  513. "cover_url": line[5],
  514. "user_id": line[6],
  515. "trace_id": line[7]
  516. }
  517. try:
  518. local_video_path, local_cover_path = generate_video_path(params['platform'], params['video_id'])
  519. # download videos
  520. file_path = await download_video(
  521. file_path=local_video_path,
  522. platform=params['platform'],
  523. video_url=params['video_url']
  524. )
  525. if not file_path:
  526. # 说明视频下载失败,无需上传该视频, 将该条记录设置为失败状态
  527. update_sql = f"""
  528. UPDATE {self.article_crawler_video_table}
  529. SET download_status = %s
  530. WHERE id = %s;
  531. """
  532. await self.mysql_client.async_insert(
  533. sql=update_sql,
  534. params=(VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  535. )
  536. logging(
  537. code="etl_1001",
  538. info="etl_下载视频失败",
  539. trace_id=trace_id,
  540. function="etl_task"
  541. )
  542. else:
  543. # download cover
  544. cover_path = await download_cover(
  545. file_path=local_cover_path,
  546. platform=params['platform'],
  547. cover_url=params['cover_url']
  548. )
  549. # upload video to oss
  550. oss_video = await upload_to_oss(
  551. local_video_path=file_path,
  552. download_type="video"
  553. )
  554. # upload cover to oss
  555. if cover_path:
  556. oss_cover = await upload_to_oss(
  557. local_video_path=cover_path,
  558. download_type="image"
  559. )
  560. else:
  561. oss_cover = None
  562. # change status to success
  563. update_sql = f"""
  564. UPDATE {self.article_crawler_video_table}
  565. SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
  566. WHERE id = %s;
  567. """
  568. await self.mysql_client.async_insert(
  569. sql=update_sql,
  570. params=(
  571. oss_video,
  572. oss_cover,
  573. VIDEO_DOWNLOAD_SUCCESS_STATUS,
  574. params['id']
  575. )
  576. )
  577. downloaded_count += 1
  578. logging(
  579. code="etl_1002",
  580. info="etl_视频下载成功",
  581. trace_id=trace_id,
  582. function="etl_task"
  583. )
  584. # 如果下载的视频数已经大于3, 则直接退出循环,修改状态为ETL成功状态
  585. if downloaded_count > 3:
  586. await self.update_content_status(
  587. ori_content_status=self.TASK_PROCESSING_STATUS,
  588. trace_id=trace_id,
  589. new_content_status=self.TASK_ETL_FINISHED_STATUS
  590. )
  591. return True
  592. except Exception as e:
  593. update_sql = f"""
  594. UPDATE {self.article_crawler_video_table}
  595. SET download_status = %s
  596. WHERE id = %s;
  597. """
  598. await self.mysql_client.async_insert(
  599. sql=update_sql,
  600. params=(VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  601. )
  602. logging(
  603. code="etl_1001",
  604. info="etl_下载视频失败",
  605. trace_id=trace_id,
  606. function="etl_task"
  607. )
  608. if downloaded_count >= 3:
  609. await self.update_content_status(
  610. ori_content_status=self.TASK_PROCESSING_STATUS,
  611. trace_id=trace_id,
  612. new_content_status=self.TASK_ETL_FINISHED_STATUS
  613. )
  614. return True
  615. else:
  616. await self.roll_back_content_status_when_fails(
  617. process_times=process_times + 1,
  618. trace_id=trace_id
  619. )
  620. return False
  621. async def publish_task(self, params, kimi_title):
  622. """
  623. 发布任务
  624. :param kimi_title:
  625. :param params:
  626. :return:
  627. """
  628. PUBLISH_DEFAULT_STATUS = 3
  629. gh_id = params['gh_id']
  630. flow_pool_level = params['flow_pool_level']
  631. content_id = params['content_id']
  632. trace_id = params['trace_id']
  633. process_times = params['process_times']
  634. # 开始处理,将状态修改为操作状态
  635. affected_rows = await self.update_content_status(
  636. ori_content_status=PUBLISH_DEFAULT_STATUS,
  637. trace_id=trace_id,
  638. new_content_status=self.TASK_PROCESSING_STATUS
  639. )
  640. if affected_rows == 0:
  641. logging(
  642. code="6000",
  643. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  644. )
  645. return False
  646. try:
  647. download_videos = await self.get_downloaded_videos(content_id)
  648. match flow_pool_level:
  649. case "autoArticlePoolLevel4":
  650. # 冷启层, 全量做
  651. video_list = shuffle_list(download_videos)[:3]
  652. case "autoArticlePoolLevel3":
  653. if self.gh_id_dict.get(gh_id):
  654. video_list = shuffle_list(download_videos)[:3]
  655. else:
  656. video_list = download_videos[:3]
  657. case "autoArticlePoolLevel2":
  658. # 次条,只针对具体账号做
  659. video_list = []
  660. case "autoArticlePoolLevel1":
  661. # 头条,先不做
  662. video_list = download_videos[:3]
  663. case _:
  664. video_list = download_videos[:3]
  665. L = []
  666. for video_obj in video_list:
  667. params = {
  668. "videoPath": video_obj['video_oss_path'],
  669. "uid": video_obj['uid'],
  670. "title": kimi_title
  671. }
  672. publish_response = await publish_to_pq(params)
  673. video_id = publish_response['data']['id']
  674. response = await get_pq_video_detail(video_id)
  675. obj = {
  676. "uid": video_obj['uid'],
  677. "source": video_obj['platform'],
  678. "kimiTitle": kimi_title,
  679. "videoId": response['data'][0]['id'],
  680. "videoCover": response['data'][0]['shareImgPath'],
  681. "videoPath": response['data'][0]['videoPath'],
  682. "videoOss": video_obj['video_oss_path']
  683. }
  684. L.append(obj)
  685. update_sql = f"""
  686. UPDATE {self.article_match_video_table}
  687. SET content_status = %s, response = %s, process_times = %s
  688. WHERE trace_id = %s and content_status = %s;
  689. """
  690. # 从操作中状态修改为已发布状态
  691. await self.mysql_client.async_insert(
  692. sql=update_sql,
  693. params=(
  694. self.TASK_PUBLISHED_STATUS,
  695. json.dumps(L, ensure_ascii=False),
  696. process_times + 1,
  697. trace_id,
  698. self.TASK_PROCESSING_STATUS
  699. )
  700. )
  701. except Exception as e:
  702. await self.roll_back_content_status_when_fails(
  703. process_times=params['process_times'] + 1,
  704. trace_id=params['trace_id']
  705. )
  706. print(e)
    async def start_process(self, params):
        """
        Run the full pipeline for a single article:
        kimi -> spider -> etl -> publish, advancing a stage only when the
        previous one reported success.

        :param params: task dict (trace_id, content_id, gh_id, process_times)
        """
        # step1: Kimi text analysis
        # time.sleep(5) # test how long concurrent workers wait on the same task
        kimi_result = await self.kimi_task(params)
        trace_id = params['trace_id']
        process_times = params['process_times']
        content_id = params['content_id']
        gh_id = params['gh_id']
        print(kimi_result)
        if kimi_result:
            # step2: Kimi done, run the spider stage
            print("kimi success")
            logging(
                code=3001,
                info="kimi success",
                trace_id=trace_id
            )
            spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
            if spider_flag:
                # step3: spider done, run the ETL (download/upload) stage
                print("spider success")
                logging(
                    code=3002,
                    info="spider_success",
                    trace_id=trace_id
                )
                etl_flag = await self.etl_task(params)
                if etl_flag:
                    # step4: ETL done, publish
                    print("etl success")
                    logging(
                        code="3003",
                        info="etl_success",
                        trace_id=trace_id
                    )
                    if gh_id in self.new_method_gh_id:
                        # accounts on the new flow stop before publishing
                        # (NOTE(review): publishing presumably happens in
                        # another service -- confirm)
                        logging(
                            code="3013",
                            info="new_method_gh_id",
                            trace_id=trace_id
                        )
                        return
                    else:
                        try:
                            await self.publish_task(params, kimi_result['kimi_title'])
                            logging(
                                code="3004",
                                info="publish_success",
                                trace_id=trace_id
                            )
                            await record_trace_id(
                                trace_id=trace_id,
                                status=self.RECORD_SUCCESS_TRACE_ID_CODE
                            )
                        except Exception as e:
                            logging(
                                code="6004",
                                info="publish 失败--{}".format(e),
                                trace_id=params['trace_id']
                            )
                else:
                    logging(
                        code="6003",
                        info="ETL 处理失败",
                        trace_id=params['trace_id']
                    )
            else:
                logging(
                    code="6002",
                    info="爬虫处理失败",
                    trace_id=params['trace_id']
                )
        else:
            # Kimi failed; once the retry budget is exhausted, mark every
            # INIT row for this content_id as KIMI_ILLEGAL and alert via bot
            logging(
                code="6001",
                info="kimi 处理失败",
                trace_id=trace_id
            )
            if process_times >= self.TASK_MAX_PROCESS_TIMES:
                logging(
                    code="6011",
                    info="kimi处理次数达到上限, 放弃处理",
                    trace_id=trace_id
                )
                # flip all rows sharing this content_id that are still INIT (0)
                # to the KIMI-failed status
                update_sql = f"""
                UPDATE {self.article_match_video_table}
                SET content_status = %s
                WHERE content_id = %s and content_status = %s;
                """
                affected_rows = await self.mysql_client.async_insert(
                    sql=update_sql,
                    params=(
                        self.KIMI_ILLEGAL_STATUS,
                        content_id,
                        self.TASK_INIT_STATUS
                    )
                )
                bot(
                    title="KIMI 处理失败",
                    detail={
                        "content_id": content_id,
                        "affected_rows": affected_rows
                    }
                )
  817. async def process_task(self, params):
  818. """
  819. 处理任务
  820. :return:
  821. """
  822. content_id = params['content_id']
  823. download_videos = await self.get_video_list(content_id)
  824. if not download_videos:
  825. # 开始处理, 判断是否有相同的文章 id 正在处理
  826. processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
  827. if processing_flag:
  828. logging(
  829. code="9001",
  830. info="该 content id 正在处理中, 跳过此任务--{}".format(content_id)
  831. )
  832. else:
  833. await self.start_process(params=params)
  834. else:
  835. print("存在已下载视频")
  836. async def deal(self):
  837. """
  838. function
  839. :return:
  840. """
  841. task_list = await self.get_tasks()
  842. task_dict = {}
  843. # 对 content_id去重
  844. for task in task_list:
  845. key = task['content_id']
  846. task_dict[key] = task
  847. process_list = []
  848. for item in task_dict:
  849. process_list.append(task_dict[item])
  850. logging(
  851. code="5001",
  852. info="Match Task Got {} this time".format(len(process_list)),
  853. function="Publish Task"
  854. )
  855. if task_list:
  856. total_task = len(process_list)
  857. print(process_list)
  858. a = time.time()
  859. print("开始处理,一共{}个任务".format(total_task))
  860. tasks = [self.process_task(params) for params in process_list]
  861. await asyncio.gather(*tasks)
  862. b = time.time()
  863. print("处理时间: {} s".format(b - a))
  864. else:
  865. logging(
  866. code="9008",
  867. info="没有要处理的请求"
  868. )