# new_contentId_task.py
  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. from applications.config import Config, NewContentIdTaskConst
  7. from applications.log import logging
  8. from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
  9. from applications.functions.common import shuffle_list
  10. from applications.functions.kimi import KimiServer
  11. from applications.spider import search_videos_from_web
  12. from applications.etl_function import *
  13. from applications.feishu import bot
  14. from applications.functions.aigc import record_trace_id
class NewContentIdTask(object):
    """
    Matching pipeline for content ids that have no previously published article.

    Orchestrates the full flow: Kimi text processing -> video spidering ->
    ETL (download/upload to OSS) -> publishing, tracking progress through
    status columns of the article-match-video table.
    """
    def __init__(self, mysql_client):
        # Async MySQL client used by every query in this task.
        self.mysql_client = mysql_client
        self.config = Config()
        # Table names come from config so environments can differ.
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        # gh_id whitelist for level-2 test accounts (JSON-encoded in config).
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        self.account_map = json.loads(self.config.get_config_value("accountMap"))
        # Batch size for each round of task fetching (used as SQL LIMIT).
        self.spider_coroutines = self.config.get_config_value("spiderCoroutines")
        # Accounts handled by the new (hosted) publish method; these skip publish_task.
        self.new_method_gh_id = json.loads(self.config.get_config_value("newMethodGhId"))
        self.const = NewContentIdTaskConst()
    async def get_tasks(self):
        """
        Fetch a batch of tasks to process.

        Three steps against the match-video table:
          1. Roll back "processing" tasks whose status timestamp exceeds the
             timeout (treated as failed; retried later with process_times bumped).
          2. Mark tasks that exceeded the max process count as failed, unless
             they already reached ETL-complete or published status.
          3. Return up to ``spider_coroutines`` tasks still in INIT status,
             ordered by flow-pool level and request time.

        :return: list of task dicts (possibly empty)
        """
        # Tasks stuck in "processing": if older than 1h (TASK_PROCESSING_TIMEOUT),
        # reset them to INIT and increment process_times.
        select_processing_sql = f"""
        SELECT
            trace_id, content_status_update_time, process_times
        FROM
            {self.article_match_video_table}
        WHERE
            content_status = {self.const.TASK_PROCESSING_STATUS}
            and process_times <= {self.const.TASK_MAX_PROCESS_TIMES};
        """
        processing_articles = await self.mysql_client.async_select(select_processing_sql)
        if processing_articles:
            processing_list = [
                {
                    "trace_id": item[0],
                    "content_status_update_time": item[1],
                    "process_times": item[2]
                }
                for item in processing_articles
            ]
            for obj in processing_list:
                if int(time.time()) - obj['content_status_update_time'] >= self.const.TASK_PROCESSING_TIMEOUT:
                    # The task is considered failed: roll it back for retry.
                    # NOTE(review): roll_back_content_status_when_fails adds
                    # another +1 internally, so process_times is effectively
                    # bumped by 2 here — confirm whether that is intended.
                    await self.roll_back_content_status_when_fails(
                        process_times=obj['process_times'] + 1,
                        trace_id=obj['trace_id']
                    )
        # Tasks retried too many times (and not already ETL-complete/published)
        # are marked as failed. The WHERE columns should be indexed.
        update_status_sql = f"""
        UPDATE
            {self.article_match_video_table}
        SET
            content_status = %s
        WHERE
            process_times > %s and content_status not in (%s, %s);
        """
        await self.mysql_client.async_insert(
            update_status_sql,
            params=(
                self.const.TASK_FAIL_STATUS,
                self.const.TASK_MAX_PROCESS_TIMES,
                self.const.TASK_ETL_COMPLETE_STATUS,
                self.const.TASK_PUBLISHED_STATUS
            )
        )
        # Fetch INIT tasks that still have retry budget.
        select_sql = f"""
        SELECT
            trace_id, content_id, flow_pool_level, gh_id, process_times, publish_flag
        FROM
            {self.article_match_video_table}
        WHERE
            content_status = {self.const.TASK_INIT_STATUS}
            and process_times <= {self.const.TASK_MAX_PROCESS_TIMES}
        ORDER BY flow_pool_level, request_timestamp
        LIMIT {self.spider_coroutines};
        """
        tasks = await self.mysql_client.async_select(select_sql)
        if tasks:
            return [
                {
                    "trace_id": i[0],
                    "content_id": i[1],
                    "flow_pool_level": i[2],
                    "gh_id": i[3],
                    "process_times": i[4],
                    "publish_flag": i[5]
                }
                for i in tasks
            ]
        else:
            return []
  107. async def get_video_list(self, content_id):
  108. """
  109. 判断该文章是否存在历史匹配视频
  110. :param content_id
  111. :return:
  112. """
  113. sql = f"""
  114. SELECT id
  115. FROM {self.article_crawler_video_table}
  116. WHERE content_id = '{content_id}' and download_status = {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  117. """
  118. res_tuple = await self.mysql_client.async_select(sql)
  119. if len(res_tuple) >= self.const.MIN_MATCH_VIDEO_NUM:
  120. return True
  121. else:
  122. return False
  123. async def update_content_status(self, new_content_status, trace_id, ori_content_status):
  124. """
  125. :param new_content_status:
  126. :param trace_id:
  127. :param ori_content_status:
  128. :return:
  129. """
  130. update_sql = f"""
  131. UPDATE {self.article_match_video_table}
  132. SET content_status = %s, content_status_update_time = %s
  133. WHERE trace_id = %s and content_status = %s;
  134. """
  135. row_counts = await self.mysql_client.async_insert(
  136. sql=update_sql,
  137. params=(
  138. new_content_status,
  139. int(time.time()),
  140. trace_id,
  141. ori_content_status
  142. )
  143. )
  144. return row_counts
  145. async def roll_back_content_status_when_fails(self, process_times, trace_id):
  146. """
  147. 处理失败,回滚至初始状态,处理次数加 1
  148. :param process_times:
  149. :param trace_id:
  150. :return:
  151. """
  152. update_article_sql = f"""
  153. UPDATE {self.article_match_video_table}
  154. SET
  155. content_status = %s,
  156. content_status_update_time = %s,
  157. process_times = %s
  158. WHERE trace_id = %s and content_status = %s;
  159. """
  160. await self.mysql_client.async_insert(
  161. sql=update_article_sql,
  162. params=(
  163. self.const.TASK_INIT_STATUS,
  164. int(time.time()),
  165. process_times + 1,
  166. trace_id,
  167. self.const.TASK_PROCESSING_STATUS
  168. )
  169. )
  170. async def judge_whether_same_content_id_is_processing(self, content_id):
  171. """
  172. 同一个 content_id 只需要处理一次
  173. :param content_id:
  174. :return:
  175. success: 4
  176. init: 0
  177. fail: 99
  178. todo: 存在处理失败的content_id是否需要不再处理
  179. """
  180. select_sql = f"""
  181. SELECT distinct content_status
  182. FROM {self.article_match_video_table}
  183. WHERE content_id = '{content_id}';
  184. """
  185. result = await self.mysql_client.async_select(select_sql)
  186. if result:
  187. for item in result:
  188. content_status = item[0]
  189. # if content_status not in {self.TASK_INIT_STATUS, self.TASK_PUBLISHED_STATUS} :
  190. if content_status in {
  191. self.const.TASK_KIMI_FINISHED_STATUS,
  192. self.const.TASK_SPIDER_FINISHED_STATUS,
  193. self.const.TASK_ETL_COMPLETE_STATUS,
  194. self.const.TASK_PROCESSING_STATUS,
  195. self.const.TASK_PUBLISHED_STATUS
  196. }:
  197. return True
  198. return False
  199. else:
  200. return False
  201. async def get_downloaded_videos(self, content_id):
  202. """
  203. 获取已下载的视频
  204. :return:
  205. """
  206. sql = f"""
  207. SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
  208. FROM {self.article_crawler_video_table}
  209. WHERE content_id = '{content_id}' and download_status = {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  210. ORDER BY score DESC;
  211. """
  212. res_tuple = await self.mysql_client.async_select(sql)
  213. return [
  214. {
  215. "platform": i[0],
  216. "play_count": i[1],
  217. "like_count": i[2],
  218. "video_oss_path": i[3],
  219. "cover_oss_path": i[4],
  220. "uid": i[5]
  221. }
  222. for i in res_tuple
  223. ]
  224. async def get_kimi_status(self, content_id):
  225. """
  226. 通过 content_id 获取kimi info
  227. :return:
  228. """
  229. select_sql = f"""
  230. select kimi_status
  231. from {self.article_text_table}
  232. where content_id = '{content_id}';
  233. """
  234. response = await self.mysql_client.async_select(select_sql)
  235. if response:
  236. kimi_status = response[0][0]
  237. return kimi_status
  238. else:
  239. return self.const.ARTICLE_TEXT_TABLE_ERROR
    async def kimi_task(self, params):
        """
        Run the kimi step for one task.

        If kimi already succeeded for this content_id, just advance the task
        status and return the stored kimi result. Otherwise take the
        processing lock, call KimiServer, persist the result, and advance the
        status. On failure the kimi status is marked failed and the task is
        rolled back to INIT.

        :param params: task dict (content_id / trace_id / process_times)
        :return: dict with kimi_title / ori_title / kimi_summary / kimi_keys;
                 empty dict on kimi failure; None when the status lock is lost
                 or the content_id is absent from the text table
        """
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == self.const.KIMI_SUCCESS_STATUS:
            # Kimi already done for this content: just flip INIT -> KIMI_FINISHED.
            affected_rows = await self.update_content_status(
                new_content_status=self.const.TASK_KIMI_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.const.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # Another worker grabbed the status lock first.
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            get_kimi_sql = f"""
            SELECT article_title, kimi_title, kimi_summary, kimi_keys
            FROM {self.article_text_table}
            WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == self.const.ARTICLE_TEXT_TABLE_ERROR:
            # content_id not found in the long_articles_text table.
            logging(
                code="4000",
                info="long_articles_text表中未找到 content_id"
            )
        else:
            # Start working: move content_status from INIT (0) to PROCESSING (101).
            affected_rows = await self.update_content_status(
                new_content_status=self.const.TASK_PROCESSING_STATUS,
                trace_id=trace_id,
                ori_content_status=self.const.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            K = KimiServer()
            try:
                select_sql = f"""
                select article_title, article_text
                from {self.article_text_table}
                where content_id = '{content_id}'
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                # Strip quote characters so the summary is safe to store/embed.
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                    kimi_title = %s,
                    kimi_summary = %s,
                    kimi_keys = %s,
                    kimi_status = %s
                WHERE content_id = %s;"""
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_title, content_title, content_keys, self.const.KIMI_SUCCESS_STATUS, params['content_id'])
                )
                await self.update_content_status(
                    new_content_status=self.const.TASK_KIMI_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.const.TASK_PROCESSING_STATUS
                )
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # Kimi processing failed: mark it failed in the text table.
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                    kimi_status = %s
                WHERE content_id = %s
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        self.const.KIMI_FAIL_STATUS,
                        content_id
                    )
                )
                # Roll the task status back from PROCESSING (101) to INIT (0).
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}
    async def spider_task(self, params, kimi_result):
        """
        Run the video-search (spider) step for one task.

        Skips the search when enough videos are already downloaded; otherwise
        takes the processing lock, searches the web for candidate videos, and
        advances the status to SPIDER_FINISHED on success or rolls the task
        back on failure.

        :param params: task dict (trace_id / content_id / gh_id / process_times)
        :param kimi_result: result dict produced by kimi_task
        :return: True when the spider step is (already) done, else False
        """
        trace_id = params['trace_id']
        content_id = params['content_id']
        process_times = params['process_times']
        gh_id = params['gh_id']
        select_sql = f"""
        SELECT count(id)
        FROM {self.article_crawler_video_table}
        WHERE content_id = '{content_id}'
        AND download_status = {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS};
        """
        count_tuple = await self.mysql_client.async_select(select_sql)
        counts = count_tuple[0][0]
        if counts >= self.const.MIN_MATCH_VIDEO_NUM:
            # Enough videos already: advance KIMI_FINISHED -> SPIDER_FINISHED.
            await self.update_content_status(
                new_content_status=self.const.TASK_SPIDER_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.const.TASK_KIMI_FINISHED_STATUS
            )
            return True
        # Start working: move status from KIMI_FINISHED (1) to PROCESSING (101).
        affected_rows = await self.update_content_status(
            new_content_status=self.const.TASK_PROCESSING_STATUS,
            ori_content_status=self.const.TASK_KIMI_FINISHED_STATUS,
            trace_id=trace_id
        )
        if affected_rows == 0:
            # Another worker grabbed the status lock first.
            logging(
                code="6000",
                info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
            )
            return False
        try:
            logging(
                code="spider_1001",
                info="开始执行搜索任务",
                trace_id=trace_id,
                data=kimi_result
            )
            search_videos_count = await search_videos_from_web(
                info={
                    "ori_title": kimi_result['ori_title'],
                    "kimi_summary": kimi_result['kimi_summary'],
                    "kimi_keys": kimi_result['kimi_keys'],
                    "trace_id": trace_id,
                    "gh_id": gh_id,
                    "content_id": content_id,
                    "crawler_video_table": self.article_crawler_video_table
                },
                gh_id_map=self.account_map,
                db_client=self.mysql_client
            )
            if search_videos_count >= self.const.MIN_MATCH_VIDEO_NUM:
                # Spider succeeded: move PROCESSING (101) -> SPIDER_FINISHED (2).
                logging(
                    code="spider_1002",
                    info="搜索成功",
                    trace_id=trace_id,
                    data=kimi_result
                )
                await self.update_content_status(
                    new_content_status=self.const.TASK_SPIDER_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.const.TASK_PROCESSING_STATUS
                )
                return True
            else:
                # Not enough results: roll back for retry.
                # NOTE(review): roll_back adds +1 itself, so process_times is
                # effectively bumped by 2 here — confirm whether intended.
                logging(
                    code="spider_1003",
                    info="搜索失败",
                    trace_id=trace_id,
                    data=kimi_result
                )
                await self.roll_back_content_status_when_fails(
                    process_times=process_times + 1,
                    trace_id=trace_id
                )
                return False
        except Exception as e:
            await self.roll_back_content_status_when_fails(
                process_times=process_times + 1,
                trace_id=trace_id
            )
            print("爬虫处理失败: {}".format(e))
            return False
  441. async def etl_task(self, params):
  442. """
  443. download && upload videos
  444. :param params:
  445. :return:
  446. """
  447. trace_id = params['trace_id']
  448. content_id = params['content_id']
  449. process_times = params['process_times']
  450. # 判断是否有三条已经下载完成的视频
  451. select_sql = f"""
  452. select count(id)
  453. from {self.article_crawler_video_table}
  454. where content_id = '{content_id}' and download_status = {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  455. """
  456. video_count_tuple = await self.mysql_client.async_select(select_sql)
  457. video_count = video_count_tuple[0][0]
  458. if video_count >= self.const.MIN_MATCH_VIDEO_NUM:
  459. affect_rows = await self.update_content_status(
  460. ori_content_status=self.const.TASK_SPIDER_FINISHED_STATUS,
  461. trace_id=trace_id,
  462. new_content_status=self.const.TASK_ETL_COMPLETE_STATUS
  463. )
  464. if affect_rows == 0:
  465. logging(
  466. code="6000",
  467. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  468. )
  469. return False
  470. return True
  471. else:
  472. # 开始处理, 将文章状态修改为处理状态
  473. affected_rows = await self.update_content_status(
  474. ori_content_status=self.const.TASK_SPIDER_FINISHED_STATUS,
  475. trace_id=trace_id,
  476. new_content_status=self.const.TASK_PROCESSING_STATUS
  477. )
  478. if affected_rows == 0:
  479. logging(
  480. code="6000",
  481. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  482. )
  483. return False
  484. select_sql = f"""
  485. SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
  486. FROM {self.article_crawler_video_table}
  487. WHERE content_id = '{content_id}' and download_status != {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS}
  488. ORDER BY score DESC;
  489. """
  490. videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
  491. downloaded_count = 0
  492. for line in videos_need_to_download_tuple:
  493. params = {
  494. "id": line[0],
  495. "video_id": line[1],
  496. "platform": line[2],
  497. "video_title": line[3],
  498. "video_url": line[4],
  499. "cover_url": line[5],
  500. "user_id": line[6],
  501. "trace_id": line[7]
  502. }
  503. try:
  504. local_video_path, local_cover_path = generate_video_path(params['platform'], params['video_id'])
  505. # download videos
  506. file_path = await download_video(
  507. file_path=local_video_path,
  508. platform=params['platform'],
  509. video_url=params['video_url']
  510. )
  511. if not file_path:
  512. # 说明视频下载失败,无需上传该视频, 将该条记录设置为失败状态
  513. update_sql = f"""
  514. UPDATE {self.article_crawler_video_table}
  515. SET download_status = %s
  516. WHERE id = %s;
  517. """
  518. await self.mysql_client.async_insert(
  519. sql=update_sql,
  520. params=(self.const.VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  521. )
  522. logging(
  523. code="etl_1001",
  524. info="etl_下载视频失败",
  525. trace_id=trace_id,
  526. function="etl_task"
  527. )
  528. else:
  529. # download cover
  530. cover_path = await download_cover(
  531. file_path=local_cover_path,
  532. platform=params['platform'],
  533. cover_url=params['cover_url']
  534. )
  535. # upload video to oss
  536. oss_video = await upload_to_oss(
  537. local_video_path=file_path,
  538. download_type="video"
  539. )
  540. # upload cover to oss
  541. if cover_path:
  542. oss_cover = await upload_to_oss(
  543. local_video_path=cover_path,
  544. download_type="image"
  545. )
  546. else:
  547. oss_cover = None
  548. # change status to success
  549. update_sql = f"""
  550. UPDATE {self.article_crawler_video_table}
  551. SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
  552. WHERE id = %s;
  553. """
  554. await self.mysql_client.async_insert(
  555. sql=update_sql,
  556. params=(
  557. oss_video,
  558. oss_cover,
  559. self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS,
  560. params['id']
  561. )
  562. )
  563. downloaded_count += 1
  564. logging(
  565. code="etl_1002",
  566. info="etl_视频下载成功",
  567. trace_id=trace_id,
  568. function="etl_task"
  569. )
  570. # 如果下载的视频数已经大于3, 则直接退出循环,修改状态为ETL成功状态
  571. if downloaded_count > self.const.MIN_MATCH_VIDEO_NUM:
  572. await self.update_content_status(
  573. ori_content_status=self.const.TASK_PROCESSING_STATUS,
  574. trace_id=trace_id,
  575. new_content_status=self.const.TASK_ETL_COMPLETE_STATUS
  576. )
  577. return True
  578. except Exception as e:
  579. update_sql = f"""
  580. UPDATE {self.article_crawler_video_table}
  581. SET download_status = %s
  582. WHERE id = %s;
  583. """
  584. await self.mysql_client.async_insert(
  585. sql=update_sql,
  586. params=(self.const.VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  587. )
  588. logging(
  589. code="etl_1001",
  590. info="etl_下载视频失败",
  591. trace_id=trace_id,
  592. function="etl_task"
  593. )
  594. if downloaded_count >= 3:
  595. await self.update_content_status(
  596. ori_content_status=self.const.TASK_PROCESSING_STATUS,
  597. trace_id=trace_id,
  598. new_content_status=self.const.TASK_ETL_COMPLETE_STATUS
  599. )
  600. return True
  601. else:
  602. await self.roll_back_content_status_when_fails(
  603. process_times=process_times + 1,
  604. trace_id=trace_id
  605. )
  606. return False
  607. async def publish_task(self, params, kimi_title):
  608. """
  609. 发布任务
  610. :param kimi_title:
  611. :param params:
  612. :return:
  613. """
  614. gh_id = params['gh_id']
  615. flow_pool_level = params['flow_pool_level']
  616. content_id = params['content_id']
  617. trace_id = params['trace_id']
  618. process_times = params['process_times']
  619. # 开始处理,将状态修改为操作状态
  620. affected_rows = await self.update_content_status(
  621. ori_content_status=self.const.TASK_ETL_COMPLETE_STATUS,
  622. trace_id=trace_id,
  623. new_content_status=self.const.TASK_PROCESSING_STATUS
  624. )
  625. if affected_rows == 0:
  626. logging(
  627. code="6000",
  628. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  629. )
  630. return False
  631. try:
  632. download_videos = await self.get_downloaded_videos(content_id)
  633. match flow_pool_level:
  634. case "autoArticlePoolLevel4":
  635. # 冷启层, 全量做
  636. video_list = shuffle_list(download_videos)[:3]
  637. case "autoArticlePoolLevel3":
  638. if self.gh_id_dict.get(gh_id):
  639. video_list = shuffle_list(download_videos)[:3]
  640. else:
  641. video_list = download_videos[:3]
  642. case "autoArticlePoolLevel2":
  643. # 次条,只针对具体账号做
  644. video_list = []
  645. case "autoArticlePoolLevel1":
  646. # 头条,先不做
  647. video_list = download_videos[:3]
  648. case _:
  649. video_list = download_videos[:3]
  650. L = []
  651. for video_obj in video_list:
  652. params = {
  653. "videoPath": video_obj['video_oss_path'],
  654. "uid": video_obj['uid'],
  655. "title": kimi_title
  656. }
  657. publish_response = await publish_to_pq(params)
  658. video_id = publish_response['data']['id']
  659. response = await get_pq_video_detail(video_id)
  660. obj = {
  661. "uid": video_obj['uid'],
  662. "source": video_obj['platform'],
  663. "kimiTitle": kimi_title,
  664. "videoId": response['data'][0]['id'],
  665. "videoCover": response['data'][0]['shareImgPath'],
  666. "videoPath": response['data'][0]['videoPath'],
  667. "videoOss": video_obj['video_oss_path']
  668. }
  669. L.append(obj)
  670. update_sql = f"""
  671. UPDATE {self.article_match_video_table}
  672. SET content_status = %s, response = %s, process_times = %s
  673. WHERE trace_id = %s and content_status = %s;
  674. """
  675. # 从操作中状态修改为已发布状态
  676. await self.mysql_client.async_insert(
  677. sql=update_sql,
  678. params=(
  679. self.const.TASK_PUBLISHED_STATUS,
  680. json.dumps(L, ensure_ascii=False),
  681. process_times + 1,
  682. trace_id,
  683. self.const.TASK_PROCESSING_STATUS
  684. )
  685. )
  686. except Exception as e:
  687. await self.roll_back_content_status_when_fails(
  688. process_times=params['process_times'] + 1,
  689. trace_id=params['trace_id']
  690. )
  691. print(e)
    async def start_process(self, params):
        """
        Run the full pipeline for a single article:
        kimi -> spider -> etl -> publish (unless the account is hosted).

        :param params: task dict
        """
        # Step 1: run the kimi step.
        kimi_result = await self.kimi_task(params)
        trace_id = params['trace_id']
        process_times = params['process_times']
        content_id = params['content_id']
        gh_id = params['gh_id']
        publish_flag = params['publish_flag']
        print(kimi_result)
        if kimi_result:
            # Kimi finished: run the spider step.
            print("kimi success")
            logging(
                code=3001,
                info="kimi success",
                trace_id=trace_id
            )
            spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
            if spider_flag:
                # Spider finished: run the ETL (download/upload) step.
                print("spider success")
                logging(
                    code=3002,
                    info="spider_success",
                    trace_id=trace_id
                )
                etl_flag = await self.etl_task(params)
                if etl_flag:
                    # ETL finished: publish, or hand off to the hosted flow.
                    print("etl success")
                    logging(
                        code="3003",
                        info="etl_success",
                        trace_id=trace_id
                    )
                    # TODO: a newly created plan without hosting whose account is
                    # also in the configured account list still takes the hosted
                    # path; historical stock needs consideration. For now both
                    # cases are handled by the hosted flow.
                    if gh_id in self.new_method_gh_id or publish_flag == self.const.DO_NOT_NEED_PUBLISH:
                        logging(
                            code="3013",
                            info="不需要发布,长文系统托管发布",
                            trace_id=trace_id
                        )
                        return
                    else:
                        try:
                            await self.publish_task(params, kimi_result['kimi_title'])
                            logging(
                                code="3004",
                                info="publish_success",
                                trace_id=trace_id
                            )
                            await record_trace_id(
                                trace_id=trace_id,
                                status=self.const.RECORD_SUCCESS_TRACE_ID_CODE
                            )
                        except Exception as e:
                            logging(
                                code="6004",
                                info="publish 失败--{}".format(e),
                                trace_id=params['trace_id']
                            )
                else:
                    logging(
                        code="6003",
                        info="ETL 处理失败",
                        trace_id=params['trace_id']
                    )
            else:
                logging(
                    code="6002",
                    info="爬虫处理失败",
                    trace_id=params['trace_id']
                )
        else:
            logging(
                code="6001",
                info="kimi 处理失败",
                trace_id=trace_id
            )
            if process_times >= self.const.TASK_MAX_PROCESS_TIMES:
                # Kimi retries exhausted: give up on this content id.
                logging(
                    code="6011",
                    info="kimi处理次数达到上限, 放弃处理",
                    trace_id=trace_id
                )
                # Mark every INIT row sharing this content_id as kimi-failed.
                update_sql = f"""
                UPDATE {self.article_match_video_table}
                SET content_status = %s
                WHERE content_id = %s and content_status = %s;
                """
                affected_rows = await self.mysql_client.async_insert(
                    sql=update_sql,
                    params=(
                        self.const.KIMI_ILLEGAL_STATUS,
                        content_id,
                        self.const.TASK_INIT_STATUS
                    )
                )
                bot(
                    title="KIMI 处理失败",
                    detail={
                        "content_id": content_id,
                        "affected_rows": affected_rows
                    }
                )
  807. async def process_task(self, params):
  808. """
  809. 处理任务
  810. :return:
  811. """
  812. content_id = params['content_id']
  813. download_videos = await self.get_video_list(content_id)
  814. if not download_videos:
  815. # 开始处理, 判断是否有相同的文章 id 正在处理
  816. processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
  817. if processing_flag:
  818. logging(
  819. code="9001",
  820. info="该 content id 正在处理中, 跳过此任务--{}".format(content_id)
  821. )
  822. else:
  823. await self.start_process(params=params)
  824. else:
  825. print("存在已下载视频")
  826. async def deal(self):
  827. """
  828. function
  829. :return:
  830. """
  831. task_list = await self.get_tasks()
  832. task_dict = {}
  833. # 对 content_id去重
  834. for task in task_list:
  835. key = task['content_id']
  836. task_dict[key] = task
  837. process_list = []
  838. for item in task_dict:
  839. process_list.append(task_dict[item])
  840. logging(
  841. code="5001",
  842. info="Match Task Got {} this time".format(len(process_list)),
  843. function="Publish Task"
  844. )
  845. if task_list:
  846. total_task = len(process_list)
  847. print(process_list)
  848. a = time.time()
  849. print("开始处理,一共{}个任务".format(total_task))
  850. tasks = [self.process_task(params) for params in process_list]
  851. await asyncio.gather(*tasks)
  852. b = time.time()
  853. print("处理时间: {} s".format(b - a))
  854. else:
  855. logging(
  856. code="9008",
  857. info="没有要处理的请求"
  858. )