new_contentId_task.py 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887
  1. """
  2. @author: luojunhui
  3. """
import asyncio
import json
import time

from applications.config import Config, NewContentIdTaskConst
from applications.etl_function import *
from applications.feishu import bot
from applications.functions.aigc import record_trace_id
from applications.functions.common import shuffle_list
from applications.functions.kimi import KimiServer
from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
from applications.log import logging
from applications.spider import search_videos_from_web


class NewContentIdTask(object):
    """
    Matching pipeline for articles with no previously published history.

    Drives one article through: Kimi summarization -> web video search
    (spider) -> video download/upload (ETL) -> publish, tracking progress
    through status fields in MySQL.
    """

    def __init__(self, mysql_client):
        """
        :param mysql_client: async MySQL client used for all task state reads/writes
        """
        self.mysql_client = mysql_client
        self.config = Config()
        # Table names resolved from configuration.
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        # Account-level settings stored as JSON strings in config.
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        self.account_map = json.loads(self.config.get_config_value("accountMap"))
        # Max number of tasks fetched per run (also the spider concurrency).
        self.spider_coroutines = self.config.get_config_value("spiderCoroutines")
        self.const = NewContentIdTaskConst()
    async def get_tasks(self):
        """
        Fetch pending tasks from the match table.

        1. Roll back tasks stuck in an in-flight status beyond the processing
           timeout (status reset to init, retry counter bumped).
        2. Mark tasks over the retry limit as failed, unless they already
           reached ETL-complete or published status.
        3. Return up to `spider_coroutines` init-status tasks, ordered by
           flow pool level and request time.
        :return: list of task dicts (possibly empty)
        """
        # Find in-flight tasks so timeouts can be detected; if a task stays
        # in a processing status for more than the timeout, reset it to init
        # and bump process_times.
        select_processing_sql = f"""
            SELECT
                trace_id, content_status_update_time, process_times, content_status
            FROM
                {self.article_match_video_table}
            WHERE
                content_status in (
                    {self.const.TASK_PROCESSING_STATUS},
                    {self.const.TASK_KIMI_FINISHED_STATUS},
                    {self.const.TASK_SPIDER_FINISHED_STATUS}
                )
                and process_times <= {self.const.TASK_MAX_PROCESS_TIMES};
            """
        processing_articles = await self.mysql_client.async_select(select_processing_sql)
        if processing_articles:
            processing_list = [
                {
                    "trace_id": item[0],
                    "content_status_update_time": item[1],
                    "process_times": item[2],
                    "content_status": item[3]
                }
                for item in processing_articles
            ]
            for obj in processing_list:
                if int(time.time()) - obj['content_status_update_time'] >= self.const.TASK_PROCESSING_TIMEOUT:
                    # Task is considered failed: roll it back to init.
                    # NOTE(review): roll_back_content_status_when_fails itself
                    # writes process_times + 1, so passing process_times + 1
                    # here bumps the counter by 2 — confirm this is intended.
                    await self.roll_back_content_status_when_fails(
                        process_times=obj['process_times'] + 1,
                        trace_id=obj['trace_id'],
                        ori_content_status=obj['content_status']
                    )
        # Mark tasks over the retry limit as failed, unless they are already
        # ETL-complete or published. (The WHERE columns should be indexed.)
        update_status_sql = f"""
            UPDATE
                {self.article_match_video_table}
            SET
                content_status = %s
            WHERE
                process_times > %s and content_status not in (%s, %s);
            """
        await self.mysql_client.async_insert(
            update_status_sql,
            params=(
                self.const.TASK_FAIL_STATUS,
                self.const.TASK_MAX_PROCESS_TIMES,
                self.const.TASK_ETL_COMPLETE_STATUS,
                self.const.TASK_PUBLISHED_STATUS
            )
        )
        # Fetch init-status tasks that still have retries left.
        select_sql = f"""
            SELECT
                trace_id, content_id, flow_pool_level, gh_id, process_times, publish_flag
            FROM
                {self.article_match_video_table}
            WHERE
                content_status = {self.const.TASK_INIT_STATUS}
                and process_times <= {self.const.TASK_MAX_PROCESS_TIMES}
            ORDER BY flow_pool_level, request_timestamp
            LIMIT {self.spider_coroutines};
            """
        tasks = await self.mysql_client.async_select(select_sql)
        if tasks:
            return [
                {
                    "trace_id": i[0],
                    "content_id": i[1],
                    "flow_pool_level": i[2],
                    "gh_id": i[3],
                    "process_times": i[4],
                    "publish_flag": i[5]
                }
                for i in tasks
            ]
        else:
            return []
  112. async def get_video_list(self, content_id):
  113. """
  114. 判断该文章是否存在历史匹配视频
  115. :param content_id
  116. :return:
  117. """
  118. sql = f"""
  119. SELECT id
  120. FROM {self.article_crawler_video_table}
  121. WHERE content_id = '{content_id}' and download_status = {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  122. """
  123. res_tuple = await self.mysql_client.async_select(sql)
  124. if len(res_tuple) >= self.const.MIN_MATCH_VIDEO_NUM:
  125. return True
  126. else:
  127. return False
  128. async def update_content_status(self, new_content_status, trace_id, ori_content_status):
  129. """
  130. :param new_content_status:
  131. :param trace_id:
  132. :param ori_content_status:
  133. :return:
  134. """
  135. update_sql = f"""
  136. UPDATE {self.article_match_video_table}
  137. SET content_status = %s, content_status_update_time = %s
  138. WHERE trace_id = %s and content_status = %s;
  139. """
  140. row_counts = await self.mysql_client.async_insert(
  141. sql=update_sql,
  142. params=(
  143. new_content_status,
  144. int(time.time()),
  145. trace_id,
  146. ori_content_status
  147. )
  148. )
  149. return row_counts
  150. async def roll_back_content_status_when_fails(self, process_times, trace_id, ori_content_status=None):
  151. """
  152. 处理失败,回滚至初始状态,处理次数加 1
  153. :param process_times:
  154. :param trace_id:
  155. :param ori_content_status:
  156. :return:
  157. """
  158. if not ori_content_status:
  159. ori_content_status = self.const.TASK_PROCESSING_STATUS
  160. update_article_sql = f"""
  161. UPDATE {self.article_match_video_table}
  162. SET
  163. content_status = %s,
  164. content_status_update_time = %s,
  165. process_times = %s
  166. WHERE trace_id = %s and content_status = %s;
  167. """
  168. await self.mysql_client.async_insert(
  169. sql=update_article_sql,
  170. params=(
  171. self.const.TASK_INIT_STATUS,
  172. int(time.time()),
  173. process_times + 1,
  174. trace_id,
  175. ori_content_status
  176. )
  177. )
  178. async def judge_whether_same_content_id_is_processing(self, content_id):
  179. """
  180. 同一个 content_id 只需要处理一次
  181. :param content_id:
  182. :return:
  183. success: 4
  184. init: 0
  185. fail: 99
  186. todo: 存在处理失败的content_id是否需要不再处理
  187. """
  188. select_sql = f"""
  189. SELECT distinct content_status
  190. FROM {self.article_match_video_table}
  191. WHERE content_id = '{content_id}';
  192. """
  193. result = await self.mysql_client.async_select(select_sql)
  194. if result:
  195. for item in result:
  196. content_status = item[0]
  197. # if content_status not in {self.TASK_INIT_STATUS, self.TASK_PUBLISHED_STATUS} :
  198. if content_status in {
  199. self.const.TASK_KIMI_FINISHED_STATUS,
  200. self.const.TASK_SPIDER_FINISHED_STATUS,
  201. self.const.TASK_ETL_COMPLETE_STATUS,
  202. self.const.TASK_PROCESSING_STATUS,
  203. self.const.TASK_PUBLISHED_STATUS
  204. }:
  205. return True
  206. return False
  207. else:
  208. return False
  209. async def get_downloaded_videos(self, content_id):
  210. """
  211. 获取已下载的视频
  212. :return:
  213. """
  214. sql = f"""
  215. SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
  216. FROM {self.article_crawler_video_table}
  217. WHERE content_id = '{content_id}' and download_status = {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  218. ORDER BY score DESC;
  219. """
  220. res_tuple = await self.mysql_client.async_select(sql)
  221. return [
  222. {
  223. "platform": i[0],
  224. "play_count": i[1],
  225. "like_count": i[2],
  226. "video_oss_path": i[3],
  227. "cover_oss_path": i[4],
  228. "uid": i[5]
  229. }
  230. for i in res_tuple
  231. ]
  232. async def get_kimi_status(self, content_id):
  233. """
  234. 通过 content_id 获取kimi info
  235. :return:
  236. """
  237. select_sql = f"""
  238. select kimi_status
  239. from {self.article_text_table}
  240. where content_id = '{content_id}';
  241. """
  242. response = await self.mysql_client.async_select(select_sql)
  243. if response:
  244. kimi_status = response[0][0]
  245. return kimi_status
  246. else:
  247. return self.const.ARTICLE_TEXT_TABLE_ERROR
    async def kimi_task(self, params):
        """
        Run the Kimi summarization step for a single task.

        If a Kimi result already exists for this content_id, just advance the
        task status and return the stored result. Otherwise take the status
        lock, call the Kimi service, persist its output and advance the
        status; on failure the task is rolled back to init.
        :param params: task dict with content_id, trace_id and process_times
        :return: dict with kimi_title / ori_title / kimi_summary / kimi_keys;
                 {} when the Kimi call fails; None when the status lock is
                 lost or the article text row is missing
        """
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == self.const.KIMI_SUCCESS_STATUS:
            # Kimi already succeeded earlier: advance init -> kimi-finished.
            affected_rows = await self.update_content_status(
                new_content_status=self.const.TASK_KIMI_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.const.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # Another worker already owns the status lock for this task.
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            get_kimi_sql = f"""
            SELECT article_title, kimi_title, kimi_summary, kimi_keys
            FROM {self.article_text_table}
            WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == self.const.ARTICLE_TEXT_TABLE_ERROR:
            # No row for this content_id in the article text table.
            logging(
                code="4000",
                info="long_articles_text表中未找到 content_id"
            )
        else:
            # Start processing: move content_status from init (0) to processing (101).
            affected_rows = await self.update_content_status(
                new_content_status=self.const.TASK_PROCESSING_STATUS,
                trace_id=trace_id,
                ori_content_status=self.const.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            K = KimiServer()
            try:
                select_sql = f"""
                select article_title, article_text
                from {self.article_text_table}
                where content_id = '{content_id}'
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                # Quotes are stripped because the title is later interpolated
                # into strings/SQL elsewhere.
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                    kimi_title = %s,
                    kimi_summary = %s,
                    kimi_keys = %s,
                    kimi_status = %s
                WHERE content_id = %s;"""
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_title, content_title, content_keys, self.const.KIMI_SUCCESS_STATUS, params['content_id'])
                )
                await self.update_content_status(
                    new_content_status=self.const.TASK_KIMI_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.const.TASK_PROCESSING_STATUS
                )
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # Kimi processing failed: record the failure status on the
                # text table ...
                update_kimi_sql = f"""
                UPDATE {self.article_text_table}
                SET
                    kimi_status = %s
                WHERE content_id = %s
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        self.const.KIMI_FAIL_STATUS,
                        content_id
                    )
                )
                # ... and roll the task back from processing (101) to init (0).
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}
    async def spider_task(self, params, kimi_result):
        """
        Run the video-search (spider) step for a task.

        Skips the search when enough videos were already crawled; otherwise
        takes the status lock, searches the web for candidate videos, and
        advances or rolls back the task status based on the result count.
        :param params: task dict (trace_id, content_id, process_times, gh_id)
        :param kimi_result: dict produced by kimi_task (titles, summary, keys)
        :return: True on success, False on lock loss / search failure / error
        """
        trace_id = params['trace_id']
        content_id = params['content_id']
        process_times = params['process_times']
        gh_id = params['gh_id']
        select_sql = f"""
        SELECT count(id)
        FROM {self.article_crawler_video_table}
        WHERE content_id = '{content_id}'
        AND download_status = {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS};
        """
        count_tuple = await self.mysql_client.async_select(select_sql)
        counts = count_tuple[0][0]
        if counts >= self.const.MIN_MATCH_VIDEO_NUM:
            # Enough videos already crawled: advance straight to spider-finished.
            await self.update_content_status(
                new_content_status=self.const.TASK_SPIDER_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=self.const.TASK_KIMI_FINISHED_STATUS
            )
            return True
        # Start processing: move status from kimi-finished (1) to processing (101).
        affected_rows = await self.update_content_status(
            new_content_status=self.const.TASK_PROCESSING_STATUS,
            ori_content_status=self.const.TASK_KIMI_FINISHED_STATUS,
            trace_id=trace_id
        )
        if affected_rows == 0:
            # Another worker already owns the status lock for this task.
            logging(
                code="6000",
                info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
            )
            return False
        try:
            logging(
                code="spider_1001",
                info="开始执行搜索任务",
                trace_id=trace_id,
                data=kimi_result
            )
            search_videos_count = await search_videos_from_web(
                info={
                    "ori_title": kimi_result['ori_title'],
                    "kimi_summary": kimi_result['kimi_summary'],
                    "kimi_keys": kimi_result['kimi_keys'],
                    "trace_id": trace_id,
                    "gh_id": gh_id,
                    "content_id": content_id,
                    "crawler_video_table": self.article_crawler_video_table
                },
                gh_id_map=self.account_map,
                db_client=self.mysql_client
            )
            if search_videos_count >= self.const.MIN_MATCH_VIDEO_NUM:
                # Search succeeded: move from processing (101) to spider-finished (2).
                logging(
                    code="spider_1002",
                    info="搜索成功",
                    trace_id=trace_id,
                    data=kimi_result
                )
                await self.update_content_status(
                    new_content_status=self.const.TASK_SPIDER_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=self.const.TASK_PROCESSING_STATUS
                )
                return True
            else:
                # Not enough results: roll back to init for a retry.
                logging(
                    code="spider_1003",
                    info="搜索失败",
                    trace_id=trace_id,
                    data=kimi_result
                )
                await self.roll_back_content_status_when_fails(
                    process_times=process_times + 1,
                    trace_id=trace_id
                )
                return False
        except Exception as e:
            await self.roll_back_content_status_when_fails(
                process_times=process_times + 1,
                trace_id=trace_id
            )
            print("爬虫处理失败: {}".format(e))
            return False
  449. async def etl_task(self, params):
  450. """
  451. download && upload videos
  452. :param params:
  453. :return:
  454. """
  455. trace_id = params['trace_id']
  456. content_id = params['content_id']
  457. process_times = params['process_times']
  458. # 判断是否有三条已经下载完成的视频
  459. select_sql = f"""
  460. select count(id)
  461. from {self.article_crawler_video_table}
  462. where content_id = '{content_id}' and download_status = {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  463. """
  464. video_count_tuple = await self.mysql_client.async_select(select_sql)
  465. video_count = video_count_tuple[0][0]
  466. if video_count >= self.const.MIN_MATCH_VIDEO_NUM:
  467. affect_rows = await self.update_content_status(
  468. ori_content_status=self.const.TASK_SPIDER_FINISHED_STATUS,
  469. trace_id=trace_id,
  470. new_content_status=self.const.TASK_ETL_COMPLETE_STATUS
  471. )
  472. if affect_rows == 0:
  473. logging(
  474. code="6000",
  475. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  476. )
  477. return False
  478. return True
  479. else:
  480. # 开始处理, 将文章状态修改为处理状态
  481. affected_rows = await self.update_content_status(
  482. ori_content_status=self.const.TASK_SPIDER_FINISHED_STATUS,
  483. trace_id=trace_id,
  484. new_content_status=self.const.TASK_PROCESSING_STATUS
  485. )
  486. if affected_rows == 0:
  487. logging(
  488. code="6000",
  489. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  490. )
  491. return False
  492. select_sql = f"""
  493. SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
  494. FROM {self.article_crawler_video_table}
  495. WHERE content_id = '{content_id}' and download_status != {self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS}
  496. ORDER BY score DESC;
  497. """
  498. videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
  499. downloaded_count = 0
  500. for line in videos_need_to_download_tuple:
  501. params = {
  502. "id": line[0],
  503. "video_id": line[1],
  504. "platform": line[2],
  505. "video_title": line[3],
  506. "video_url": line[4],
  507. "cover_url": line[5],
  508. "user_id": line[6],
  509. "trace_id": line[7]
  510. }
  511. try:
  512. local_video_path, local_cover_path = generate_video_path(params['platform'], params['video_id'])
  513. # download videos
  514. file_path = await download_video(
  515. file_path=local_video_path,
  516. platform=params['platform'],
  517. video_url=params['video_url']
  518. )
  519. if not file_path:
  520. # 说明视频下载失败,无需上传该视频, 将该条记录设置为失败状态
  521. update_sql = f"""
  522. UPDATE {self.article_crawler_video_table}
  523. SET download_status = %s
  524. WHERE id = %s;
  525. """
  526. await self.mysql_client.async_insert(
  527. sql=update_sql,
  528. params=(self.const.VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  529. )
  530. logging(
  531. code="etl_1001",
  532. info="etl_下载视频失败",
  533. trace_id=trace_id,
  534. function="etl_task"
  535. )
  536. else:
  537. # download cover
  538. cover_path = await download_cover(
  539. file_path=local_cover_path,
  540. platform=params['platform'],
  541. cover_url=params['cover_url']
  542. )
  543. # upload video to oss
  544. oss_video = await upload_to_oss(
  545. local_video_path=file_path,
  546. download_type="video"
  547. )
  548. # upload cover to oss
  549. if cover_path:
  550. oss_cover = await upload_to_oss(
  551. local_video_path=cover_path,
  552. download_type="image"
  553. )
  554. else:
  555. oss_cover = None
  556. # change status to success
  557. update_sql = f"""
  558. UPDATE {self.article_crawler_video_table}
  559. SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
  560. WHERE id = %s;
  561. """
  562. await self.mysql_client.async_insert(
  563. sql=update_sql,
  564. params=(
  565. oss_video,
  566. oss_cover,
  567. self.const.VIDEO_DOWNLOAD_SUCCESS_STATUS,
  568. params['id']
  569. )
  570. )
  571. downloaded_count += 1
  572. logging(
  573. code="etl_1002",
  574. info="etl_视频下载成功",
  575. trace_id=trace_id,
  576. function="etl_task"
  577. )
  578. # 如果下载的视频数已经大于3, 则直接退出循环,修改状态为ETL成功状态
  579. if downloaded_count > self.const.MIN_MATCH_VIDEO_NUM:
  580. await self.update_content_status(
  581. ori_content_status=self.const.TASK_PROCESSING_STATUS,
  582. trace_id=trace_id,
  583. new_content_status=self.const.TASK_ETL_COMPLETE_STATUS
  584. )
  585. return True
  586. except Exception as e:
  587. update_sql = f"""
  588. UPDATE {self.article_crawler_video_table}
  589. SET download_status = %s
  590. WHERE id = %s;
  591. """
  592. await self.mysql_client.async_insert(
  593. sql=update_sql,
  594. params=(self.const.VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  595. )
  596. logging(
  597. code="etl_1001",
  598. info="etl_下载视频失败",
  599. trace_id=trace_id,
  600. function="etl_task"
  601. )
  602. if downloaded_count >= 3:
  603. await self.update_content_status(
  604. ori_content_status=self.const.TASK_PROCESSING_STATUS,
  605. trace_id=trace_id,
  606. new_content_status=self.const.TASK_ETL_COMPLETE_STATUS
  607. )
  608. return True
  609. else:
  610. await self.roll_back_content_status_when_fails(
  611. process_times=process_times + 1,
  612. trace_id=trace_id
  613. )
  614. return False
  615. async def publish_task(self, params, kimi_title):
  616. """
  617. 发布任务
  618. :param kimi_title:
  619. :param params:
  620. :return:
  621. """
  622. gh_id = params['gh_id']
  623. flow_pool_level = params['flow_pool_level']
  624. content_id = params['content_id']
  625. trace_id = params['trace_id']
  626. process_times = params['process_times']
  627. # 开始处理,将状态修改为操作状态
  628. affected_rows = await self.update_content_status(
  629. ori_content_status=self.const.TASK_ETL_COMPLETE_STATUS,
  630. trace_id=trace_id,
  631. new_content_status=self.const.TASK_PROCESSING_STATUS
  632. )
  633. if affected_rows == 0:
  634. logging(
  635. code="6000",
  636. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  637. )
  638. return False
  639. try:
  640. download_videos = await self.get_downloaded_videos(content_id)
  641. match flow_pool_level:
  642. case "autoArticlePoolLevel4":
  643. # 冷启层, 全量做
  644. video_list = shuffle_list(download_videos)[:3]
  645. case "autoArticlePoolLevel3":
  646. if self.gh_id_dict.get(gh_id):
  647. video_list = shuffle_list(download_videos)[:3]
  648. else:
  649. video_list = download_videos[:3]
  650. case "autoArticlePoolLevel2":
  651. # 次条,只针对具体账号做
  652. video_list = []
  653. case "autoArticlePoolLevel1":
  654. # 头条,先不做
  655. video_list = download_videos[:3]
  656. case _:
  657. video_list = download_videos[:3]
  658. L = []
  659. for video_obj in video_list:
  660. params = {
  661. "videoPath": video_obj['video_oss_path'],
  662. "uid": video_obj['uid'],
  663. "title": kimi_title
  664. }
  665. publish_response = await publish_to_pq(params)
  666. video_id = publish_response['data']['id']
  667. response = await get_pq_video_detail(video_id)
  668. obj = {
  669. "uid": video_obj['uid'],
  670. "source": video_obj['platform'],
  671. "kimiTitle": kimi_title,
  672. "videoId": response['data'][0]['id'],
  673. "videoCover": response['data'][0]['shareImgPath'],
  674. "videoPath": response['data'][0]['videoPath'],
  675. "videoOss": video_obj['video_oss_path']
  676. }
  677. L.append(obj)
  678. update_sql = f"""
  679. UPDATE {self.article_match_video_table}
  680. SET content_status = %s, response = %s, process_times = %s
  681. WHERE trace_id = %s and content_status = %s;
  682. """
  683. # 从操作中状态修改为已发布状态
  684. await self.mysql_client.async_insert(
  685. sql=update_sql,
  686. params=(
  687. self.const.TASK_PUBLISHED_STATUS,
  688. json.dumps(L, ensure_ascii=False),
  689. process_times + 1,
  690. trace_id,
  691. self.const.TASK_PROCESSING_STATUS
  692. )
  693. )
  694. except Exception as e:
  695. await self.roll_back_content_status_when_fails(
  696. process_times=params['process_times'] + 1,
  697. trace_id=params['trace_id']
  698. )
  699. print(e)
    async def start_process(self, params):
        """
        Run the full pipeline for one task: kimi -> spider -> etl -> publish.

        Each step only proceeds when the previous one succeeded; failures are
        logged, and repeated Kimi failures mark the content illegal and send
        a feishu alert.
        :param params: task dict
        :return: None
        """
        # step1: run the kimi step
        # time.sleep(5) # test delay for multiple workers contending on one task
        kimi_result = await self.kimi_task(params)
        trace_id = params['trace_id']
        process_times = params['process_times']
        content_id = params['content_id']
        gh_id = params['gh_id']
        publish_flag = params['publish_flag']
        print(kimi_result)
        if kimi_result:
            # Kimi finished: run the spider step.
            print("kimi success")
            logging(
                code=3001,
                info="kimi success",
                trace_id=trace_id
            )
            spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
            if spider_flag:
                # Spider finished: run the ETL step.
                print("spider success")
                logging(
                    code=3002,
                    info="spider_success",
                    trace_id=trace_id
                )
                etl_flag = await self.etl_task(params)
                if etl_flag:
                    # Downloads/uploads finished: run the publish step.
                    print("etl success")
                    logging(
                        code="3003",
                        info="etl_success",
                        trace_id=trace_id
                    )
                    """
                    todo 若新建计划,计划为设置托管,但接入账号又在配置账号中,仍会走托管逻辑,需考虑历史存量的处理
                    目前先对这两种情况都做托管操作
                    """
                    if publish_flag == self.const.DO_NOT_NEED_PUBLISH:
                        # Publishing is delegated to the long-article system.
                        logging(
                            code="3013",
                            info="不需要发布,长文系统托管发布",
                            trace_id=trace_id
                        )
                        return
                    else:
                        try:
                            await self.publish_task(params, kimi_result['kimi_title'])
                            logging(
                                code="3004",
                                info="publish_success",
                                trace_id=trace_id
                            )
                            await record_trace_id(
                                trace_id=trace_id,
                                status=self.const.RECORD_SUCCESS_TRACE_ID_CODE
                            )
                        except Exception as e:
                            logging(
                                code="6004",
                                info="publish 失败--{}".format(e),
                                trace_id=params['trace_id']
                            )
                else:
                    logging(
                        code="6003",
                        info="ETL 处理失败",
                        trace_id=params['trace_id']
                    )
            else:
                logging(
                    code="6002",
                    info="爬虫处理失败",
                    trace_id=params['trace_id']
                )
        else:
            logging(
                code="6001",
                info="kimi 处理失败",
                trace_id=trace_id
            )
            if process_times >= self.const.TASK_MAX_PROCESS_TIMES:
                # Kimi failed too many times: give up on this content.
                logging(
                    code="6011",
                    info="kimi处理次数达到上限, 放弃处理",
                    trace_id=trace_id
                )
                # Mark all init-status rows sharing this content_id as
                # kimi-illegal so they are not retried.
                update_sql = f"""
                UPDATE {self.article_match_video_table}
                SET content_status = %s
                WHERE content_id = %s and content_status = %s;
                """
                affected_rows = await self.mysql_client.async_insert(
                    sql=update_sql,
                    params=(
                        self.const.KIMI_ILLEGAL_STATUS,
                        content_id,
                        self.const.TASK_INIT_STATUS
                    )
                )
                bot(
                    title="KIMI 处理失败",
                    detail={
                        "content_id": content_id,
                        "affected_rows": affected_rows
                    }
                )
  815. async def process_task(self, params):
  816. """
  817. 处理任务
  818. :return:
  819. """
  820. content_id = params['content_id']
  821. download_videos = await self.get_video_list(content_id)
  822. if not download_videos:
  823. # 开始处理, 判断是否有相同的文章 id 正在处理
  824. processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
  825. if processing_flag:
  826. logging(
  827. code="9001",
  828. info="该 content id 正在处理中, 跳过此任务--{}".format(content_id)
  829. )
  830. else:
  831. await self.start_process(params=params)
  832. else:
  833. print("存在已下载视频")
  834. async def deal(self):
  835. """
  836. function
  837. :return:
  838. """
  839. task_list = await self.get_tasks()
  840. task_dict = {}
  841. # 对 content_id去重
  842. for task in task_list:
  843. key = task['content_id']
  844. task_dict[key] = task
  845. process_list = []
  846. for item in task_dict:
  847. process_list.append(task_dict[item])
  848. logging(
  849. code="5001",
  850. info="Match Task Got {} this time".format(len(process_list)),
  851. function="Publish Task"
  852. )
  853. if task_list:
  854. total_task = len(process_list)
  855. print(process_list)
  856. a = time.time()
  857. print("开始处理,一共{}个任务".format(total_task))
  858. tasks = [self.process_task(params) for params in process_list]
  859. await asyncio.gather(*tasks)
  860. b = time.time()
  861. print("处理时间: {} s".format(b - a))
  862. else:
  863. logging(
  864. code="9008",
  865. info="没有要处理的请求"
  866. )