# new_contentId_task.py
  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. from applications.config import Config
  7. from applications.config.const import new_content_id_task as NewContentIdTaskConst
  8. from applications.log import logging
  9. from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
  10. from applications.functions.common import shuffle_list
  11. from applications.functions.kimi import KimiServer
  12. from applications.spider import search_videos_from_web
  13. from applications.etl_function import *
  14. from applications.feishu import bot
  15. from applications.functions.aigc import record_trace_id
class NewContentIdTask(object):
    """
    Matching pipeline for content ids that have no previously published article.
    """
    def __init__(self, mysql_client):
        # Async MySQL client; every query in this task goes through it.
        self.mysql_client = mysql_client
        self.config = Config()
        # Table names resolved from central config.
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        # Level-2 test-account mapping, stored as a JSON string in config.
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        # gh_id -> account mapping used by the spider step.
        self.account_map = json.loads(self.config.get_config_value("accountMap"))
        # Batch size: max tasks fetched/processed concurrently per round.
        self.spider_coroutines = self.config.get_config_value("spiderCoroutines")
  29. async def get_tasks(self):
  30. """
  31. 获取 task
  32. :return:
  33. """
  34. # 处理未托管的任务
  35. await self.roll_back_unfinished_tasks(publish_flag=NewContentIdTaskConst.NEED_PUBLISH)
  36. # 处理托管任务
  37. await self.roll_back_unfinished_tasks(publish_flag=NewContentIdTaskConst.DO_NOT_NEED_PUBLISH)
  38. # 将 process_times > 3 且状态不为 4 的任务的状态修改为失败, 判断条件需要加上索引
  39. update_status_sql = f"""
  40. UPDATE
  41. {self.article_match_video_table}
  42. SET
  43. content_status = %s
  44. WHERE
  45. process_times > %s and content_status not in (%s, %s);
  46. """
  47. await self.mysql_client.async_insert(
  48. update_status_sql,
  49. params=(
  50. NewContentIdTaskConst.TASK_FAIL_STATUS,
  51. NewContentIdTaskConst.TASK_MAX_PROCESS_TIMES,
  52. NewContentIdTaskConst.TASK_ETL_COMPLETE_STATUS,
  53. NewContentIdTaskConst.TASK_PUBLISHED_STATUS
  54. )
  55. )
  56. # 获取 process_times <= 3 且 content_status = 0 的任务
  57. select_sql = f"""
  58. SELECT
  59. trace_id, content_id, flow_pool_level, gh_id, process_times, publish_flag
  60. FROM
  61. {self.article_match_video_table}
  62. WHERE
  63. content_status = {NewContentIdTaskConst.TASK_INIT_STATUS}
  64. and process_times <= {NewContentIdTaskConst.TASK_MAX_PROCESS_TIMES}
  65. ORDER BY flow_pool_level, request_timestamp
  66. LIMIT {self.spider_coroutines};
  67. """
  68. tasks = await self.mysql_client.async_select(select_sql)
  69. if tasks:
  70. return [
  71. {
  72. "trace_id": i[0],
  73. "content_id": i[1],
  74. "flow_pool_level": i[2],
  75. "gh_id": i[3],
  76. "process_times": i[4],
  77. "publish_flag": i[5]
  78. }
  79. for i in tasks
  80. ]
  81. else:
  82. return []
  83. async def roll_back_unfinished_tasks(self, publish_flag):
  84. """
  85. 将长时间处于中间状态的任务回滚
  86. """
  87. # 获取 content_status 为 处理中 的任务,判断时间, 如果超过 1h 则,则将它改为 0, process_times + 1
  88. if publish_flag == NewContentIdTaskConst.NEED_PUBLISH:
  89. processing_status_tuple = (
  90. NewContentIdTaskConst.TASK_PROCESSING_STATUS,
  91. NewContentIdTaskConst.TASK_KIMI_FINISHED_STATUS,
  92. NewContentIdTaskConst.TASK_SPIDER_FINISHED_STATUS,
  93. NewContentIdTaskConst.TASK_ETL_COMPLETE_STATUS
  94. )
  95. elif publish_flag == NewContentIdTaskConst.DO_NOT_NEED_PUBLISH:
  96. processing_status_tuple = (
  97. NewContentIdTaskConst.TASK_PROCESSING_STATUS,
  98. NewContentIdTaskConst.TASK_KIMI_FINISHED_STATUS,
  99. NewContentIdTaskConst.TASK_SPIDER_FINISHED_STATUS
  100. )
  101. else:
  102. return
  103. select_processing_sql = f"""
  104. SELECT
  105. trace_id, content_status_update_time, process_times, content_status
  106. FROM
  107. {self.article_match_video_table}
  108. WHERE
  109. content_status in {processing_status_tuple}
  110. and process_times <= {NewContentIdTaskConst.TASK_MAX_PROCESS_TIMES}
  111. and publish_flag = {publish_flag};
  112. """
  113. processing_articles = await self.mysql_client.async_select(select_processing_sql)
  114. if processing_articles:
  115. processing_list = [
  116. {
  117. "trace_id": item[0],
  118. "content_status_update_time": item[1],
  119. "process_times": item[2],
  120. "content_status": item[3]
  121. }
  122. for item in processing_articles
  123. ]
  124. for obj in processing_list:
  125. if int(time.time()) - obj['content_status_update_time'] >= NewContentIdTaskConst.TASK_PROCESSING_TIMEOUT:
  126. # 认为该任务失败
  127. await self.roll_back_content_status_when_fails(
  128. process_times=obj['process_times'] + 1,
  129. trace_id=obj['trace_id'],
  130. ori_content_status=obj['content_status']
  131. )
  132. async def get_video_list(self, content_id):
  133. """
  134. 判断该文章是否存在历史匹配视频
  135. :param content_id
  136. :return:
  137. """
  138. sql = f"""
  139. SELECT id
  140. FROM {self.article_crawler_video_table}
  141. WHERE content_id = '{content_id}' and download_status = {NewContentIdTaskConst.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  142. """
  143. res_tuple = await self.mysql_client.async_select(sql)
  144. if len(res_tuple) >= NewContentIdTaskConst.MIN_MATCH_VIDEO_NUM:
  145. return True
  146. else:
  147. return False
  148. async def update_content_status(self, new_content_status, trace_id, ori_content_status):
  149. """
  150. :param new_content_status:
  151. :param trace_id:
  152. :param ori_content_status:
  153. :return:
  154. """
  155. update_sql = f"""
  156. UPDATE {self.article_match_video_table}
  157. SET content_status = %s, content_status_update_time = %s
  158. WHERE trace_id = %s and content_status = %s;
  159. """
  160. row_counts = await self.mysql_client.async_insert(
  161. sql=update_sql,
  162. params=(
  163. new_content_status,
  164. int(time.time()),
  165. trace_id,
  166. ori_content_status
  167. )
  168. )
  169. return row_counts
    async def roll_back_content_status_when_fails(self, process_times, trace_id, ori_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS):
        """
        Roll a failed task back to the initial status and bump its retry counter.

        The row is only touched when it still holds `ori_content_status`
        (optimistic lock). This method itself writes `process_times + 1`, so
        callers should pass the CURRENT counter value.
        NOTE(review): several call sites pass `process_times + 1`, which makes
        the counter jump by 2 per failure — TODO confirm intended semantics.
        :param process_times: current process_times of the row
        :param trace_id: task identifier
        :param ori_content_status: status the row must currently have
        :return: None
        """
        update_article_sql = f"""
            UPDATE {self.article_match_video_table}
            SET
                content_status = %s,
                content_status_update_time = %s,
                process_times = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_article_sql,
            params=(
                NewContentIdTaskConst.TASK_INIT_STATUS,
                int(time.time()),
                process_times + 1,
                trace_id,
                ori_content_status
            )
        )
  196. async def judge_whether_same_content_id_is_processing(self, content_id):
  197. """
  198. 同一个 content_id 只需要处理一次
  199. :param content_id:
  200. :return:
  201. success: 4
  202. init: 0
  203. fail: 99
  204. todo: 存在处理失败的content_id是否需要不再处理
  205. """
  206. select_sql = f"""
  207. SELECT distinct content_status
  208. FROM {self.article_match_video_table}
  209. WHERE content_id = '{content_id}';
  210. """
  211. result = await self.mysql_client.async_select(select_sql)
  212. if result:
  213. for item in result:
  214. content_status = item[0]
  215. # if content_status not in {self.TASK_INIT_STATUS, self.TASK_PUBLISHED_STATUS} :
  216. if content_status in {
  217. NewContentIdTaskConst.TASK_KIMI_FINISHED_STATUS,
  218. NewContentIdTaskConst.TASK_SPIDER_FINISHED_STATUS,
  219. NewContentIdTaskConst.TASK_ETL_COMPLETE_STATUS,
  220. NewContentIdTaskConst.TASK_PROCESSING_STATUS,
  221. NewContentIdTaskConst.TASK_PUBLISHED_STATUS
  222. }:
  223. return True
  224. return False
  225. else:
  226. return False
  227. async def get_downloaded_videos(self, content_id):
  228. """
  229. 获取已下载的视频
  230. :return:
  231. """
  232. sql = f"""
  233. SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
  234. FROM {self.article_crawler_video_table}
  235. WHERE content_id = '{content_id}' and download_status = {NewContentIdTaskConst.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  236. ORDER BY score DESC;
  237. """
  238. res_tuple = await self.mysql_client.async_select(sql)
  239. return [
  240. {
  241. "platform": i[0],
  242. "play_count": i[1],
  243. "like_count": i[2],
  244. "video_oss_path": i[3],
  245. "cover_oss_path": i[4],
  246. "uid": i[5]
  247. }
  248. for i in res_tuple
  249. ]
  250. async def get_kimi_status(self, content_id):
  251. """
  252. 通过 content_id 获取kimi info
  253. :return:
  254. """
  255. select_sql = f"""
  256. select kimi_status
  257. from {self.article_text_table}
  258. where content_id = '{content_id}';
  259. """
  260. response = await self.mysql_client.async_select(select_sql)
  261. if response:
  262. kimi_status = response[0][0]
  263. return kimi_status
  264. else:
  265. return NewContentIdTaskConst.ARTICLE_TEXT_TABLE_ERROR
    async def kimi_task(self, params):
        """
        Run the Kimi (LLM summarization) step for one task.

        If Kimi output already exists for this content_id, just read it back
        and advance the task status. Otherwise claim the task (status 0 -> 101),
        call the Kimi service, persist its output, and advance to
        kimi-finished. On failure the task is rolled back to init.
        :param params: task dict with content_id / trace_id / process_times
        :return: dict with kimi_title / ori_title / kimi_summary / kimi_keys,
                 {} on Kimi failure, or None when the text row is missing or
                 another worker holds the status lock
        """
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == NewContentIdTaskConst.KIMI_SUCCESS_STATUS:
            # Kimi already done for this content_id: advance init -> kimi-finished.
            affected_rows = await self.update_content_status(
                new_content_status=NewContentIdTaskConst.TASK_KIMI_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=NewContentIdTaskConst.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # Lost the optimistic-lock race to another worker.
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            get_kimi_sql = f"""
                SELECT article_title, kimi_title, kimi_summary, kimi_keys
                FROM {self.article_text_table}
                WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == NewContentIdTaskConst.ARTICLE_TEXT_TABLE_ERROR:
            # content_id missing from the article text table entirely.
            logging(
                code="4000",
                info="long_articles_text表中未找到 content_id"
            )
        else:
            # Claim the task: move content_status 0 (init) -> 101 (processing).
            affected_rows = await self.update_content_status(
                new_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS,
                trace_id=trace_id,
                ori_content_status=NewContentIdTaskConst.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                # Lost the optimistic-lock race to another worker.
                logging(
                    code="6000",
                    info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
                )
                return
            K = KimiServer()
            try:
                select_sql = f"""
                    select article_title, article_text
                    from {self.article_text_table}
                    where content_id = '{content_id}'
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                # Strip quotes so the text is safe to embed in the SQL literal below.
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                update_kimi_sql = f"""
                    UPDATE {self.article_text_table}
                    SET
                        kimi_title = %s,
                        kimi_summary = %s,
                        kimi_keys = %s,
                        kimi_status = %s
                    WHERE content_id = %s;"""
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        kimi_title, content_title, content_keys, NewContentIdTaskConst.KIMI_SUCCESS_STATUS, params['content_id'])
                )
                # processing (101) -> kimi-finished.
                await self.update_content_status(
                    new_content_status=NewContentIdTaskConst.TASK_KIMI_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS
                )
                # NOTE: "kimi_summary" carries kimi_info['content_title'] here,
                # mirroring the kimi_summary column written above — presumably the
                # service returns the summary under that key; confirm upstream.
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # Kimi step failed: record the failure on the text row ...
                update_kimi_sql = f"""
                    UPDATE {self.article_text_table}
                    SET
                        kimi_status = %s
                    WHERE content_id = %s
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(
                        NewContentIdTaskConst.KIMI_FAIL_STATUS,
                        content_id
                    )
                )
                # ... and roll the task status back from 101 (processing) to 0 (init).
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}
    async def spider_task(self, params, kimi_result):
        """
        Run the web-spider step: search for candidate videos for this article.

        Skips the search when enough videos are already downloaded; otherwise
        claims the task (kimi-finished -> processing), searches, and either
        advances to spider-finished or rolls the task back to init.
        :param params: task dict with trace_id / content_id / process_times / gh_id
        :param kimi_result: output of kimi_task (ori_title / kimi_summary / kimi_keys)
        :return: True when enough videos are available, False otherwise
        """
        trace_id = params['trace_id']
        content_id = params['content_id']
        process_times = params['process_times']
        gh_id = params['gh_id']
        select_sql = f"""
            SELECT count(id)
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}'
            AND download_status = {NewContentIdTaskConst.VIDEO_DOWNLOAD_SUCCESS_STATUS};
        """
        count_tuple = await self.mysql_client.async_select(select_sql)
        counts = count_tuple[0][0]
        if counts >= NewContentIdTaskConst.MIN_MATCH_VIDEO_NUM:
            # Enough videos already downloaded: no search needed.
            await self.update_content_status(
                new_content_status=NewContentIdTaskConst.TASK_SPIDER_FINISHED_STATUS,
                trace_id=trace_id,
                ori_content_status=NewContentIdTaskConst.TASK_KIMI_FINISHED_STATUS
            )
            return True
        # Claim the task: kimi-finished (1) -> processing (101).
        affected_rows = await self.update_content_status(
            new_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS,
            ori_content_status=NewContentIdTaskConst.TASK_KIMI_FINISHED_STATUS,
            trace_id=trace_id
        )
        if affected_rows == 0:
            # Lost the optimistic-lock race to another worker.
            logging(
                code="6000",
                info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
            )
            return False
        try:
            logging(
                code="spider_1001",
                info="开始执行搜索任务",
                trace_id=trace_id,
                data=kimi_result
            )
            search_videos_count = await search_videos_from_web(
                info={
                    "ori_title": kimi_result['ori_title'],
                    "kimi_summary": kimi_result['kimi_summary'],
                    "kimi_keys": kimi_result['kimi_keys'],
                    "trace_id": trace_id,
                    "gh_id": gh_id,
                    "content_id": content_id,
                    "crawler_video_table": self.article_crawler_video_table
                },
                gh_id_map=self.account_map,
                db_client=self.mysql_client
            )
            if search_videos_count >= NewContentIdTaskConst.MIN_MATCH_VIDEO_NUM:
                # Spider succeeded: processing (101) -> spider-finished (2).
                logging(
                    code="spider_1002",
                    info="搜索成功",
                    trace_id=trace_id,
                    data=kimi_result
                )
                await self.update_content_status(
                    new_content_status=NewContentIdTaskConst.TASK_SPIDER_FINISHED_STATUS,
                    trace_id=trace_id,
                    ori_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS
                )
                return True
            else:
                # Not enough results: roll back to init for a retry.
                logging(
                    code="spider_1003",
                    info="搜索失败",
                    trace_id=trace_id,
                    data=kimi_result
                )
                # NOTE(review): the rollback helper adds +1 itself, so passing
                # process_times + 1 bumps the retry counter by 2 — confirm intent.
                await self.roll_back_content_status_when_fails(
                    process_times=process_times + 1,
                    trace_id=trace_id
                )
                return False
        except Exception as e:
            # Any spider error: roll back to init for a retry.
            await self.roll_back_content_status_when_fails(
                process_times=process_times + 1,
                trace_id=trace_id
            )
            print("爬虫处理失败: {}".format(e))
            return False
  468. async def etl_task(self, params):
  469. """
  470. download && upload videos
  471. :param params:
  472. :return:
  473. """
  474. trace_id = params['trace_id']
  475. content_id = params['content_id']
  476. process_times = params['process_times']
  477. # 判断是否有三条已经下载完成的视频
  478. select_sql = f"""
  479. select count(id)
  480. from {self.article_crawler_video_table}
  481. where content_id = '{content_id}' and download_status = {NewContentIdTaskConst.VIDEO_DOWNLOAD_SUCCESS_STATUS};
  482. """
  483. video_count_tuple = await self.mysql_client.async_select(select_sql)
  484. video_count = video_count_tuple[0][0]
  485. if video_count >= NewContentIdTaskConst.MIN_MATCH_VIDEO_NUM:
  486. affect_rows = await self.update_content_status(
  487. ori_content_status=NewContentIdTaskConst.TASK_SPIDER_FINISHED_STATUS,
  488. trace_id=trace_id,
  489. new_content_status=NewContentIdTaskConst.TASK_ETL_COMPLETE_STATUS
  490. )
  491. if affect_rows == 0:
  492. logging(
  493. code="6000",
  494. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  495. )
  496. return False
  497. return True
  498. else:
  499. # 开始处理, 将文章状态修改为处理状态
  500. affected_rows = await self.update_content_status(
  501. ori_content_status=NewContentIdTaskConst.TASK_SPIDER_FINISHED_STATUS,
  502. trace_id=trace_id,
  503. new_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS
  504. )
  505. if affected_rows == 0:
  506. logging(
  507. code="6000",
  508. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  509. )
  510. return False
  511. select_sql = f"""
  512. SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
  513. FROM {self.article_crawler_video_table}
  514. WHERE content_id = '{content_id}' and download_status != {NewContentIdTaskConst.VIDEO_DOWNLOAD_SUCCESS_STATUS}
  515. ORDER BY score DESC;
  516. """
  517. videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
  518. downloaded_count = 0
  519. for line in videos_need_to_download_tuple:
  520. params = {
  521. "id": line[0],
  522. "video_id": line[1],
  523. "platform": line[2],
  524. "video_title": line[3],
  525. "video_url": line[4],
  526. "cover_url": line[5],
  527. "user_id": line[6],
  528. "trace_id": line[7]
  529. }
  530. try:
  531. local_video_path, local_cover_path = generate_video_path(params['platform'], params['video_id'])
  532. # download videos
  533. file_path = await download_video(
  534. file_path=local_video_path,
  535. platform=params['platform'],
  536. video_url=params['video_url']
  537. )
  538. if not file_path:
  539. # 说明视频下载失败,无需上传该视频, 将该条记录设置为失败状态
  540. update_sql = f"""
  541. UPDATE {self.article_crawler_video_table}
  542. SET download_status = %s
  543. WHERE id = %s;
  544. """
  545. await self.mysql_client.async_insert(
  546. sql=update_sql,
  547. params=(NewContentIdTaskConst.VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  548. )
  549. logging(
  550. code="etl_1001",
  551. info="etl_下载视频失败",
  552. trace_id=trace_id,
  553. function="etl_task"
  554. )
  555. else:
  556. # download cover
  557. cover_path = await download_cover(
  558. file_path=local_cover_path,
  559. platform=params['platform'],
  560. cover_url=params['cover_url']
  561. )
  562. # upload video to oss
  563. oss_video = await upload_to_oss(
  564. local_video_path=file_path,
  565. download_type="video"
  566. )
  567. # upload cover to oss
  568. if cover_path:
  569. oss_cover = await upload_to_oss(
  570. local_video_path=cover_path,
  571. download_type="image"
  572. )
  573. else:
  574. oss_cover = None
  575. # change status to success
  576. update_sql = f"""
  577. UPDATE {self.article_crawler_video_table}
  578. SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
  579. WHERE id = %s;
  580. """
  581. await self.mysql_client.async_insert(
  582. sql=update_sql,
  583. params=(
  584. oss_video,
  585. oss_cover,
  586. NewContentIdTaskConst.VIDEO_DOWNLOAD_SUCCESS_STATUS,
  587. params['id']
  588. )
  589. )
  590. downloaded_count += 1
  591. logging(
  592. code="etl_1002",
  593. info="etl_视频下载成功",
  594. trace_id=trace_id,
  595. function="etl_task"
  596. )
  597. # 如果下载的视频数已经大于3, 则直接退出循环,修改状态为ETL成功状态
  598. if downloaded_count > NewContentIdTaskConst.MIN_MATCH_VIDEO_NUM:
  599. await self.update_content_status(
  600. ori_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS,
  601. trace_id=trace_id,
  602. new_content_status=NewContentIdTaskConst.TASK_ETL_COMPLETE_STATUS
  603. )
  604. return True
  605. except Exception as e:
  606. update_sql = f"""
  607. UPDATE {self.article_crawler_video_table}
  608. SET download_status = %s
  609. WHERE id = %s;
  610. """
  611. await self.mysql_client.async_insert(
  612. sql=update_sql,
  613. params=(NewContentIdTaskConst.VIDEO_DOWNLOAD_FAIL_STATUS, params['id'])
  614. )
  615. logging(
  616. code="etl_1001",
  617. info="etl_下载视频失败",
  618. trace_id=trace_id,
  619. function="etl_task"
  620. )
  621. if downloaded_count >= 3:
  622. await self.update_content_status(
  623. ori_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS,
  624. trace_id=trace_id,
  625. new_content_status=NewContentIdTaskConst.TASK_ETL_COMPLETE_STATUS
  626. )
  627. return True
  628. else:
  629. await self.roll_back_content_status_when_fails(
  630. process_times=process_times + 1,
  631. trace_id=trace_id
  632. )
  633. return False
  634. async def publish_task(self, params, kimi_title):
  635. """
  636. 发布任务
  637. :param kimi_title:
  638. :param params:
  639. :return:
  640. """
  641. gh_id = params['gh_id']
  642. flow_pool_level = params['flow_pool_level']
  643. content_id = params['content_id']
  644. trace_id = params['trace_id']
  645. process_times = params['process_times']
  646. # 开始处理,将状态修改为操作状态
  647. affected_rows = await self.update_content_status(
  648. ori_content_status=NewContentIdTaskConst.TASK_ETL_COMPLETE_STATUS,
  649. trace_id=trace_id,
  650. new_content_status=NewContentIdTaskConst.TASK_PROCESSING_STATUS
  651. )
  652. if affected_rows == 0:
  653. logging(
  654. code="6000",
  655. info="多个进程抢占同一个任务的执行状态锁,抢占失败,return"
  656. )
  657. return False
  658. try:
  659. download_videos = await self.get_downloaded_videos(content_id)
  660. match flow_pool_level:
  661. case "autoArticlePoolLevel4":
  662. # 冷启层, 全量做
  663. video_list = shuffle_list(download_videos)[:3]
  664. case "autoArticlePoolLevel3":
  665. if self.gh_id_dict.get(gh_id):
  666. video_list = shuffle_list(download_videos)[:3]
  667. else:
  668. video_list = download_videos[:3]
  669. case "autoArticlePoolLevel2":
  670. # 次条,只针对具体账号做
  671. video_list = []
  672. case "autoArticlePoolLevel1":
  673. # 头条,先不做
  674. video_list = download_videos[:3]
  675. case _:
  676. video_list = download_videos[:3]
  677. L = []
  678. for video_obj in video_list:
  679. params = {
  680. "videoPath": video_obj['video_oss_path'],
  681. "uid": video_obj['uid'],
  682. "title": kimi_title
  683. }
  684. publish_response = await publish_to_pq(params)
  685. video_id = publish_response['data']['id']
  686. response = await get_pq_video_detail(video_id)
  687. obj = {
  688. "uid": video_obj['uid'],
  689. "source": video_obj['platform'],
  690. "kimiTitle": kimi_title,
  691. "videoId": response['data'][0]['id'],
  692. "videoCover": response['data'][0]['shareImgPath'],
  693. "videoPath": response['data'][0]['videoPath'],
  694. "videoOss": video_obj['video_oss_path']
  695. }
  696. L.append(obj)
  697. update_sql = f"""
  698. UPDATE {self.article_match_video_table}
  699. SET content_status = %s, response = %s, process_times = %s
  700. WHERE trace_id = %s and content_status = %s;
  701. """
  702. # 从操作中状态修改为已发布状态
  703. await self.mysql_client.async_insert(
  704. sql=update_sql,
  705. params=(
  706. NewContentIdTaskConst.TASK_PUBLISHED_STATUS,
  707. json.dumps(L, ensure_ascii=False),
  708. process_times + 1,
  709. trace_id,
  710. NewContentIdTaskConst.TASK_PROCESSING_STATUS
  711. )
  712. )
  713. except Exception as e:
  714. await self.roll_back_content_status_when_fails(
  715. process_times=params['process_times'] + 1,
  716. trace_id=params['trace_id']
  717. )
  718. print(e)
    async def start_process(self, params):
        """
        Run a single article through the full pipeline: kimi -> spider -> etl -> publish.

        Managed (hosted) tasks stop after ETL; failures at each stage are
        logged, and repeated kimi failures mark all matching init rows illegal
        and alert via the feishu bot.
        :param params: task dict with trace_id / content_id / gh_id /
                       process_times / publish_flag
        :return: None
        """
        # step1: run the kimi step.
        kimi_result = await self.kimi_task(params)
        trace_id = params['trace_id']
        process_times = params['process_times']
        content_id = params['content_id']
        gh_id = params['gh_id']
        publish_flag = params['publish_flag']
        print(kimi_result)
        if kimi_result:
            # step2: kimi done — run the spider step.
            print("kimi success")
            logging(
                code=3001,
                info="kimi success",
                trace_id=trace_id
            )
            spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
            if spider_flag:
                # step3: spider done — run the ETL (download/upload) step.
                print("spider success")
                logging(
                    code=3002,
                    info="spider_success",
                    trace_id=trace_id
                )
                etl_flag = await self.etl_task(params)
                if etl_flag:
                    # step4: ETL done — publish (unless the task is hosted).
                    print("etl success")
                    logging(
                        code="3003",
                        info="etl_success",
                        trace_id=trace_id
                    )
                    """
                    todo 若新建计划,计划为设置托管,但接入账号又在配置账号中,仍会走托管逻辑,需考虑历史存量的处理
                    目前先对这两种情况都做托管操作
                    """
                    if publish_flag == NewContentIdTaskConst.DO_NOT_NEED_PUBLISH:
                        # Hosted task: the long-article system publishes it later.
                        logging(
                            code="3013",
                            info="不需要发布,长文系统托管发布",
                            trace_id=trace_id
                        )
                        return
                    else:
                        try:
                            await self.publish_task(params, kimi_result['kimi_title'])
                            logging(
                                code="3004",
                                info="publish_success",
                                trace_id=trace_id
                            )
                            await record_trace_id(
                                trace_id=trace_id,
                                status=NewContentIdTaskConst.RECORD_SUCCESS_TRACE_ID_CODE
                            )
                        except Exception as e:
                            logging(
                                code="6004",
                                info="publish 失败--{}".format(e),
                                trace_id=params['trace_id']
                            )
                else:
                    logging(
                        code="6003",
                        info="ETL 处理失败",
                        trace_id=params['trace_id']
                    )
            else:
                logging(
                    code="6002",
                    info="爬虫处理失败",
                    trace_id=params['trace_id']
                )
        else:
            logging(
                code="6001",
                info="kimi 处理失败",
                trace_id=trace_id
            )
            if process_times >= NewContentIdTaskConst.TASK_MAX_PROCESS_TIMES:
                # Retry budget exhausted: give up on this content id.
                logging(
                    code="6011",
                    info="kimi处理次数达到上限, 放弃处理",
                    trace_id=trace_id
                )
                # Mark every init-status row with this content_id as kimi-illegal.
                update_sql = f"""
                    UPDATE {self.article_match_video_table}
                    SET content_status = %s
                    WHERE content_id = %s and content_status = %s;
                """
                affected_rows = await self.mysql_client.async_insert(
                    sql=update_sql,
                    params=(
                        NewContentIdTaskConst.KIMI_ILLEGAL_STATUS,
                        content_id,
                        NewContentIdTaskConst.TASK_INIT_STATUS
                    )
                )
                # Alert operators through the feishu bot.
                bot(
                    title="KIMI 处理失败",
                    detail={
                        "content_id": content_id,
                        "affected_rows": affected_rows
                    }
                )
  834. async def process_task(self, params):
  835. """
  836. 处理任务
  837. :return:
  838. """
  839. content_id = params['content_id']
  840. download_videos = await self.get_video_list(content_id)
  841. if not download_videos:
  842. # 开始处理, 判断是否有相同的文章 id 正在处理
  843. processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
  844. if processing_flag:
  845. logging(
  846. code="9001",
  847. info="该 content id 正在处理中, 跳过此任务--{}".format(content_id)
  848. )
  849. else:
  850. await self.start_process(params=params)
  851. else:
  852. print("存在已下载视频")
  853. async def deal(self):
  854. """
  855. function
  856. :return:
  857. """
  858. task_list = await self.get_tasks()
  859. task_dict = {}
  860. # 对 content_id去重
  861. for task in task_list:
  862. key = task['content_id']
  863. task_dict[key] = task
  864. process_list = []
  865. for item in task_dict:
  866. process_list.append(task_dict[item])
  867. logging(
  868. code="5001",
  869. info="Match Task Got {} this time".format(len(process_list)),
  870. function="Publish Task"
  871. )
  872. if task_list:
  873. total_task = len(process_list)
  874. print(process_list)
  875. a = time.time()
  876. print("开始处理,一共{}个任务".format(total_task))
  877. tasks = [self.process_task(params) for params in process_list]
  878. await asyncio.gather(*tasks)
  879. b = time.time()
  880. print("处理时间: {} s".format(b - a))
  881. else:
  882. logging(
  883. code="9008",
  884. info="没有要处理的请求"
  885. )