newContentIdTask.py

  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. import asyncio
  7. from applications.config import Config
  8. from applications.log import logging
  9. from applications.functions.pqFunctions import publish_to_pq
  10. from applications.functions.common import shuffle_list
  11. from applications.functions.kimi import KimiServer
  12. from applications.spider import search_videos_from_web
  13. from applications.etl_function import *


class NewContentIdTask(object):
    """
    Matching pipeline for articles that have no previously published match history
    """

    def __init__(self, mysql_client):
        self.mysql_client = mysql_client
        self.config = Config()
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        self.account_map = json.loads(self.config.get_config_value("accountMap"))
        self.spider_coroutines = self.config.get_config_value("spiderCoroutines")
        self.default_status = 0
        self.task_processing_status = 101
        self.task_defeat_status = 99
        self.article_text_table_error = 4
        self.max_process_times = 3
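
    # Overview of content_status transitions (added for clarity; inferred from the
    # status constants and update calls in this class, not from external docs):
    #   0   (default_status)          -> task waiting to be picked up
    #   101 (task_processing_status)  -> a step is currently running
    #   1   -> kimi step finished
    #   2   -> spider step finished (>= 3 candidate videos crawled)
    #   3   -> etl step finished (>= 3 videos downloaded and uploaded to OSS)
    #   4   -> videos published to pq
    #   99  (task_defeat_status)      -> terminal failure status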

    async def get_tasks(self):
        """
        Fetch the tasks to be processed
        :return:
        """
        # Fetch tasks whose content_status is "processing"; if a task has stayed in that
        # state for more than 1 hour, treat it as failed: roll it back to 0 and bump process_times.
        select_processing_sql = f"""
            SELECT trace_id, content_status_update_time, process_times
            FROM {self.article_match_video_table}
            WHERE content_status = {self.task_processing_status} and process_times <= {self.max_process_times};
        """
        processing_articles = await self.mysql_client.async_select(select_processing_sql)
        if processing_articles:
            processing_list = [
                {
                    "trace_id": item[0],
                    "content_status_update_time": item[1],
                    "process_times": item[2]
                }
                for item in processing_articles
            ]
            for obj in processing_list:
                if int(time.time()) - obj['content_status_update_time'] >= 3600:
                    # consider this task failed
                    await self.roll_back_content_status_when_fails(
                        process_times=obj['process_times'] + 1,
                        trace_id=obj['trace_id']
                    )
        # Mark tasks whose process_times exceeds 3 as failed
        update_status_sql = f"""
            UPDATE {self.article_match_video_table}
            SET content_status = %s
            WHERE process_times > %s;
        """
        await self.mysql_client.async_insert(
            update_status_sql,
            params=(self.task_defeat_status, self.max_process_times)
        )
        # Fetch tasks with process_times <= 3 and content_status = 0
        select_sql = f"""
            SELECT trace_id, content_id, flow_pool_level, gh_id, process_times
            FROM {self.article_match_video_table}
            WHERE content_status = {self.default_status} and process_times <= {self.max_process_times}
            limit {self.spider_coroutines};
        """
        tasks = await self.mysql_client.async_select(select_sql)
        if tasks:
            return [
                {
                    "trace_id": i[0],
                    "content_id": i[1],
                    "flow_pool_level": i[2],
                    "gh_id": i[3],
                    "process_times": i[4]
                }
                for i in tasks
            ]
        else:
            return []

    async def get_video_list(self, content_id):
        """
        Check whether this article already has enough matched (downloaded) videos
        :param content_id:
        :return:
        """
        sql = f"""
            SELECT id
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = 2;
        """
        res_tuple = await self.mysql_client.async_select(sql)
        if len(res_tuple) >= 3:
            return True
        else:
            return False

    async def update_content_status(self, new_content_status, trace_id, ori_content_status):
        """
        :param new_content_status:
        :param trace_id:
        :param ori_content_status:
        :return:
        """
        update_sql = f"""
            UPDATE {self.article_match_video_table}
            SET content_status = %s, content_status_update_time = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_sql,
            params=(
                new_content_status,
                int(time.time()),
                trace_id,
                ori_content_status
            )
        )

    async def roll_back_content_status_when_fails(self, process_times, trace_id):
        """
        On failure, roll the task back to the initial status and increment process_times by 1
        :param process_times:
        :param trace_id:
        :return:
        """
        update_article_sql = f"""
            UPDATE {self.article_match_video_table}
            SET
                content_status = %s,
                content_status_update_time = %s,
                process_times = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_article_sql,
            params=(
                self.default_status,
                int(time.time()),
                process_times + 1,
                trace_id,
                self.task_processing_status
            )
        )

    async def judge_whether_same_content_id_is_processing(self, content_id):
        """
        The same content_id only needs to be processed once
        :param content_id:
        :return:
        """
        select_sql = f"""
            SELECT distinct content_status
            FROM {self.article_match_video_table}
            WHERE content_id = '{content_id}';
        """
        result = await self.mysql_client.async_select(select_sql)
        if result:
            for item in result:
                content_status = item[0]
                if content_status != self.default_status:
                    return True
            return False
        else:
            return False

    async def get_downloaded_videos(self, content_id):
        """
        Get the videos that have already been downloaded for this content_id
        :return:
        """
        sql = f"""
            SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = 2;
        """
        res_tuple = await self.mysql_client.async_select(sql)
        return [
            {
                "platform": i[0],
                "play_count": i[1],
                "like_count": i[2],
                "video_oss_path": i[3],
                "cover_oss_path": i[4],
                "uid": i[5]
            }
            for i in res_tuple
        ]

    async def get_kimi_status(self, content_id):
        """
        Get the kimi status for a given content_id
        :return:
        """
        select_sql = f"""
            select kimi_status
            from {self.article_text_table}
            where content_id = '{content_id}';
        """
        response = await self.mysql_client.async_select(select_sql)
        if response:
            kimi_status = response[0][0]
            return kimi_status
        else:
            return self.article_text_table_error

    async def kimi_task(self, params):
        """
        Run the kimi task
        :return:
        """
        kimi_success_status = 1
        kimi_fail_status = 2
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        kimi_status_code = await self.get_kimi_status(content_id=content_id)
        if kimi_status_code == kimi_success_status:
            await self.update_content_status(
                new_content_status=kimi_success_status,
                trace_id=trace_id,
                ori_content_status=self.default_status
            )
            """
            {
                "kimi_title": kimi_title,
                "ori_title": article_obj['article_title'],
                "kimi_summary": content_title,
                "kimi_keys": kimi_info['content_keys']
            }
            """
            get_kimi_sql = f"""
                SELECT article_title, kimi_title, kimi_summary, kimi_keys
                FROM {self.article_text_table}
                WHERE content_id = '{content_id}';
            """
            kimi_info = await self.mysql_client.async_select(get_kimi_sql)
            return {
                "kimi_title": kimi_info[0][1],
                "ori_title": kimi_info[0][0],
                "kimi_summary": kimi_info[0][2],
                "kimi_keys": json.loads(kimi_info[0][3])
            }
        elif kimi_status_code == self.article_text_table_error:
            """
            todo: the article text table and the match table are not yet in sync, so skip this task for now
            """
            print("article_text表还没有更新")
        else:
            # Start processing: move content_status from 0 to 101
            await self.update_content_status(
                new_content_status=self.task_processing_status,
                trace_id=trace_id,
                ori_content_status=self.default_status
            )
            K = KimiServer()
            try:
                select_sql = f"""
                    select article_title, article_text
                    from {self.article_text_table}
                    where content_id = '{content_id}'
                """
                res = await self.mysql_client.async_select(select_sql)
                article_obj = {
                    "article_title": res[0][0],
                    "article_text": res[0][1],
                    "content_id": content_id
                }
                kimi_info = await K.search_kimi_schedule(params=article_obj)
                kimi_title = kimi_info['k_title']
                content_title = kimi_info['content_title'].replace("'", "").replace('"', "")
                content_keys = json.dumps(kimi_info['content_keys'], ensure_ascii=False)
                update_kimi_sql = f"""
                    UPDATE {self.article_text_table}
                    SET
                        kimi_title = %s,
                        kimi_summary = %s,
                        kimi_keys = %s,
                        kimi_status = %s
                    WHERE content_id = %s;
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_title, content_title, content_keys, kimi_success_status, params['content_id'])
                )
                await self.update_content_status(
                    new_content_status=kimi_success_status,
                    trace_id=trace_id,
                    ori_content_status=self.task_processing_status
                )
                return {
                    "kimi_title": kimi_title,
                    "ori_title": article_obj['article_title'],
                    "kimi_summary": content_title,
                    "kimi_keys": kimi_info['content_keys']
                }
            except Exception as e:
                # The kimi step failed
                update_kimi_sql = f"""
                    UPDATE {self.article_text_table}
                    SET
                        kimi_status = %s
                    WHERE content_id = %s
                """
                await self.mysql_client.async_insert(
                    sql=update_kimi_sql,
                    params=(kimi_fail_status, content_id)
                )
                # Roll the status back from 101 to 0
                await self.roll_back_content_status_when_fails(
                    process_times=process_times,
                    trace_id=trace_id
                )
                return {}

    async def spider_task(self, params, kimi_result):
        """
        Spider task
        :return:
        """
        spider_default_status = 1
        spider_success_status = 2
        trace_id = params['trace_id']
        content_id = params['content_id']
        process_times = params['process_times']
        gh_id = params['gh_id']
        try:
            # Start processing: move the status from 1 to 101
            await self.update_content_status(
                new_content_status=self.task_processing_status,
                ori_content_status=spider_default_status,
                trace_id=trace_id
            )
            search_videos_count = await search_videos_from_web(
                info={
                    "ori_title": kimi_result['ori_title'],
                    "kimi_summary": kimi_result['kimi_summary'],
                    "kimi_keys": kimi_result['kimi_keys'],
                    "trace_id": trace_id,
                    "gh_id": gh_id,
                    "content_id": content_id,
                    "crawler_video_table": self.article_crawler_video_table
                },
                gh_id_map=self.account_map,
                db_client=self.mysql_client
            )
            if search_videos_count >= 3:
                # The spider task succeeded: move the status from 101 to 2
                await self.update_content_status(
                    new_content_status=spider_success_status,
                    trace_id=trace_id,
                    ori_content_status=self.task_processing_status
                )
                return True
            else:
                await self.roll_back_content_status_when_fails(
                    process_times=process_times + 1,
                    trace_id=trace_id
                )
                return False
        except Exception as e:
            await self.roll_back_content_status_when_fails(
                process_times=process_times + 1,
                trace_id=trace_id
            )
            print("爬虫处理失败: {}".format(e))
            return False

    async def etl_task(self, params):
        """
        download && upload videos
        :param params:
        :return:
        """
        video_download_success_status = 2
        video_download_fail_status = 3
        etl_task_default_status = 2
        etl_task_success_status = 3
        trace_id = params['trace_id']
        content_id = params['content_id']
        # Check whether at least three videos have already been downloaded
        select_sql = f"""
            select count(id)
            from {self.article_crawler_video_table}
            where content_id = '{content_id}' and download_status = {video_download_success_status};
        """
        video_count_tuple = await self.mysql_client.async_select(select_sql)
        video_count = video_count_tuple[0][0]
        if video_count >= 3:
            await self.update_content_status(
                ori_content_status=etl_task_default_status,
                trace_id=trace_id,
                new_content_status=etl_task_success_status
            )
            return True
        else:
            # Start processing: mark the article as being processed
            await self.update_content_status(
                ori_content_status=etl_task_default_status,
                trace_id=trace_id,
                new_content_status=self.task_processing_status
            )
            select_sql = f"""
                SELECT id, out_video_id, platform, video_title, video_url, cover_url, user_id, trace_id
                FROM {self.article_crawler_video_table}
                WHERE content_id = '{content_id}' and download_status != {video_download_success_status}
                ORDER BY score DESC;
            """
            videos_need_to_download_tuple = await self.mysql_client.async_select(select_sql)
            downloaded_count = 0
            for line in videos_need_to_download_tuple:
                # per-video fields, kept separate from the task-level `params`
                video_params = {
                    "id": line[0],
                    "video_id": line[1],
                    "platform": line[2],
                    "video_title": line[3],
                    "video_url": line[4],
                    "cover_url": line[5],
                    "user_id": line[6],
                    "trace_id": line[7]
                }
                try:
                    local_video_path, local_cover_path = generate_video_path(video_params['platform'], video_params['video_id'])
                    # download video
                    file_path = await download_video(
                        file_path=local_video_path,
                        platform=video_params['platform'],
                        video_url=video_params['video_url']
                    )
                    # download cover
                    cover_path = await download_cover(
                        file_path=local_cover_path,
                        platform=video_params['platform'],
                        cover_url=video_params['cover_url']
                    )
                    oss_video = await upload_to_oss(
                        local_video_path=file_path,
                        download_type="video"
                    )
                    if cover_path:
                        oss_cover = await upload_to_oss(
                            local_video_path=cover_path,
                            download_type="image"
                        )
                    else:
                        oss_cover = None
                    update_sql = f"""
                        UPDATE {self.article_crawler_video_table}
                        SET video_oss_path = %s, cover_oss_path = %s, download_status = %s
                        WHERE id = %s;
                    """
                    await self.mysql_client.async_insert(
                        sql=update_sql,
                        params=(
                            oss_video,
                            oss_cover,
                            video_download_success_status,
                            video_params['id']
                        )
                    )
                    downloaded_count += 1
                except Exception as e:
                    update_sql = f"""
                        UPDATE {self.article_crawler_video_table}
                        SET download_status = %s
                        WHERE id = %s;
                    """
                    await self.mysql_client.async_insert(
                        sql=update_sql,
                        params=(video_download_fail_status, video_params['id'])
                    )
            if downloaded_count >= 3:
                await self.update_content_status(
                    ori_content_status=self.task_processing_status,
                    trace_id=trace_id,
                    new_content_status=etl_task_success_status
                )
                return True
            else:
                await self.roll_back_content_status_when_fails(
                    process_times=params['process_times'] + 1,
                    trace_id=params['trace_id']
                )
                return False

    async def publish_task(self, params, kimi_title):
        """
        Publish task
        :param kimi_title:
        :param params:
        :return:
        """
        publish_default_status = 3
        publish_success_status = 4
        gh_id = params['gh_id']
        flow_pool_level = params['flow_pool_level']
        content_id = params['content_id']
        trace_id = params['trace_id']
        process_times = params['process_times']
        # Start processing: mark the task as in progress
        await self.update_content_status(
            ori_content_status=publish_default_status,
            trace_id=trace_id,
            new_content_status=self.task_processing_status
        )
        try:
            download_videos = await self.get_downloaded_videos(content_id)
            match flow_pool_level:
                case "autoArticlePoolLevel4":
                    # Cold-start pool: handle everything
                    video_list = shuffle_list(download_videos)[:3]
                case "autoArticlePoolLevel3":
                    if self.gh_id_dict.get(gh_id):
                        video_list = shuffle_list(download_videos)[:3]
                    else:
                        video_list = download_videos[:3]
                case "autoArticlePoolLevel2":
                    # Second-position articles: only done for specific accounts, so skip here
                    video_list = []
                case "autoArticlePoolLevel1":
                    # Headline articles: not handled yet
                    video_list = download_videos[:3]
                case _:
                    video_list = download_videos[:3]
            L = []
            for video_obj in video_list:
                publish_params = {
                    "videoPath": video_obj['video_oss_path'],
                    "uid": video_obj['uid'],
                    "title": kimi_title
                }
                response = await publish_to_pq(publish_params)
                await asyncio.sleep(2)
                obj = {
                    "uid": video_obj['uid'],
                    "source": video_obj['platform'],
                    "kimiTitle": kimi_title,
                    "videoId": response['data']['id'],
                    "videoCover": response['data']['shareImgPath'],
                    "videoPath": response['data']['videoPath'],
                    "videoOss": video_obj['video_oss_path']
                }
                L.append(obj)
            update_sql = f"""
                UPDATE {self.article_match_video_table}
                SET content_status = %s, response = %s, process_times = %s
                WHERE trace_id = %s and content_status = %s;
            """
            # Move the status from "processing" to "published"
            await self.mysql_client.async_insert(
                sql=update_sql,
                params=(
                    publish_success_status,
                    json.dumps(L, ensure_ascii=False),
                    process_times + 1,
                    trace_id,
                    self.task_processing_status
                )
            )
        except Exception as e:
            await self.roll_back_content_status_when_fails(
                process_times=params['process_times'] + 1,
                trace_id=params['trace_id']
            )
            print(e)

    async def start_process(self, params):
        """
        Process a single article
        :param params:
        :return:
        """
        # step 1: run the kimi step
        kimi_result = await self.kimi_task(params)
        if kimi_result:
            # Once the kimi step has finished, run spider_task
            spider_flag = await self.spider_task(params=params, kimi_result=kimi_result)
            if spider_flag:
                # Once the spider has finished, run etl_task
                etl_flag = await self.etl_task(params)
                if etl_flag:
                    # Once download/upload has finished, run the publish task
                    try:
                        await self.publish_task(params, kimi_result['kimi_title'])
                    except Exception as e:
                        logging(
                            code="9001",
                            info="publish 失败--{}".format(e),
                            trace_id=params['trace_id']
                        )
                else:
                    logging(
                        code="8001",
                        info="ETL 处理失败",
                        trace_id=params['trace_id']
                    )
            else:
                logging(
                    code="7002",
                    info="爬虫处理失败",
                    trace_id=params['trace_id']
                )
        else:
            logging(
                code="6001",
                info="kimi 处理失败",
                trace_id=params['trace_id']
            )

    async def process_task(self, params):
        """
        Process one task
        :return:
        """
        content_id = params['content_id']
        download_videos = await self.get_video_list(content_id)
        if not download_videos:
            # Before processing, check whether the same content_id is already being processed
            processing_flag = await self.judge_whether_same_content_id_is_processing(content_id)
            if processing_flag:
                logging(
                    code="9001",
                    info="该 content id 正在处理中, 跳过此任务"
                )
            else:
                await self.start_process(params=params)

    async def deal(self):
        """
        function
        :return:
        """
        task_list = await self.get_tasks()
        logging(
            code="5001",
            info="Match Task Got {} this time".format(len(task_list)),
            function="Publish Task"
        )
        if task_list:
            tasks = [self.process_task(params) for params in task_list]
            await asyncio.gather(*tasks)
        else:
            logging(
                code="9008",
                info="没有要处理的请求"
            )
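

# Hedged usage sketch (not part of the original module): a minimal illustration of how
# this task class might be driven end to end. It assumes the project provides an
# asyncio-based MySQL client exposing the async_select / async_insert methods used
# above; the `AsyncMySQLClient` name and its import path below are assumptions made
# purely for illustration, not the project's confirmed API.
if __name__ == '__main__':
    from applications.db import AsyncMySQLClient  # hypothetical import path

    async def main():
        mysql_client = AsyncMySQLClient()  # hypothetical constructor
        task = NewContentIdTask(mysql_client)
        # pick up pending rows and run kimi -> spider -> etl -> publish for each one
        await task.deal()

    asyncio.run(main())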